From 4aab4087dc97906d0b9890035401175cdaab32d4 Mon Sep 17 00:00:00 2001 From: blackhao <13851610112@163.com> Date: Fri, 22 Aug 2025 02:51:50 -0500 Subject: 2.0 --- .../python3.12/site-packages/networkx/__init__.py | 53 + .../networkx/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 1474 bytes .../networkx/__pycache__/conftest.cpython-312.pyc | Bin 0 -> 8423 bytes .../networkx/__pycache__/convert.cpython-312.pyc | Bin 0 -> 18890 bytes .../__pycache__/convert_matrix.cpython-312.pyc | Bin 0 -> 50841 bytes .../networkx/__pycache__/exception.cpython-312.pyc | Bin 0 -> 5399 bytes .../__pycache__/lazy_imports.cpython-312.pyc | Bin 0 -> 7338 bytes .../networkx/__pycache__/relabel.cpython-312.pyc | Bin 0 -> 13304 bytes .../site-packages/networkx/algorithms/__init__.py | 133 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 5784 bytes .../__pycache__/asteroidal.cpython-312.pyc | Bin 0 -> 6192 bytes .../__pycache__/boundary.cpython-312.pyc | Bin 0 -> 5963 bytes .../algorithms/__pycache__/bridges.cpython-312.pyc | Bin 0 -> 7483 bytes .../__pycache__/broadcasting.cpython-312.pyc | Bin 0 -> 7952 bytes .../algorithms/__pycache__/chains.cpython-312.pyc | Bin 0 -> 6146 bytes .../algorithms/__pycache__/chordal.cpython-312.pyc | Bin 0 -> 16071 bytes .../algorithms/__pycache__/clique.cpython-312.pyc | Bin 0 -> 31785 bytes .../algorithms/__pycache__/cluster.cpython-312.pyc | Bin 0 -> 27390 bytes .../communicability_alg.cpython-312.pyc | Bin 0 -> 5569 bytes .../algorithms/__pycache__/core.cpython-312.pyc | Bin 0 -> 20200 bytes .../__pycache__/covering.cpython-312.pyc | Bin 0 -> 5617 bytes .../algorithms/__pycache__/cuts.cpython-312.pyc | Bin 0 -> 11959 bytes .../algorithms/__pycache__/cycles.cpython-312.pyc | Bin 0 -> 49324 bytes .../__pycache__/d_separation.cpython-312.pyc | Bin 0 -> 27738 bytes .../algorithms/__pycache__/dag.cpython-312.pyc | Bin 0 -> 53794 bytes .../__pycache__/distance_measures.cpython-312.pyc | Bin 0 -> 41152 bytes .../__pycache__/distance_regular.cpython-312.pyc | Bin 0 -> 8161 bytes .../__pycache__/dominance.cpython-312.pyc | Bin 0 -> 4796 bytes .../__pycache__/dominating.cpython-312.pyc | Bin 0 -> 9202 bytes .../efficiency_measures.cpython-312.pyc | Bin 0 -> 5591 bytes .../algorithms/__pycache__/euler.cpython-312.pyc | Bin 0 -> 16922 bytes .../__pycache__/graph_hashing.cpython-312.pyc | Bin 0 -> 18524 bytes .../__pycache__/graphical.cpython-312.pyc | Bin 0 -> 16323 bytes .../__pycache__/hierarchy.cpython-312.pyc | Bin 0 -> 2636 bytes .../algorithms/__pycache__/hybrid.cpython-312.pyc | Bin 0 -> 5894 bytes .../algorithms/__pycache__/isolate.cpython-312.pyc | Bin 0 -> 3313 bytes .../__pycache__/link_prediction.cpython-312.pyc | Bin 0 -> 26570 bytes .../lowest_common_ancestors.cpython-312.pyc | Bin 0 -> 9563 bytes .../__pycache__/matching.cpython-312.pyc | Bin 0 -> 32805 bytes .../algorithms/__pycache__/mis.cpython-312.pyc | Bin 0 -> 3249 bytes .../algorithms/__pycache__/moral.cpython-312.pyc | Bin 0 -> 2083 bytes .../node_classification.cpython-312.pyc | Bin 0 -> 8268 bytes .../__pycache__/non_randomness.cpython-312.pyc | Bin 0 -> 4213 bytes .../__pycache__/planar_drawing.cpython-312.pyc | Bin 0 -> 14721 bytes .../__pycache__/planarity.cpython-312.pyc | Bin 0 -> 61022 bytes .../__pycache__/polynomials.cpython-312.pyc | Bin 0 -> 12635 bytes .../__pycache__/reciprocity.cpython-312.pyc | Bin 0 -> 3276 bytes .../algorithms/__pycache__/regular.cpython-312.pyc | Bin 0 -> 11379 bytes .../__pycache__/richclub.cpython-312.pyc | Bin 0 -> 5880 bytes .../__pycache__/similarity.cpython-312.pyc | Bin 
0 -> 67243 bytes .../__pycache__/simple_paths.cpython-312.pyc | Bin 0 -> 32164 bytes .../__pycache__/smallworld.cpython-312.pyc | Bin 0 -> 14654 bytes .../algorithms/__pycache__/smetric.cpython-312.pyc | Bin 0 -> 1434 bytes .../__pycache__/sparsifiers.cpython-312.pyc | Bin 0 -> 9486 bytes .../__pycache__/structuralholes.cpython-312.pyc | Bin 0 -> 14716 bytes .../__pycache__/summarization.cpython-312.pyc | Bin 0 -> 24989 bytes .../algorithms/__pycache__/swap.cpython-312.pyc | Bin 0 -> 14523 bytes .../__pycache__/threshold.cpython-312.pyc | Bin 0 -> 35148 bytes .../__pycache__/time_dependent.cpython-312.pyc | Bin 0 -> 7359 bytes .../__pycache__/tournament.cpython-312.pyc | Bin 0 -> 15111 bytes .../algorithms/__pycache__/triads.cpython-312.pyc | Bin 0 -> 16027 bytes .../__pycache__/vitality.cpython-312.pyc | Bin 0 -> 2762 bytes .../algorithms/__pycache__/voronoi.cpython-312.pyc | Bin 0 -> 3298 bytes .../algorithms/__pycache__/walks.cpython-312.pyc | Bin 0 -> 3018 bytes .../algorithms/__pycache__/wiener.cpython-312.pyc | Bin 0 -> 9631 bytes .../networkx/algorithms/approximation/__init__.py | 26 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 1469 bytes .../__pycache__/clique.cpython-312.pyc | Bin 0 -> 8988 bytes .../clustering_coefficient.cpython-312.pyc | Bin 0 -> 2848 bytes .../__pycache__/connectivity.cpython-312.pyc | Bin 0 -> 13719 bytes .../__pycache__/density.cpython-312.pyc | Bin 0 -> 15521 bytes .../__pycache__/distance_measures.cpython-312.pyc | Bin 0 -> 6027 bytes .../__pycache__/dominating_set.cpython-312.pyc | Bin 0 -> 4991 bytes .../__pycache__/kcomponents.cpython-312.pyc | Bin 0 -> 18598 bytes .../__pycache__/matching.cpython-312.pyc | Bin 0 -> 1566 bytes .../__pycache__/maxcut.cpython-312.pyc | Bin 0 -> 5322 bytes .../__pycache__/ramsey.cpython-312.pyc | Bin 0 -> 2259 bytes .../__pycache__/steinertree.cpython-312.pyc | Bin 0 -> 11860 bytes .../__pycache__/traveling_salesman.cpython-312.pyc | Bin 0 -> 59806 bytes .../__pycache__/treewidth.cpython-312.pyc | Bin 0 -> 8895 bytes .../__pycache__/vertex_cover.cpython-312.pyc | Bin 0 -> 3264 bytes .../networkx/algorithms/approximation/clique.py | 259 ++ .../approximation/clustering_coefficient.py | 71 + .../algorithms/approximation/connectivity.py | 412 +++ .../networkx/algorithms/approximation/density.py | 396 +++ .../algorithms/approximation/distance_measures.py | 150 + .../algorithms/approximation/dominating_set.py | 149 + .../algorithms/approximation/kcomponents.py | 367 +++ .../networkx/algorithms/approximation/matching.py | 44 + .../networkx/algorithms/approximation/maxcut.py | 143 + .../networkx/algorithms/approximation/ramsey.py | 53 + .../algorithms/approximation/steinertree.py | 248 ++ .../algorithms/approximation/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 216 bytes .../test_approx_clust_coeff.cpython-312.pyc | Bin 0 -> 2427 bytes .../tests/__pycache__/test_clique.cpython-312.pyc | Bin 0 -> 6360 bytes .../__pycache__/test_connectivity.cpython-312.pyc | Bin 0 -> 11042 bytes .../tests/__pycache__/test_density.cpython-312.pyc | Bin 0 -> 7389 bytes .../test_distance_measures.cpython-312.pyc | Bin 0 -> 4280 bytes .../test_dominating_set.cpython-312.pyc | Bin 0 -> 3870 bytes .../__pycache__/test_kcomponents.cpython-312.pyc | Bin 0 -> 17838 bytes .../__pycache__/test_matching.cpython-312.pyc | Bin 0 -> 652 bytes .../tests/__pycache__/test_maxcut.cpython-312.pyc | Bin 0 -> 5584 bytes .../tests/__pycache__/test_ramsey.cpython-312.pyc | Bin 0 -> 2065 bytes 
.../__pycache__/test_steinertree.cpython-312.pyc | Bin 0 -> 13867 bytes .../test_traveling_salesman.cpython-312.pyc | Bin 0 -> 46085 bytes .../__pycache__/test_treewidth.cpython-312.pyc | Bin 0 -> 13249 bytes .../__pycache__/test_vertex_cover.cpython-312.pyc | Bin 0 -> 4459 bytes .../approximation/tests/test_approx_clust_coeff.py | 41 + .../algorithms/approximation/tests/test_clique.py | 112 + .../approximation/tests/test_connectivity.py | 199 ++ .../algorithms/approximation/tests/test_density.py | 138 + .../approximation/tests/test_distance_measures.py | 59 + .../approximation/tests/test_dominating_set.py | 78 + .../approximation/tests/test_kcomponents.py | 303 ++ .../approximation/tests/test_matching.py | 8 + .../algorithms/approximation/tests/test_maxcut.py | 94 + .../algorithms/approximation/tests/test_ramsey.py | 31 + .../approximation/tests/test_steinertree.py | 265 ++ .../approximation/tests/test_traveling_salesman.py | 1013 +++++++ .../approximation/tests/test_treewidth.py | 274 ++ .../approximation/tests/test_vertex_cover.py | 68 + .../algorithms/approximation/traveling_salesman.py | 1508 ++++++++++ .../networkx/algorithms/approximation/treewidth.py | 255 ++ .../algorithms/approximation/vertex_cover.py | 83 + .../networkx/algorithms/assortativity/__init__.py | 5 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 511 bytes .../__pycache__/connectivity.cpython-312.pyc | Bin 0 -> 5216 bytes .../__pycache__/correlation.cpython-312.pyc | Bin 0 -> 10840 bytes .../__pycache__/mixing.cpython-312.pyc | Bin 0 -> 8436 bytes .../__pycache__/neighbor_degree.cpython-312.pyc | Bin 0 -> 6150 bytes .../__pycache__/pairs.cpython-312.pyc | Bin 0 -> 4836 bytes .../algorithms/assortativity/connectivity.py | 122 + .../algorithms/assortativity/correlation.py | 302 ++ .../networkx/algorithms/assortativity/mixing.py | 255 ++ .../algorithms/assortativity/neighbor_degree.py | 160 + .../networkx/algorithms/assortativity/pairs.py | 127 + .../algorithms/assortativity/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 216 bytes .../tests/__pycache__/base_test.cpython-312.pyc | Bin 0 -> 5394 bytes .../__pycache__/test_connectivity.cpython-312.pyc | Bin 0 -> 7984 bytes .../__pycache__/test_correlation.cpython-312.pyc | Bin 0 -> 10722 bytes .../tests/__pycache__/test_mixing.cpython-312.pyc | Bin 0 -> 12463 bytes .../test_neighbor_degree.cpython-312.pyc | Bin 0 -> 6668 bytes .../tests/__pycache__/test_pairs.cpython-312.pyc | Bin 0 -> 5358 bytes .../algorithms/assortativity/tests/base_test.py | 81 + .../assortativity/tests/test_connectivity.py | 143 + .../assortativity/tests/test_correlation.py | 122 + .../algorithms/assortativity/tests/test_mixing.py | 174 ++ .../assortativity/tests/test_neighbor_degree.py | 107 + .../algorithms/assortativity/tests/test_pairs.py | 87 + .../networkx/algorithms/asteroidal.py | 164 + .../networkx/algorithms/bipartite/__init__.py | 88 + .../bipartite/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 4114 bytes .../bipartite/__pycache__/basic.cpython-312.pyc | Bin 0 -> 10489 bytes .../__pycache__/centrality.cpython-312.pyc | Bin 0 -> 10724 bytes .../bipartite/__pycache__/cluster.cpython-312.pyc | Bin 0 -> 9432 bytes .../bipartite/__pycache__/covering.cpython-312.pyc | Bin 0 -> 2515 bytes .../bipartite/__pycache__/edgelist.cpython-312.pyc | Bin 0 -> 12978 bytes .../__pycache__/extendability.cpython-312.pyc | Bin 0 -> 5027 bytes .../__pycache__/generators.cpython-312.pyc | Bin 0 -> 25022 bytes .../__pycache__/link_analysis.cpython-312.pyc | Bin 0 -> 13839 
bytes .../bipartite/__pycache__/matching.cpython-312.pyc | Bin 0 -> 18868 bytes .../bipartite/__pycache__/matrix.cpython-312.pyc | Bin 0 -> 7967 bytes .../__pycache__/projection.cpython-312.pyc | Bin 0 -> 21883 bytes .../__pycache__/redundancy.cpython-312.pyc | Bin 0 -> 4436 bytes .../bipartite/__pycache__/spectral.cpython-312.pyc | Bin 0 -> 2601 bytes .../networkx/algorithms/bipartite/basic.py | 322 ++ .../networkx/algorithms/bipartite/centrality.py | 290 ++ .../networkx/algorithms/bipartite/cluster.py | 289 ++ .../networkx/algorithms/bipartite/covering.py | 57 + .../networkx/algorithms/bipartite/edgelist.py | 360 +++ .../networkx/algorithms/bipartite/extendability.py | 105 + .../networkx/algorithms/bipartite/generators.py | 603 ++++ .../networkx/algorithms/bipartite/link_analysis.py | 316 ++ .../networkx/algorithms/bipartite/matching.py | 590 ++++ .../networkx/algorithms/bipartite/matrix.py | 168 ++ .../networkx/algorithms/bipartite/projection.py | 526 ++++ .../networkx/algorithms/bipartite/redundancy.py | 112 + .../networkx/algorithms/bipartite/spectral.py | 69 + .../algorithms/bipartite/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 212 bytes .../tests/__pycache__/test_basic.cpython-312.pyc | Bin 0 -> 9929 bytes .../__pycache__/test_centrality.cpython-312.pyc | Bin 0 -> 8280 bytes .../tests/__pycache__/test_cluster.cpython-312.pyc | Bin 0 -> 5661 bytes .../__pycache__/test_covering.cpython-312.pyc | Bin 0 -> 2709 bytes .../__pycache__/test_edgelist.cpython-312.pyc | Bin 0 -> 15058 bytes .../__pycache__/test_extendability.cpython-312.pyc | Bin 0 -> 7486 bytes .../__pycache__/test_generators.cpython-312.pyc | Bin 0 -> 21773 bytes .../__pycache__/test_link_analysis.cpython-312.pyc | Bin 0 -> 9206 bytes .../__pycache__/test_matching.cpython-312.pyc | Bin 0 -> 20154 bytes .../tests/__pycache__/test_matrix.cpython-312.pyc | Bin 0 -> 7479 bytes .../tests/__pycache__/test_project.cpython-312.pyc | Bin 0 -> 25983 bytes .../__pycache__/test_redundancy.cpython-312.pyc | Bin 0 -> 1942 bytes .../test_spectral_bipartivity.cpython-312.pyc | Bin 0 -> 4657 bytes .../algorithms/bipartite/tests/test_basic.py | 125 + .../algorithms/bipartite/tests/test_centrality.py | 192 ++ .../algorithms/bipartite/tests/test_cluster.py | 84 + .../algorithms/bipartite/tests/test_covering.py | 33 + .../algorithms/bipartite/tests/test_edgelist.py | 240 ++ .../bipartite/tests/test_extendability.py | 334 +++ .../algorithms/bipartite/tests/test_generators.py | 407 +++ .../bipartite/tests/test_link_analysis.py | 218 ++ .../algorithms/bipartite/tests/test_matching.py | 327 ++ .../algorithms/bipartite/tests/test_matrix.py | 82 + .../algorithms/bipartite/tests/test_project.py | 407 +++ .../algorithms/bipartite/tests/test_redundancy.py | 35 + .../bipartite/tests/test_spectral_bipartivity.py | 80 + .../site-packages/networkx/algorithms/boundary.py | 168 ++ .../site-packages/networkx/algorithms/bridges.py | 205 ++ .../networkx/algorithms/broadcasting.py | 155 + .../networkx/algorithms/centrality/__init__.py | 20 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 752 bytes .../__pycache__/betweenness.cpython-312.pyc | Bin 0 -> 16288 bytes .../__pycache__/betweenness_subset.cpython-312.pyc | Bin 0 -> 9920 bytes .../__pycache__/closeness.cpython-312.pyc | Bin 0 -> 10639 bytes .../current_flow_betweenness.cpython-312.pyc | Bin 0 -> 14347 bytes ...current_flow_betweenness_subset.cpython-312.pyc | Bin 0 -> 9633 bytes .../current_flow_closeness.cpython-312.pyc | Bin 0 -> 4278 bytes 
.../__pycache__/degree_alg.cpython-312.pyc | Bin 0 -> 4990 bytes .../__pycache__/dispersion.cpython-312.pyc | Bin 0 -> 3844 bytes .../__pycache__/eigenvector.cpython-312.pyc | Bin 0 -> 15177 bytes .../__pycache__/flow_matrix.cpython-312.pyc | Bin 0 -> 8577 bytes .../centrality/__pycache__/group.cpython-312.pyc | Bin 0 -> 29373 bytes .../__pycache__/harmonic.cpython-312.pyc | Bin 0 -> 3474 bytes .../centrality/__pycache__/katz.cpython-312.pyc | Bin 0 -> 12833 bytes .../__pycache__/laplacian.cpython-312.pyc | Bin 0 -> 6374 bytes .../centrality/__pycache__/load.cpython-312.pyc | Bin 0 -> 7293 bytes .../__pycache__/percolation.cpython-312.pyc | Bin 0 -> 4899 bytes .../__pycache__/reaching.cpython-312.pyc | Bin 0 -> 8465 bytes .../__pycache__/second_order.cpython-312.pyc | Bin 0 -> 6885 bytes .../__pycache__/subgraph_alg.cpython-312.pyc | Bin 0 -> 11022 bytes .../centrality/__pycache__/trophic.cpython-312.pyc | Bin 0 -> 6636 bytes .../__pycache__/voterank_alg.cpython-312.pyc | Bin 0 -> 4063 bytes .../networkx/algorithms/centrality/betweenness.py | 469 +++ .../algorithms/centrality/betweenness_subset.py | 275 ++ .../networkx/algorithms/centrality/closeness.py | 282 ++ .../centrality/current_flow_betweenness.py | 342 +++ .../centrality/current_flow_betweenness_subset.py | 227 ++ .../centrality/current_flow_closeness.py | 96 + .../networkx/algorithms/centrality/degree_alg.py | 150 + .../networkx/algorithms/centrality/dispersion.py | 107 + .../networkx/algorithms/centrality/eigenvector.py | 357 +++ .../networkx/algorithms/centrality/flow_matrix.py | 130 + .../networkx/algorithms/centrality/group.py | 787 +++++ .../networkx/algorithms/centrality/harmonic.py | 89 + .../networkx/algorithms/centrality/katz.py | 331 +++ .../networkx/algorithms/centrality/laplacian.py | 150 + .../networkx/algorithms/centrality/load.py | 200 ++ .../networkx/algorithms/centrality/percolation.py | 128 + .../networkx/algorithms/centrality/reaching.py | 209 ++ .../networkx/algorithms/centrality/second_order.py | 141 + .../networkx/algorithms/centrality/subgraph_alg.py | 342 +++ .../algorithms/centrality/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 213 bytes .../test_betweenness_centrality.cpython-312.pyc | Bin 0 -> 39839 bytes ...t_betweenness_centrality_subset.cpython-312.pyc | Bin 0 -> 19533 bytes .../test_closeness_centrality.cpython-312.pyc | Bin 0 -> 14105 bytes ...ent_flow_betweenness_centrality.cpython-312.pyc | Bin 0 -> 13638 bytes ...w_betweenness_centrality_subset.cpython-312.pyc | Bin 0 -> 9317 bytes .../test_current_flow_closeness.cpython-312.pyc | Bin 0 -> 3150 bytes .../test_degree_centrality.cpython-312.pyc | Bin 0 -> 7345 bytes .../__pycache__/test_dispersion.cpython-312.pyc | Bin 0 -> 3161 bytes .../test_eigenvector_centrality.cpython-312.pyc | Bin 0 -> 10478 bytes .../tests/__pycache__/test_group.cpython-312.pyc | Bin 0 -> 14888 bytes .../test_harmonic_centrality.cpython-312.pyc | Bin 0 -> 8289 bytes .../test_katz_centrality.cpython-312.pyc | Bin 0 -> 17842 bytes .../test_laplacian_centrality.cpython-312.pyc | Bin 0 -> 9836 bytes .../test_load_centrality.cpython-312.pyc | Bin 0 -> 15498 bytes .../test_percolation_centrality.cpython-312.pyc | Bin 0 -> 4303 bytes .../__pycache__/test_reaching.cpython-312.pyc | Bin 0 -> 10903 bytes .../test_second_order_centrality.cpython-312.pyc | Bin 0 -> 4645 bytes .../__pycache__/test_subgraph.cpython-312.pyc | Bin 0 -> 5089 bytes .../tests/__pycache__/test_trophic.cpython-312.pyc | Bin 0 -> 14583 bytes 
.../__pycache__/test_voterank.cpython-312.pyc | Bin 0 -> 3057 bytes .../tests/test_betweenness_centrality.py | 903 ++++++ .../tests/test_betweenness_centrality_subset.py | 340 +++ .../centrality/tests/test_closeness_centrality.py | 274 ++ .../test_current_flow_betweenness_centrality.py | 197 ++ ...t_current_flow_betweenness_centrality_subset.py | 147 + .../tests/test_current_flow_closeness.py | 43 + .../centrality/tests/test_degree_centrality.py | 144 + .../algorithms/centrality/tests/test_dispersion.py | 73 + .../tests/test_eigenvector_centrality.py | 186 ++ .../algorithms/centrality/tests/test_group.py | 277 ++ .../centrality/tests/test_harmonic_centrality.py | 122 + .../centrality/tests/test_katz_centrality.py | 345 +++ .../centrality/tests/test_laplacian_centrality.py | 220 ++ .../centrality/tests/test_load_centrality.py | 344 +++ .../tests/test_percolation_centrality.py | 87 + .../algorithms/centrality/tests/test_reaching.py | 140 + .../tests/test_second_order_centrality.py | 82 + .../algorithms/centrality/tests/test_subgraph.py | 110 + .../algorithms/centrality/tests/test_trophic.py | 302 ++ .../algorithms/centrality/tests/test_voterank.py | 64 + .../networkx/algorithms/centrality/trophic.py | 181 ++ .../networkx/algorithms/centrality/voterank_alg.py | 95 + .../site-packages/networkx/algorithms/chains.py | 172 ++ .../site-packages/networkx/algorithms/chordal.py | 443 +++ .../site-packages/networkx/algorithms/clique.py | 757 +++++ .../site-packages/networkx/algorithms/cluster.py | 658 +++++ .../networkx/algorithms/coloring/__init__.py | 4 + .../coloring/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 406 bytes .../__pycache__/equitable_coloring.cpython-312.pyc | Bin 0 -> 15409 bytes .../__pycache__/greedy_coloring.cpython-312.pyc | Bin 0 -> 22306 bytes .../algorithms/coloring/equitable_coloring.py | 505 ++++ .../algorithms/coloring/greedy_coloring.py | 565 ++++ .../networkx/algorithms/coloring/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 211 bytes .../__pycache__/test_coloring.cpython-312.pyc | Bin 0 -> 28198 bytes .../algorithms/coloring/tests/test_coloring.py | 863 ++++++ .../networkx/algorithms/communicability_alg.py | 163 + .../networkx/algorithms/community/__init__.py | 28 + .../community/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 1510 bytes .../__pycache__/asyn_fluid.cpython-312.pyc | Bin 0 -> 5639 bytes .../__pycache__/centrality.cpython-312.pyc | Bin 0 -> 7109 bytes .../__pycache__/community_utils.cpython-312.pyc | Bin 0 -> 1511 bytes .../community/__pycache__/divisive.cpython-312.pyc | Bin 0 -> 7650 bytes .../community/__pycache__/kclique.cpython-312.pyc | Bin 0 -> 3149 bytes .../__pycache__/kernighan_lin.cpython-312.pyc | Bin 0 -> 6770 bytes .../__pycache__/label_propagation.cpython-312.pyc | Bin 0 -> 13404 bytes .../community/__pycache__/leiden.cpython-312.pyc | Bin 0 -> 7515 bytes .../community/__pycache__/local.cpython-312.pyc | Bin 0 -> 8368 bytes .../community/__pycache__/louvain.cpython-312.pyc | Bin 0 -> 17264 bytes .../community/__pycache__/lukes.cpython-312.pyc | Bin 0 -> 10590 bytes .../__pycache__/modularity_max.cpython-312.pyc | Bin 0 -> 17217 bytes .../community/__pycache__/quality.cpython-312.pyc | Bin 0 -> 14013 bytes .../networkx/algorithms/community/asyn_fluid.py | 151 + .../networkx/algorithms/community/centrality.py | 171 ++ .../algorithms/community/community_utils.py | 30 + .../networkx/algorithms/community/divisive.py | 216 ++ .../networkx/algorithms/community/kclique.py | 79 + 
.../networkx/algorithms/community/kernighan_lin.py | 139 + .../algorithms/community/label_propagation.py | 338 +++ .../networkx/algorithms/community/leiden.py | 162 + .../networkx/algorithms/community/local.py | 220 ++ .../networkx/algorithms/community/louvain.py | 384 +++ .../networkx/algorithms/community/lukes.py | 227 ++ .../algorithms/community/modularity_max.py | 451 +++ .../networkx/algorithms/community/quality.py | 347 +++ .../algorithms/community/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 212 bytes .../__pycache__/test_asyn_fluid.cpython-312.pyc | Bin 0 -> 5899 bytes .../__pycache__/test_centrality.cpython-312.pyc | Bin 0 -> 5317 bytes .../__pycache__/test_divisive.cpython-312.pyc | Bin 0 -> 7781 bytes .../tests/__pycache__/test_kclique.cpython-312.pyc | Bin 0 -> 4733 bytes .../__pycache__/test_kernighan_lin.cpython-312.pyc | Bin 0 -> 4755 bytes .../test_label_propagation.cpython-312.pyc | Bin 0 -> 15638 bytes .../tests/__pycache__/test_leiden.cpython-312.pyc | Bin 0 -> 8583 bytes .../tests/__pycache__/test_local.cpython-312.pyc | Bin 0 -> 3551 bytes .../tests/__pycache__/test_louvain.cpython-312.pyc | Bin 0 -> 12610 bytes .../tests/__pycache__/test_lukes.cpython-312.pyc | Bin 0 -> 6297 bytes .../test_modularity_max.cpython-312.pyc | Bin 0 -> 12582 bytes .../tests/__pycache__/test_quality.cpython-312.pyc | Bin 0 -> 8412 bytes .../tests/__pycache__/test_utils.cpython-312.pyc | Bin 0 -> 1875 bytes .../algorithms/community/tests/test_asyn_fluid.py | 136 + .../algorithms/community/tests/test_centrality.py | 85 + .../algorithms/community/tests/test_divisive.py | 106 + .../algorithms/community/tests/test_kclique.py | 91 + .../community/tests/test_kernighan_lin.py | 92 + .../community/tests/test_label_propagation.py | 241 ++ .../algorithms/community/tests/test_leiden.py | 138 + .../algorithms/community/tests/test_local.py | 76 + .../algorithms/community/tests/test_louvain.py | 264 ++ .../algorithms/community/tests/test_lukes.py | 152 + .../community/tests/test_modularity_max.py | 340 +++ .../algorithms/community/tests/test_quality.py | 139 + .../algorithms/community/tests/test_utils.py | 26 + .../networkx/algorithms/components/__init__.py | 6 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 381 bytes .../__pycache__/attracting.cpython-312.pyc | Bin 0 -> 3746 bytes .../__pycache__/biconnected.cpython-312.pyc | Bin 0 -> 13082 bytes .../__pycache__/connected.cpython-312.pyc | Bin 0 -> 5900 bytes .../__pycache__/semiconnected.cpython-312.pyc | Bin 0 -> 2860 bytes .../__pycache__/strongly_connected.cpython-312.pyc | Bin 0 -> 11415 bytes .../__pycache__/weakly_connected.cpython-312.pyc | Bin 0 -> 5561 bytes .../networkx/algorithms/components/attracting.py | 115 + .../networkx/algorithms/components/biconnected.py | 394 +++ .../networkx/algorithms/components/connected.py | 216 ++ .../algorithms/components/semiconnected.py | 71 + .../algorithms/components/strongly_connected.py | 351 +++ .../algorithms/components/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 213 bytes .../__pycache__/test_attracting.cpython-312.pyc | Bin 0 -> 4692 bytes .../__pycache__/test_biconnected.cpython-312.pyc | Bin 0 -> 10795 bytes .../__pycache__/test_connected.cpython-312.pyc | Bin 0 -> 8483 bytes .../__pycache__/test_semiconnected.cpython-312.pyc | Bin 0 -> 5170 bytes .../test_strongly_connected.cpython-312.pyc | Bin 0 -> 11650 bytes .../test_weakly_connected.cpython-312.pyc | Bin 0 -> 6520 bytes 
.../algorithms/components/tests/test_attracting.py | 70 + .../components/tests/test_biconnected.py | 248 ++ .../algorithms/components/tests/test_connected.py | 138 + .../components/tests/test_semiconnected.py | 55 + .../components/tests/test_strongly_connected.py | 193 ++ .../components/tests/test_weakly_connected.py | 96 + .../algorithms/components/weakly_connected.py | 197 ++ .../networkx/algorithms/connectivity/__init__.py | 11 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 498 bytes .../__pycache__/connectivity.cpython-312.pyc | Bin 0 -> 30140 bytes .../connectivity/__pycache__/cuts.cpython-312.pyc | Bin 0 -> 23741 bytes .../__pycache__/disjoint_paths.cpython-312.pyc | Bin 0 -> 14926 bytes .../__pycache__/edge_augmentation.cpython-312.pyc | Bin 0 -> 47654 bytes .../__pycache__/edge_kcomponents.cpython-312.pyc | Bin 0 -> 22752 bytes .../__pycache__/kcomponents.cpython-312.pyc | Bin 0 -> 10384 bytes .../__pycache__/kcutsets.cpython-312.pyc | Bin 0 -> 8810 bytes .../__pycache__/stoerwagner.cpython-312.pyc | Bin 0 -> 6244 bytes .../connectivity/__pycache__/utils.cpython-312.pyc | Bin 0 -> 4337 bytes .../algorithms/connectivity/connectivity.py | 811 +++++ .../networkx/algorithms/connectivity/cuts.py | 616 ++++ .../algorithms/connectivity/disjoint_paths.py | 408 +++ .../algorithms/connectivity/edge_augmentation.py | 1270 ++++++++ .../algorithms/connectivity/edge_kcomponents.py | 592 ++++ .../algorithms/connectivity/kcomponents.py | 223 ++ .../networkx/algorithms/connectivity/kcutsets.py | 235 ++ .../algorithms/connectivity/stoerwagner.py | 152 + .../algorithms/connectivity/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 215 bytes .../__pycache__/test_connectivity.cpython-312.pyc | Bin 0 -> 27138 bytes .../tests/__pycache__/test_cuts.cpython-312.pyc | Bin 0 -> 16841 bytes .../test_disjoint_paths.cpython-312.pyc | Bin 0 -> 14370 bytes .../test_edge_augmentation.cpython-312.pyc | Bin 0 -> 21538 bytes .../test_edge_kcomponents.cpython-312.pyc | Bin 0 -> 22419 bytes .../__pycache__/test_kcomponents.cpython-312.pyc | Bin 0 -> 11276 bytes .../__pycache__/test_kcutsets.cpython-312.pyc | Bin 0 -> 13914 bytes .../__pycache__/test_stoer_wagner.cpython-312.pyc | Bin 0 -> 6298 bytes .../connectivity/tests/test_connectivity.py | 421 +++ .../algorithms/connectivity/tests/test_cuts.py | 309 ++ .../connectivity/tests/test_disjoint_paths.py | 249 ++ .../connectivity/tests/test_edge_augmentation.py | 502 ++++ .../connectivity/tests/test_edge_kcomponents.py | 488 +++ .../connectivity/tests/test_kcomponents.py | 296 ++ .../algorithms/connectivity/tests/test_kcutsets.py | 270 ++ .../connectivity/tests/test_stoer_wagner.py | 102 + .../networkx/algorithms/connectivity/utils.py | 88 + .../site-packages/networkx/algorithms/core.py | 588 ++++ .../site-packages/networkx/algorithms/covering.py | 142 + .../site-packages/networkx/algorithms/cuts.py | 398 +++ .../site-packages/networkx/algorithms/cycles.py | 1234 ++++++++ .../networkx/algorithms/d_separation.py | 677 +++++ .../site-packages/networkx/algorithms/dag.py | 1468 +++++++++ .../networkx/algorithms/distance_measures.py | 1159 ++++++++ .../networkx/algorithms/distance_regular.py | 238 ++ .../site-packages/networkx/algorithms/dominance.py | 135 + .../networkx/algorithms/dominating.py | 268 ++ .../networkx/algorithms/efficiency_measures.py | 167 ++ .../site-packages/networkx/algorithms/euler.py | 470 +++ .../networkx/algorithms/flow/__init__.py | 11 + .../flow/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 562 bytes 
.../__pycache__/boykovkolmogorov.cpython-312.pyc | Bin 0 -> 14603 bytes .../__pycache__/capacityscaling.cpython-312.pyc | Bin 0 -> 17106 bytes .../flow/__pycache__/dinitz_alg.cpython-312.pyc | Bin 0 -> 9122 bytes .../flow/__pycache__/edmondskarp.cpython-312.pyc | Bin 0 -> 8963 bytes .../flow/__pycache__/gomory_hu.cpython-312.pyc | Bin 0 -> 6795 bytes .../flow/__pycache__/maxflow.cpython-312.pyc | Bin 0 -> 23253 bytes .../flow/__pycache__/mincost.cpython-312.pyc | Bin 0 -> 13941 bytes .../__pycache__/networksimplex.cpython-312.pyc | Bin 0 -> 30286 bytes .../flow/__pycache__/preflowpush.cpython-312.pyc | Bin 0 -> 15833 bytes .../shortestaugmentingpath.cpython-312.pyc | Bin 0 -> 10477 bytes .../flow/__pycache__/utils.cpython-312.pyc | Bin 0 -> 8858 bytes .../networkx/algorithms/flow/boykovkolmogorov.py | 370 +++ .../networkx/algorithms/flow/capacityscaling.py | 407 +++ .../networkx/algorithms/flow/dinitz_alg.py | 238 ++ .../networkx/algorithms/flow/edmondskarp.py | 241 ++ .../networkx/algorithms/flow/gomory_hu.py | 178 ++ .../networkx/algorithms/flow/maxflow.py | 611 ++++ .../networkx/algorithms/flow/mincost.py | 356 +++ .../networkx/algorithms/flow/networksimplex.py | 662 +++++ .../networkx/algorithms/flow/preflowpush.py | 425 +++ .../algorithms/flow/shortestaugmentingpath.py | 300 ++ .../networkx/algorithms/flow/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 207 bytes .../__pycache__/test_gomory_hu.cpython-312.pyc | Bin 0 -> 8857 bytes .../tests/__pycache__/test_maxflow.cpython-312.pyc | Bin 0 -> 26556 bytes .../test_maxflow_large_graph.cpython-312.pyc | Bin 0 -> 6697 bytes .../tests/__pycache__/test_mincost.cpython-312.pyc | Bin 0 -> 27852 bytes .../test_networksimplex.cpython-312.pyc | Bin 0 -> 23407 bytes .../networkx/algorithms/flow/tests/gl1.gpickle.bz2 | Bin 0 -> 44623 bytes .../networkx/algorithms/flow/tests/gw1.gpickle.bz2 | Bin 0 -> 42248 bytes .../algorithms/flow/tests/netgen-2.gpickle.bz2 | Bin 0 -> 18972 bytes .../algorithms/flow/tests/test_gomory_hu.py | 128 + .../networkx/algorithms/flow/tests/test_maxflow.py | 573 ++++ .../flow/tests/test_maxflow_large_graph.py | 155 + .../networkx/algorithms/flow/tests/test_mincost.py | 475 +++ .../algorithms/flow/tests/test_networksimplex.py | 481 +++ .../algorithms/flow/tests/wlm3.gpickle.bz2 | Bin 0 -> 88132 bytes .../networkx/algorithms/flow/utils.py | 194 ++ .../networkx/algorithms/graph_hashing.py | 435 +++ .../site-packages/networkx/algorithms/graphical.py | 483 +++ .../site-packages/networkx/algorithms/hierarchy.py | 57 + .../site-packages/networkx/algorithms/hybrid.py | 196 ++ .../site-packages/networkx/algorithms/isolate.py | 107 + .../networkx/algorithms/isomorphism/__init__.py | 7 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 621 bytes .../isomorphism/__pycache__/ismags.cpython-312.pyc | Bin 0 -> 46618 bytes .../__pycache__/isomorph.cpython-312.pyc | Bin 0 -> 12922 bytes .../__pycache__/isomorphvf2.cpython-312.pyc | Bin 0 -> 41990 bytes .../__pycache__/matchhelpers.cpython-312.pyc | Bin 0 -> 15681 bytes .../temporalisomorphvf2.cpython-312.pyc | Bin 0 -> 13824 bytes .../__pycache__/tree_isomorphism.cpython-312.pyc | Bin 0 -> 9387 bytes .../isomorphism/__pycache__/vf2pp.cpython-312.pyc | Bin 0 -> 40600 bytes .../__pycache__/vf2userfunc.cpython-312.pyc | Bin 0 -> 7881 bytes .../networkx/algorithms/isomorphism/ismags.py | 1174 ++++++++ .../networkx/algorithms/isomorphism/isomorph.py | 336 +++ .../networkx/algorithms/isomorphism/isomorphvf2.py | 1262 ++++++++ 
.../algorithms/isomorphism/matchhelpers.py | 352 +++ .../algorithms/isomorphism/temporalisomorphvf2.py | 308 ++ .../algorithms/isomorphism/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 214 bytes .../tests/__pycache__/test_ismags.cpython-312.pyc | Bin 0 -> 14949 bytes .../__pycache__/test_isomorphism.cpython-312.pyc | Bin 0 -> 9090 bytes .../__pycache__/test_isomorphvf2.cpython-312.pyc | Bin 0 -> 19306 bytes .../__pycache__/test_match_helpers.cpython-312.pyc | Bin 0 -> 4011 bytes .../test_temporalisomorphvf2.cpython-312.pyc | Bin 0 -> 11869 bytes .../test_tree_isomorphism.cpython-312.pyc | Bin 0 -> 8350 bytes .../tests/__pycache__/test_vf2pp.cpython-312.pyc | Bin 0 -> 59414 bytes .../__pycache__/test_vf2pp_helpers.cpython-312.pyc | Bin 0 -> 98711 bytes .../__pycache__/test_vf2userfunc.cpython-312.pyc | Bin 0 -> 13085 bytes .../algorithms/isomorphism/tests/iso_r01_s80.A99 | Bin 0 -> 1442 bytes .../algorithms/isomorphism/tests/iso_r01_s80.B99 | Bin 0 -> 1442 bytes .../algorithms/isomorphism/tests/si2_b06_m200.A99 | Bin 0 -> 310 bytes .../algorithms/isomorphism/tests/si2_b06_m200.B99 | Bin 0 -> 1602 bytes .../algorithms/isomorphism/tests/test_ismags.py | 351 +++ .../isomorphism/tests/test_isomorphism.py | 109 + .../isomorphism/tests/test_isomorphvf2.py | 439 +++ .../isomorphism/tests/test_match_helpers.py | 64 + .../isomorphism/tests/test_temporalisomorphvf2.py | 212 ++ .../isomorphism/tests/test_tree_isomorphism.py | 202 ++ .../algorithms/isomorphism/tests/test_vf2pp.py | 1609 ++++++++++ .../isomorphism/tests/test_vf2pp_helpers.py | 3118 ++++++++++++++++++++ .../isomorphism/tests/test_vf2userfunc.py | 200 ++ .../algorithms/isomorphism/tree_isomorphism.py | 264 ++ .../networkx/algorithms/isomorphism/vf2pp.py | 1102 +++++++ .../networkx/algorithms/isomorphism/vf2userfunc.py | 192 ++ .../networkx/algorithms/link_analysis/__init__.py | 2 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 335 bytes .../__pycache__/hits_alg.cpython-312.pyc | Bin 0 -> 13425 bytes .../__pycache__/pagerank_alg.cpython-312.pyc | Bin 0 -> 19874 bytes .../networkx/algorithms/link_analysis/hits_alg.py | 337 +++ .../algorithms/link_analysis/pagerank_alg.py | 499 ++++ .../algorithms/link_analysis/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 216 bytes .../tests/__pycache__/test_hits.cpython-312.pyc | Bin 0 -> 5331 bytes .../__pycache__/test_pagerank.cpython-312.pyc | Bin 0 -> 12615 bytes .../algorithms/link_analysis/tests/test_hits.py | 77 + .../link_analysis/tests/test_pagerank.py | 213 ++ .../networkx/algorithms/link_prediction.py | 687 +++++ .../networkx/algorithms/lowest_common_ancestors.py | 280 ++ .../site-packages/networkx/algorithms/matching.py | 1151 ++++++++ .../networkx/algorithms/minors/__init__.py | 27 + .../minors/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 726 bytes .../minors/__pycache__/contraction.cpython-312.pyc | Bin 0 -> 24465 bytes .../networkx/algorithms/minors/contraction.py | 666 +++++ .../__pycache__/test_contraction.cpython-312.pyc | Bin 0 -> 27511 bytes .../algorithms/minors/tests/test_contraction.py | 544 ++++ .../site-packages/networkx/algorithms/mis.py | 78 + .../site-packages/networkx/algorithms/moral.py | 59 + .../networkx/algorithms/node_classification.py | 219 ++ .../networkx/algorithms/non_randomness.py | 98 + .../networkx/algorithms/operators/__init__.py | 4 + .../operators/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 414 bytes .../operators/__pycache__/all.cpython-312.pyc | Bin 0 -> 12182 bytes 
.../operators/__pycache__/binary.cpython-312.pyc | Bin 0 -> 15141 bytes .../operators/__pycache__/product.cpython-312.pyc | Bin 0 -> 25202 bytes .../operators/__pycache__/unary.cpython-312.pyc | Bin 0 -> 2721 bytes .../networkx/algorithms/operators/all.py | 324 ++ .../networkx/algorithms/operators/binary.py | 468 +++ .../networkx/algorithms/operators/product.py | 633 ++++ .../algorithms/operators/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 212 bytes .../tests/__pycache__/test_all.cpython-312.pyc | Bin 0 -> 19502 bytes .../tests/__pycache__/test_binary.cpython-312.pyc | Bin 0 -> 29140 bytes .../tests/__pycache__/test_product.cpython-312.pyc | Bin 0 -> 28725 bytes .../tests/__pycache__/test_unary.cpython-312.pyc | Bin 0 -> 3023 bytes .../algorithms/operators/tests/test_all.py | 328 ++ .../algorithms/operators/tests/test_binary.py | 451 +++ .../algorithms/operators/tests/test_product.py | 491 +++ .../algorithms/operators/tests/test_unary.py | 55 + .../networkx/algorithms/operators/unary.py | 77 + .../networkx/algorithms/planar_drawing.py | 464 +++ .../site-packages/networkx/algorithms/planarity.py | 1463 +++++++++ .../networkx/algorithms/polynomials.py | 306 ++ .../networkx/algorithms/reciprocity.py | 98 + .../site-packages/networkx/algorithms/regular.py | 215 ++ .../site-packages/networkx/algorithms/richclub.py | 138 + .../networkx/algorithms/shortest_paths/__init__.py | 5 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 503 bytes .../__pycache__/astar.cpython-312.pyc | Bin 0 -> 8625 bytes .../__pycache__/dense.cpython-312.pyc | Bin 0 -> 9334 bytes .../__pycache__/generic.cpython-312.pyc | Bin 0 -> 25786 bytes .../__pycache__/unweighted.cpython-312.pyc | Bin 0 -> 16125 bytes .../__pycache__/weighted.cpython-312.pyc | Bin 0 -> 84792 bytes .../networkx/algorithms/shortest_paths/astar.py | 239 ++ .../networkx/algorithms/shortest_paths/dense.py | 264 ++ .../networkx/algorithms/shortest_paths/generic.py | 713 +++++ .../algorithms/shortest_paths/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 217 bytes .../tests/__pycache__/test_astar.cpython-312.pyc | Bin 0 -> 15904 bytes .../tests/__pycache__/test_dense.cpython-312.pyc | Bin 0 -> 7700 bytes .../__pycache__/test_dense_numpy.cpython-312.pyc | Bin 0 -> 4760 bytes .../tests/__pycache__/test_generic.cpython-312.pyc | Bin 0 -> 31458 bytes .../__pycache__/test_unweighted.cpython-312.pyc | Bin 0 -> 11738 bytes .../__pycache__/test_weighted.cpython-312.pyc | Bin 0 -> 56793 bytes .../algorithms/shortest_paths/tests/test_astar.py | 254 ++ .../algorithms/shortest_paths/tests/test_dense.py | 212 ++ .../shortest_paths/tests/test_dense_numpy.py | 88 + .../shortest_paths/tests/test_generic.py | 450 +++ .../shortest_paths/tests/test_unweighted.py | 149 + .../shortest_paths/tests/test_weighted.py | 972 ++++++ .../algorithms/shortest_paths/unweighted.py | 562 ++++ .../networkx/algorithms/shortest_paths/weighted.py | 2516 ++++++++++++++++ .../networkx/algorithms/similarity.py | 1783 +++++++++++ .../networkx/algorithms/simple_paths.py | 948 ++++++ .../networkx/algorithms/smallworld.py | 404 +++ .../site-packages/networkx/algorithms/smetric.py | 30 + .../networkx/algorithms/sparsifiers.py | 296 ++ .../networkx/algorithms/structuralholes.py | 374 +++ .../networkx/algorithms/summarization.py | 564 ++++ .../site-packages/networkx/algorithms/swap.py | 406 +++ .../networkx/algorithms/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 202 bytes 
.../__pycache__/test_asteroidal.cpython-312.pyc | Bin 0 -> 1126 bytes .../__pycache__/test_boundary.cpython-312.pyc | Bin 0 -> 12290 bytes .../tests/__pycache__/test_bridges.cpython-312.pyc | Bin 0 -> 7284 bytes .../__pycache__/test_broadcasting.cpython-312.pyc | Bin 0 -> 3680 bytes .../tests/__pycache__/test_chains.cpython-312.pyc | Bin 0 -> 6398 bytes .../tests/__pycache__/test_chordal.cpython-312.pyc | Bin 0 -> 8411 bytes .../tests/__pycache__/test_clique.cpython-312.pyc | Bin 0 -> 15682 bytes .../tests/__pycache__/test_cluster.cpython-312.pyc | Bin 0 -> 31123 bytes .../test_communicability.cpython-312.pyc | Bin 0 -> 3216 bytes .../tests/__pycache__/test_core.cpython-312.pyc | Bin 0 -> 18529 bytes .../__pycache__/test_covering.cpython-312.pyc | Bin 0 -> 5992 bytes .../tests/__pycache__/test_cuts.cpython-312.pyc | Bin 0 -> 9544 bytes .../tests/__pycache__/test_cycles.cpython-312.pyc | Bin 0 -> 60383 bytes .../__pycache__/test_d_separation.cpython-312.pyc | Bin 0 -> 17254 bytes .../tests/__pycache__/test_dag.cpython-312.pyc | Bin 0 -> 52512 bytes .../test_distance_measures.cpython-312.pyc | Bin 0 -> 58067 bytes .../test_distance_regular.cpython-312.pyc | Bin 0 -> 6595 bytes .../__pycache__/test_dominance.cpython-312.pyc | Bin 0 -> 13635 bytes .../__pycache__/test_dominating.cpython-312.pyc | Bin 0 -> 6611 bytes .../__pycache__/test_efficiency.cpython-312.pyc | Bin 0 -> 3952 bytes .../tests/__pycache__/test_euler.cpython-312.pyc | Bin 0 -> 24668 bytes .../__pycache__/test_graph_hashing.cpython-312.pyc | Bin 0 -> 43056 bytes .../__pycache__/test_graphical.cpython-312.pyc | Bin 0 -> 10060 bytes .../__pycache__/test_hierarchy.cpython-312.pyc | Bin 0 -> 2913 bytes .../tests/__pycache__/test_hybrid.cpython-312.pyc | Bin 0 -> 1404 bytes .../tests/__pycache__/test_isolate.cpython-312.pyc | Bin 0 -> 1650 bytes .../test_link_prediction.cpython-312.pyc | Bin 0 -> 40745 bytes .../test_lowest_common_ancestors.cpython-312.pyc | Bin 0 -> 27839 bytes .../__pycache__/test_matching.cpython-312.pyc | Bin 0 -> 28766 bytes .../test_max_weight_clique.cpython-312.pyc | Bin 0 -> 9642 bytes .../tests/__pycache__/test_mis.cpython-312.pyc | Bin 0 -> 4398 bytes .../tests/__pycache__/test_moral.cpython-312.pyc | Bin 0 -> 1169 bytes .../test_node_classification.cpython-312.pyc | Bin 0 -> 9010 bytes .../test_non_randomness.cpython-312.pyc | Bin 0 -> 2594 bytes .../test_planar_drawing.cpython-312.pyc | Bin 0 -> 11890 bytes .../__pycache__/test_planarity.cpython-312.pyc | Bin 0 -> 25554 bytes .../__pycache__/test_polynomials.cpython-312.pyc | Bin 0 -> 3674 bytes .../__pycache__/test_reciprocity.cpython-312.pyc | Bin 0 -> 2641 bytes .../tests/__pycache__/test_regular.cpython-312.pyc | Bin 0 -> 6850 bytes .../__pycache__/test_richclub.cpython-312.pyc | Bin 0 -> 6893 bytes .../__pycache__/test_similarity.cpython-312.pyc | Bin 0 -> 45791 bytes .../__pycache__/test_simple_paths.cpython-312.pyc | Bin 0 -> 49342 bytes .../__pycache__/test_smallworld.cpython-312.pyc | Bin 0 -> 4896 bytes .../tests/__pycache__/test_smetric.cpython-312.pyc | Bin 0 -> 570 bytes .../__pycache__/test_sparsifiers.cpython-312.pyc | Bin 0 -> 6552 bytes .../test_structuralholes.cpython-312.pyc | Bin 0 -> 14011 bytes .../__pycache__/test_summarization.cpython-312.pyc | Bin 0 -> 24295 bytes .../tests/__pycache__/test_swap.cpython-312.pyc | Bin 0 -> 16071 bytes .../__pycache__/test_threshold.cpython-312.pyc | Bin 0 -> 19068 bytes .../test_time_dependent.cpython-312.pyc | Bin 0 -> 20084 bytes .../__pycache__/test_tournament.cpython-312.pyc | Bin 0 -> 8435 
bytes .../tests/__pycache__/test_triads.cpython-312.pyc | Bin 0 -> 17276 bytes .../__pycache__/test_vitality.cpython-312.pyc | Bin 0 -> 3123 bytes .../tests/__pycache__/test_voronoi.cpython-312.pyc | Bin 0 -> 5948 bytes .../tests/__pycache__/test_walks.cpython-312.pyc | Bin 0 -> 2813 bytes .../tests/__pycache__/test_wiener.cpython-312.pyc | Bin 0 -> 5181 bytes .../networkx/algorithms/tests/test_asteroidal.py | 23 + .../networkx/algorithms/tests/test_boundary.py | 154 + .../networkx/algorithms/tests/test_bridges.py | 144 + .../networkx/algorithms/tests/test_broadcasting.py | 82 + .../networkx/algorithms/tests/test_chains.py | 136 + .../networkx/algorithms/tests/test_chordal.py | 129 + .../networkx/algorithms/tests/test_clique.py | 291 ++ .../networkx/algorithms/tests/test_cluster.py | 584 ++++ .../algorithms/tests/test_communicability.py | 80 + .../networkx/algorithms/tests/test_core.py | 266 ++ .../networkx/algorithms/tests/test_covering.py | 85 + .../networkx/algorithms/tests/test_cuts.py | 171 ++ .../networkx/algorithms/tests/test_cycles.py | 984 ++++++ .../networkx/algorithms/tests/test_d_separation.py | 340 +++ .../networkx/algorithms/tests/test_dag.py | 837 ++++++ .../algorithms/tests/test_distance_measures.py | 831 ++++++ .../algorithms/tests/test_distance_regular.py | 85 + .../networkx/algorithms/tests/test_dominance.py | 286 ++ .../networkx/algorithms/tests/test_dominating.py | 115 + .../networkx/algorithms/tests/test_efficiency.py | 58 + .../networkx/algorithms/tests/test_euler.py | 314 ++ .../algorithms/tests/test_graph_hashing.py | 872 ++++++ .../networkx/algorithms/tests/test_graphical.py | 163 + .../networkx/algorithms/tests/test_hierarchy.py | 46 + .../networkx/algorithms/tests/test_hybrid.py | 24 + .../networkx/algorithms/tests/test_isolate.py | 26 + .../algorithms/tests/test_link_prediction.py | 593 ++++ .../tests/test_lowest_common_ancestors.py | 459 +++ .../networkx/algorithms/tests/test_matching.py | 548 ++++ .../algorithms/tests/test_max_weight_clique.py | 179 ++ .../networkx/algorithms/tests/test_mis.py | 62 + .../networkx/algorithms/tests/test_moral.py | 15 + .../algorithms/tests/test_node_classification.py | 140 + .../algorithms/tests/test_non_randomness.py | 42 + .../algorithms/tests/test_planar_drawing.py | 274 ++ .../networkx/algorithms/tests/test_planarity.py | 556 ++++ .../networkx/algorithms/tests/test_polynomials.py | 57 + .../networkx/algorithms/tests/test_reciprocity.py | 37 + .../networkx/algorithms/tests/test_regular.py | 91 + .../networkx/algorithms/tests/test_richclub.py | 149 + .../networkx/algorithms/tests/test_similarity.py | 984 ++++++ .../networkx/algorithms/tests/test_simple_paths.py | 803 +++++ .../networkx/algorithms/tests/test_smallworld.py | 76 + .../networkx/algorithms/tests/test_smetric.py | 8 + .../networkx/algorithms/tests/test_sparsifiers.py | 138 + .../algorithms/tests/test_structuralholes.py | 191 ++ .../algorithms/tests/test_summarization.py | 642 ++++ .../networkx/algorithms/tests/test_swap.py | 179 ++ .../networkx/algorithms/tests/test_threshold.py | 266 ++ .../algorithms/tests/test_time_dependent.py | 431 +++ .../networkx/algorithms/tests/test_tournament.py | 161 + .../networkx/algorithms/tests/test_triads.py | 248 ++ .../networkx/algorithms/tests/test_vitality.py | 41 + .../networkx/algorithms/tests/test_voronoi.py | 103 + .../networkx/algorithms/tests/test_walks.py | 54 + .../networkx/algorithms/tests/test_wiener.py | 123 + .../site-packages/networkx/algorithms/threshold.py | 1035 +++++++ .../networkx/algorithms/time_dependent.py | 
142 + .../networkx/algorithms/tournament.py | 404 +++ .../networkx/algorithms/traversal/__init__.py | 5 + .../traversal/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 350 bytes .../__pycache__/beamsearch.cpython-312.pyc | Bin 0 -> 3426 bytes .../breadth_first_search.cpython-312.pyc | Bin 0 -> 19426 bytes .../__pycache__/depth_first_search.cpython-312.pyc | Bin 0 -> 17778 bytes .../traversal/__pycache__/edgebfs.cpython-312.pyc | Bin 0 -> 7777 bytes .../traversal/__pycache__/edgedfs.cpython-312.pyc | Bin 0 -> 7042 bytes .../networkx/algorithms/traversal/beamsearch.py | 90 + .../algorithms/traversal/breadth_first_search.py | 576 ++++ .../algorithms/traversal/depth_first_search.py | 529 ++++ .../networkx/algorithms/traversal/edgebfs.py | 185 ++ .../networkx/algorithms/traversal/edgedfs.py | 182 ++ .../algorithms/traversal/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 212 bytes .../__pycache__/test_beamsearch.cpython-312.pyc | Bin 0 -> 1544 bytes .../tests/__pycache__/test_bfs.cpython-312.pyc | Bin 0 -> 12714 bytes .../tests/__pycache__/test_dfs.cpython-312.pyc | Bin 0 -> 15396 bytes .../tests/__pycache__/test_edgebfs.cpython-312.pyc | Bin 0 -> 8849 bytes .../tests/__pycache__/test_edgedfs.cpython-312.pyc | Bin 0 -> 7844 bytes .../algorithms/traversal/tests/test_beamsearch.py | 25 + .../algorithms/traversal/tests/test_bfs.py | 203 ++ .../algorithms/traversal/tests/test_dfs.py | 307 ++ .../algorithms/traversal/tests/test_edgebfs.py | 147 + .../algorithms/traversal/tests/test_edgedfs.py | 131 + .../networkx/algorithms/tree/__init__.py | 6 + .../tree/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 351 bytes .../tree/__pycache__/branchings.cpython-312.pyc | Bin 0 -> 34347 bytes .../tree/__pycache__/coding.cpython-312.pyc | Bin 0 -> 15910 bytes .../tree/__pycache__/decomposition.cpython-312.pyc | Bin 0 -> 4145 bytes .../tree/__pycache__/mst.cpython-312.pyc | Bin 0 -> 46855 bytes .../tree/__pycache__/operations.cpython-312.pyc | Bin 0 -> 4788 bytes .../tree/__pycache__/recognition.cpython-312.pyc | Bin 0 -> 9849 bytes .../networkx/algorithms/tree/branchings.py | 1042 +++++++ .../networkx/algorithms/tree/coding.py | 413 +++ .../networkx/algorithms/tree/decomposition.py | 88 + .../site-packages/networkx/algorithms/tree/mst.py | 1283 ++++++++ .../networkx/algorithms/tree/operations.py | 106 + .../networkx/algorithms/tree/recognition.py | 273 ++ .../networkx/algorithms/tree/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 207 bytes .../__pycache__/test_branchings.cpython-312.pyc | Bin 0 -> 23538 bytes .../tests/__pycache__/test_coding.cpython-312.pyc | Bin 0 -> 8384 bytes .../__pycache__/test_decomposition.cpython-312.pyc | Bin 0 -> 3028 bytes .../tests/__pycache__/test_mst.cpython-312.pyc | Bin 0 -> 45028 bytes .../__pycache__/test_operations.cpython-312.pyc | Bin 0 -> 4059 bytes .../__pycache__/test_recognition.cpython-312.pyc | Bin 0 -> 11505 bytes .../algorithms/tree/tests/test_branchings.py | 624 ++++ .../networkx/algorithms/tree/tests/test_coding.py | 114 + .../algorithms/tree/tests/test_decomposition.py | 79 + .../networkx/algorithms/tree/tests/test_mst.py | 918 ++++++ .../algorithms/tree/tests/test_operations.py | 53 + .../algorithms/tree/tests/test_recognition.py | 174 ++ .../site-packages/networkx/algorithms/triads.py | 500 ++++ .../site-packages/networkx/algorithms/vitality.py | 76 + .../site-packages/networkx/algorithms/voronoi.py | 86 + .../site-packages/networkx/algorithms/walks.py | 79 + 
.../site-packages/networkx/algorithms/wiener.py | 226 ++ .../site-packages/networkx/classes/__init__.py | 13 + .../classes/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 611 bytes .../classes/__pycache__/coreviews.cpython-312.pyc | Bin 0 -> 23308 bytes .../classes/__pycache__/digraph.cpython-312.pyc | Bin 0 -> 54038 bytes .../classes/__pycache__/filters.cpython-312.pyc | Bin 0 -> 5522 bytes .../classes/__pycache__/function.cpython-312.pyc | Bin 0 -> 49500 bytes .../classes/__pycache__/graph.cpython-312.pyc | Bin 0 -> 79387 bytes .../classes/__pycache__/graphviews.cpython-312.pyc | Bin 0 -> 9686 bytes .../__pycache__/multidigraph.cpython-312.pyc | Bin 0 -> 40145 bytes .../classes/__pycache__/multigraph.cpython-312.pyc | Bin 0 -> 51663 bytes .../__pycache__/reportviews.cpython-312.pyc | Bin 0 -> 71836 bytes .../site-packages/networkx/classes/coreviews.py | 435 +++ .../site-packages/networkx/classes/digraph.py | 1352 +++++++++ .../site-packages/networkx/classes/filters.py | 95 + .../site-packages/networkx/classes/function.py | 1416 +++++++++ .../site-packages/networkx/classes/graph.py | 2071 +++++++++++++ .../site-packages/networkx/classes/graphviews.py | 269 ++ .../site-packages/networkx/classes/multidigraph.py | 966 ++++++ .../site-packages/networkx/classes/multigraph.py | 1283 ++++++++ .../site-packages/networkx/classes/reportviews.py | 1447 +++++++++ .../networkx/classes/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 199 bytes .../__pycache__/dispatch_interface.cpython-312.pyc | Bin 0 -> 9165 bytes .../__pycache__/historical_tests.cpython-312.pyc | Bin 0 -> 27460 bytes .../__pycache__/test_coreviews.cpython-312.pyc | Bin 0 -> 27633 bytes .../tests/__pycache__/test_digraph.cpython-312.pyc | Bin 0 -> 26043 bytes .../test_digraph_historical.cpython-312.pyc | Bin 0 -> 8702 bytes .../tests/__pycache__/test_filters.cpython-312.pyc | Bin 0 -> 10815 bytes .../__pycache__/test_function.cpython-312.pyc | Bin 0 -> 61327 bytes .../tests/__pycache__/test_graph.cpython-312.pyc | Bin 0 -> 61203 bytes .../test_graph_historical.cpython-312.pyc | Bin 0 -> 830 bytes .../__pycache__/test_graphviews.cpython-312.pyc | Bin 0 -> 26159 bytes .../__pycache__/test_multidigraph.cpython-312.pyc | Bin 0 -> 28784 bytes .../__pycache__/test_multigraph.cpython-312.pyc | Bin 0 -> 32902 bytes .../__pycache__/test_reportviews.cpython-312.pyc | Bin 0 -> 73874 bytes .../tests/__pycache__/test_special.cpython-312.pyc | Bin 0 -> 8263 bytes .../__pycache__/test_subgraphviews.cpython-312.pyc | Bin 0 -> 26516 bytes .../networkx/classes/tests/dispatch_interface.py | 185 ++ .../networkx/classes/tests/historical_tests.py | 475 +++ .../networkx/classes/tests/test_coreviews.py | 362 +++ .../networkx/classes/tests/test_digraph.py | 331 +++ .../classes/tests/test_digraph_historical.py | 110 + .../networkx/classes/tests/test_filters.py | 177 ++ .../networkx/classes/tests/test_function.py | 1035 +++++++ .../networkx/classes/tests/test_graph.py | 927 ++++++ .../classes/tests/test_graph_historical.py | 12 + .../networkx/classes/tests/test_graphviews.py | 349 +++ .../networkx/classes/tests/test_multidigraph.py | 459 +++ .../networkx/classes/tests/test_multigraph.py | 528 ++++ .../networkx/classes/tests/test_reportviews.py | 1421 +++++++++ .../networkx/classes/tests/test_special.py | 131 + .../networkx/classes/tests/test_subgraphviews.py | 371 +++ .../python3.12/site-packages/networkx/conftest.py | 257 ++ .../python3.12/site-packages/networkx/convert.py | 502 ++++ .../site-packages/networkx/convert_matrix.py | 
1314 +++++++++ .../site-packages/networkx/drawing/__init__.py | 7 + .../drawing/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 336 bytes .../drawing/__pycache__/layout.cpython-312.pyc | Bin 0 -> 74328 bytes .../drawing/__pycache__/nx_agraph.cpython-312.pyc | Bin 0 -> 17507 bytes .../drawing/__pycache__/nx_latex.cpython-312.pyc | Bin 0 -> 24822 bytes .../drawing/__pycache__/nx_pydot.cpython-312.pyc | Bin 0 -> 12750 bytes .../drawing/__pycache__/nx_pylab.cpython-312.pyc | Bin 0 -> 107766 bytes .../site-packages/networkx/drawing/layout.py | 2028 +++++++++++++ .../site-packages/networkx/drawing/nx_agraph.py | 464 +++ .../site-packages/networkx/drawing/nx_latex.py | 570 ++++ .../site-packages/networkx/drawing/nx_pydot.py | 361 +++ .../site-packages/networkx/drawing/nx_pylab.py | 3021 +++++++++++++++++++ .../networkx/drawing/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 199 bytes .../tests/__pycache__/test_agraph.cpython-312.pyc | Bin 0 -> 17999 bytes .../tests/__pycache__/test_latex.cpython-312.pyc | Bin 0 -> 10989 bytes .../tests/__pycache__/test_layout.cpython-312.pyc | Bin 0 -> 42803 bytes .../tests/__pycache__/test_pydot.cpython-312.pyc | Bin 0 -> 7801 bytes .../tests/__pycache__/test_pylab.cpython-312.pyc | Bin 0 -> 92160 bytes .../tests/baseline/test_display_complex.png | Bin 0 -> 142315 bytes .../tests/baseline/test_display_empty_graph.png | Bin 0 -> 3257 bytes .../baseline/test_display_house_with_colors.png | Bin 0 -> 26002 bytes .../baseline/test_display_labels_and_colors.png | Bin 0 -> 70446 bytes .../tests/baseline/test_display_shortest_path.png | Bin 0 -> 35897 bytes .../tests/baseline/test_house_with_colors.png | Bin 0 -> 21918 bytes .../networkx/drawing/tests/test_agraph.py | 240 ++ .../networkx/drawing/tests/test_latex.py | 285 ++ .../networkx/drawing/tests/test_layout.py | 631 ++++ .../networkx/drawing/tests/test_pydot.py | 146 + .../networkx/drawing/tests/test_pylab.py | 1756 +++++++++++ .../python3.12/site-packages/networkx/exception.py | 131 + .../site-packages/networkx/generators/__init__.py | 34 + .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 1583 bytes .../generators/__pycache__/atlas.cpython-312.pyc | Bin 0 -> 6908 bytes .../generators/__pycache__/classic.cpython-312.pyc | Bin 0 -> 37275 bytes .../__pycache__/cographs.cpython-312.pyc | Bin 0 -> 2702 bytes .../__pycache__/community.cpython-312.pyc | Bin 0 -> 39110 bytes .../__pycache__/degree_seq.cpython-312.pyc | Bin 0 -> 34568 bytes .../__pycache__/directed.cpython-312.pyc | Bin 0 -> 21629 bytes .../__pycache__/duplication.cpython-312.pyc | Bin 0 -> 6380 bytes .../generators/__pycache__/ego.cpython-312.pyc | Bin 0 -> 2315 bytes .../__pycache__/expanders.cpython-312.pyc | Bin 0 -> 15847 bytes .../__pycache__/geometric.cpython-312.pyc | Bin 0 -> 44278 bytes .../__pycache__/harary_graph.cpython-312.pyc | Bin 0 -> 6681 bytes .../__pycache__/internet_as_graphs.cpython-312.pyc | Bin 0 -> 19268 bytes .../__pycache__/intersection.cpython-312.pyc | Bin 0 -> 5346 bytes .../__pycache__/interval_graph.cpython-312.pyc | Bin 0 -> 2767 bytes .../__pycache__/joint_degree_seq.cpython-312.pyc | Bin 0 -> 22680 bytes .../generators/__pycache__/lattice.cpython-312.pyc | Bin 0 -> 20283 bytes .../generators/__pycache__/line.cpython-312.pyc | Bin 0 -> 19717 bytes .../__pycache__/mycielski.cpython-312.pyc | Bin 0 -> 4909 bytes .../nonisomorphic_trees.cpython-312.pyc | Bin 0 -> 7143 bytes .../__pycache__/random_clustered.cpython-312.pyc | Bin 0 -> 5100 bytes .../__pycache__/random_graphs.cpython-312.pyc | 
Bin 0 -> 52732 bytes .../generators/__pycache__/small.cpython-312.pyc | Bin 0 -> 31757 bytes .../generators/__pycache__/social.cpython-312.pyc | Bin 0 -> 25287 bytes .../spectral_graph_forge.cpython-312.pyc | Bin 0 -> 5520 bytes .../__pycache__/stochastic.cpython-312.pyc | Bin 0 -> 2456 bytes .../generators/__pycache__/sudoku.cpython-312.pyc | Bin 0 -> 4854 bytes .../__pycache__/time_series.cpython-312.pyc | Bin 0 -> 3191 bytes .../generators/__pycache__/trees.cpython-312.pyc | Bin 0 -> 40261 bytes .../generators/__pycache__/triads.cpython-312.pyc | Bin 0 -> 2709 bytes .../site-packages/networkx/generators/atlas.dat.gz | Bin 0 -> 8887 bytes .../site-packages/networkx/generators/atlas.py | 227 ++ .../site-packages/networkx/generators/classic.py | 1068 +++++++ .../site-packages/networkx/generators/cographs.py | 68 + .../site-packages/networkx/generators/community.py | 1070 +++++++ .../networkx/generators/degree_seq.py | 867 ++++++ .../site-packages/networkx/generators/directed.py | 572 ++++ .../networkx/generators/duplication.py | 174 ++ .../site-packages/networkx/generators/ego.py | 66 + .../site-packages/networkx/generators/expanders.py | 476 +++ .../site-packages/networkx/generators/geometric.py | 1037 +++++++ .../networkx/generators/harary_graph.py | 199 ++ .../networkx/generators/internet_as_graphs.py | 441 +++ .../networkx/generators/intersection.py | 125 + .../networkx/generators/interval_graph.py | 70 + .../networkx/generators/joint_degree_seq.py | 664 +++++ .../site-packages/networkx/generators/lattice.py | 367 +++ .../site-packages/networkx/generators/line.py | 501 ++++ .../site-packages/networkx/generators/mycielski.py | 110 + .../networkx/generators/nonisomorphic_trees.py | 209 ++ .../networkx/generators/random_clustered.py | 117 + .../networkx/generators/random_graphs.py | 1400 +++++++++ .../site-packages/networkx/generators/small.py | 996 +++++++ .../site-packages/networkx/generators/social.py | 554 ++++ .../networkx/generators/spectral_graph_forge.py | 120 + .../networkx/generators/stochastic.py | 54 + .../site-packages/networkx/generators/sudoku.py | 131 + .../networkx/generators/tests/__init__.py | 0 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 202 bytes .../tests/__pycache__/test_atlas.cpython-312.pyc | Bin 0 -> 5318 bytes .../tests/__pycache__/test_classic.cpython-312.pyc | Bin 0 -> 47600 bytes .../__pycache__/test_cographs.cpython-312.pyc | Bin 0 -> 1002 bytes .../__pycache__/test_community.cpython-312.pyc | Bin 0 -> 20428 bytes .../__pycache__/test_degree_seq.cpython-312.pyc | Bin 0 -> 14487 bytes .../__pycache__/test_directed.cpython-312.pyc | Bin 0 -> 11922 bytes .../__pycache__/test_duplication.cpython-312.pyc | Bin 0 -> 7224 bytes .../tests/__pycache__/test_ego.cpython-312.pyc | Bin 0 -> 2968 bytes .../__pycache__/test_expanders.cpython-312.pyc | Bin 0 -> 11728 bytes .../__pycache__/test_geometric.cpython-312.pyc | Bin 0 -> 32298 bytes .../__pycache__/test_harary_graph.cpython-312.pyc | Bin 0 -> 5664 bytes .../test_internet_as_graphs.cpython-312.pyc | Bin 0 -> 10345 bytes .../__pycache__/test_intersection.cpython-312.pyc | Bin 0 -> 2040 bytes .../test_interval_graph.cpython-312.pyc | Bin 0 -> 6515 bytes .../test_joint_degree_seq.cpython-312.pyc | Bin 0 -> 3870 bytes .../tests/__pycache__/test_lattice.cpython-312.pyc | Bin 0 -> 18183 bytes .../tests/__pycache__/test_line.cpython-312.pyc | Bin 0 -> 20271 bytes .../__pycache__/test_mycielski.cpython-312.pyc | Bin 0 -> 2624 bytes .../test_nonisomorphic_trees.cpython-312.pyc | Bin 0 -> 4277 bytes 
 .../test_random_clustered.cpython-312.pyc | Bin 0 -> 3004 bytes
 .../__pycache__/test_random_graphs.cpython-312.pyc | Bin 0 -> 31633 bytes
 .../tests/__pycache__/test_small.cpython-312.pyc | Bin 0 -> 16038 bytes
 .../test_spectral_graph_forge.cpython-312.pyc | Bin 0 -> 1786 bytes
 .../__pycache__/test_stochastic.cpython-312.pyc | Bin 0 -> 4692 bytes
 .../tests/__pycache__/test_sudoku.cpython-312.pyc | Bin 0 -> 2608 bytes
 .../__pycache__/test_time_series.cpython-312.pyc | Bin 0 -> 3935 bytes
 .../tests/__pycache__/test_trees.cpython-312.pyc | Bin 0 -> 12053 bytes
 .../tests/__pycache__/test_triads.cpython-312.pyc | Bin 0 -> 1009 bytes
 .../networkx/generators/tests/test_atlas.py | 75 +
 .../networkx/generators/tests/test_classic.py | 639 ++++
 .../networkx/generators/tests/test_cographs.py | 18 +
 .../networkx/generators/tests/test_community.py | 362 +++
 .../networkx/generators/tests/test_degree_seq.py | 237 ++
 .../networkx/generators/tests/test_directed.py | 182 ++
 .../networkx/generators/tests/test_duplication.py | 103 +
 .../networkx/generators/tests/test_ego.py | 39 +
 .../networkx/generators/tests/test_expanders.py | 171 ++
 .../networkx/generators/tests/test_geometric.py | 488 +++
 .../networkx/generators/tests/test_harary_graph.py | 133 +
 .../generators/tests/test_internet_as_graphs.py | 176 ++
 .../networkx/generators/tests/test_intersection.py | 28 +
 .../generators/tests/test_interval_graph.py | 144 +
 .../generators/tests/test_joint_degree_seq.py | 125 +
 .../networkx/generators/tests/test_lattice.py | 246 ++
 .../networkx/generators/tests/test_line.py | 309 ++
 .../networkx/generators/tests/test_mycielski.py | 30 +
 .../generators/tests/test_nonisomorphic_trees.py | 56 +
 .../generators/tests/test_random_clustered.py | 33 +
 .../generators/tests/test_random_graphs.py | 478 +++
 .../networkx/generators/tests/test_small.py | 202 ++
 .../generators/tests/test_spectral_graph_forge.py | 49 +
 .../networkx/generators/tests/test_stochastic.py | 72 +
 .../networkx/generators/tests/test_sudoku.py | 92 +
 .../networkx/generators/tests/test_time_series.py | 64 +
 .../networkx/generators/tests/test_trees.py | 195 ++
 .../networkx/generators/tests/test_triads.py | 15 +
 .../networkx/generators/time_series.py | 74 +
 .../site-packages/networkx/generators/trees.py | 1070 +++++++
 .../site-packages/networkx/generators/triads.py | 94 +
 .../site-packages/networkx/lazy_imports.py | 188 ++
 .../site-packages/networkx/linalg/__init__.py | 13 +
 .../linalg/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 754 bytes
 .../algebraicconnectivity.cpython-312.pyc | Bin 0 -> 28390 bytes
 .../linalg/__pycache__/attrmatrix.cpython-312.pyc | Bin 0 -> 17369 bytes
 .../__pycache__/bethehessianmatrix.cpython-312.pyc | Bin 0 -> 3806 bytes
 .../linalg/__pycache__/graphmatrix.cpython-312.pyc | Bin 0 -> 6479 bytes
 .../__pycache__/laplacianmatrix.cpython-312.pyc | Bin 0 -> 19178 bytes
 .../__pycache__/modularitymatrix.cpython-312.pyc | Bin 0 -> 5447 bytes
 .../linalg/__pycache__/spectrum.cpython-312.pyc | Bin 0 -> 5615 bytes
 .../networkx/linalg/algebraicconnectivity.py | 655 ++++
 .../site-packages/networkx/linalg/attrmatrix.py | 466 +++
 .../networkx/linalg/bethehessianmatrix.py | 79 +
 .../site-packages/networkx/linalg/graphmatrix.py | 168 ++
 .../networkx/linalg/laplacianmatrix.py | 520 ++++
 .../networkx/linalg/modularitymatrix.py | 166 ++
 .../site-packages/networkx/linalg/spectrum.py | 186 ++
 .../networkx/linalg/tests/__init__.py | 0
 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 198 bytes
 .../test_algebraic_connectivity.cpython-312.pyc | Bin 0 -> 23532 bytes
 .../__pycache__/test_attrmatrix.cpython-312.pyc | Bin 0 -> 6007 bytes
 .../__pycache__/test_bethehessian.cpython-312.pyc | Bin 0 -> 2568 bytes
 .../__pycache__/test_graphmatrix.cpython-312.pyc | Bin 0 -> 13770 bytes
 .../__pycache__/test_laplacian.cpython-312.pyc | Bin 0 -> 16478 bytes
 .../__pycache__/test_modularity.cpython-312.pyc | Bin 0 -> 4219 bytes
 .../__pycache__/test_spectrum.cpython-312.pyc | Bin 0 -> 6358 bytes
 .../linalg/tests/test_algebraic_connectivity.py | 400 +++
 .../networkx/linalg/tests/test_attrmatrix.py | 108 +
 .../networkx/linalg/tests/test_bethehessian.py | 40 +
 .../networkx/linalg/tests/test_graphmatrix.py | 275 ++
 .../networkx/linalg/tests/test_laplacian.py | 334 +++
 .../networkx/linalg/tests/test_modularity.py | 86 +
 .../networkx/linalg/tests/test_spectrum.py | 70 +
 .../site-packages/networkx/readwrite/__init__.py | 17 +
 .../readwrite/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 777 bytes
 .../readwrite/__pycache__/adjlist.cpython-312.pyc | Bin 0 -> 10019 bytes
 .../readwrite/__pycache__/edgelist.cpython-312.pyc | Bin 0 -> 15532 bytes
 .../readwrite/__pycache__/gexf.cpython-312.pyc | Bin 0 -> 45136 bytes
 .../readwrite/__pycache__/gml.cpython-312.pyc | Bin 0 -> 34784 bytes
 .../readwrite/__pycache__/graph6.cpython-312.pyc | Bin 0 -> 14708 bytes
 .../readwrite/__pycache__/graphml.cpython-312.pyc | Bin 0 -> 43610 bytes
 .../readwrite/__pycache__/leda.cpython-312.pyc | Bin 0 -> 4103 bytes
 .../__pycache__/multiline_adjlist.cpython-312.pyc | Bin 0 -> 12769 bytes
 .../readwrite/__pycache__/p2g.cpython-312.pyc | Bin 0 -> 4714 bytes
 .../readwrite/__pycache__/pajek.cpython-312.pyc | Bin 0 -> 11301 bytes
 .../readwrite/__pycache__/sparse6.cpython-312.pyc | Bin 0 -> 12611 bytes
 .../readwrite/__pycache__/text.cpython-312.pyc | Bin 0 -> 25990 bytes
 .../site-packages/networkx/readwrite/adjlist.py | 310 ++
 .../site-packages/networkx/readwrite/edgelist.py | 489 +++
 .../site-packages/networkx/readwrite/gexf.py | 1064 +++++++
 .../site-packages/networkx/readwrite/gml.py | 879 ++++++
 .../site-packages/networkx/readwrite/graph6.py | 427 +++
 .../site-packages/networkx/readwrite/graphml.py | 1053 +++++++
 .../networkx/readwrite/json_graph/__init__.py | 19 +
 .../__pycache__/__init__.cpython-312.pyc | Bin 0 -> 907 bytes
 .../__pycache__/adjacency.cpython-312.pyc | Bin 0 -> 5894 bytes
 .../__pycache__/cytoscape.cpython-312.pyc | Bin 0 -> 7625 bytes
 .../__pycache__/node_link.cpython-312.pyc | Bin 0 -> 11310 bytes
 .../json_graph/__pycache__/tree.cpython-312.pyc | Bin 0 -> 5184 bytes
 .../networkx/readwrite/json_graph/adjacency.py | 156 +
 .../networkx/readwrite/json_graph/cytoscape.py | 190 ++
 .../networkx/readwrite/json_graph/node_link.py | 333 +++
 .../readwrite/json_graph/tests/__init__.py | 0
 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 212 bytes
 .../__pycache__/test_adjacency.cpython-312.pyc | Bin 0 -> 5251 bytes
 .../__pycache__/test_cytoscape.cpython-312.pyc | Bin 0 -> 4581 bytes
 .../__pycache__/test_node_link.cpython-312.pyc | Bin 0 -> 12038 bytes
 .../tests/__pycache__/test_tree.cpython-312.pyc | Bin 0 -> 3417 bytes
 .../readwrite/json_graph/tests/test_adjacency.py | 78 +
 .../readwrite/json_graph/tests/test_cytoscape.py | 78 +
 .../readwrite/json_graph/tests/test_node_link.py | 175 ++
 .../readwrite/json_graph/tests/test_tree.py | 48 +
 .../networkx/readwrite/json_graph/tree.py | 137 +
 .../site-packages/networkx/readwrite/leda.py | 108 +
 .../networkx/readwrite/multiline_adjlist.py | 393 +++
 .../site-packages/networkx/readwrite/p2g.py | 113 +
 .../site-packages/networkx/readwrite/pajek.py | 286 ++
 .../site-packages/networkx/readwrite/sparse6.py | 379 +++
 .../networkx/readwrite/tests/__init__.py | 0
 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 201 bytes
 .../tests/__pycache__/test_adjlist.cpython-312.pyc | Bin 0 -> 18565 bytes
 .../__pycache__/test_edgelist.cpython-312.pyc | Bin 0 -> 18069 bytes
 .../tests/__pycache__/test_gexf.cpython-312.pyc | Bin 0 -> 34980 bytes
 .../tests/__pycache__/test_gml.cpython-312.pyc | Bin 0 -> 30123 bytes
 .../tests/__pycache__/test_graph6.cpython-312.pyc | Bin 0 -> 15251 bytes
 .../tests/__pycache__/test_graphml.cpython-312.pyc | Bin 0 -> 90846 bytes
 .../tests/__pycache__/test_leda.cpython-312.pyc | Bin 0 -> 2271 bytes
 .../tests/__pycache__/test_p2g.cpython-312.pyc | Bin 0 -> 3135 bytes
 .../tests/__pycache__/test_pajek.cpython-312.pyc | Bin 0 -> 7970 bytes
 .../tests/__pycache__/test_sparse6.cpython-312.pyc | Bin 0 -> 9470 bytes
 .../tests/__pycache__/test_text.cpython-312.pyc | Bin 0 -> 67233 bytes
 .../networkx/readwrite/tests/test_adjlist.py | 262 ++
 .../networkx/readwrite/tests/test_edgelist.py | 314 ++
 .../networkx/readwrite/tests/test_gexf.py | 579 ++++
 .../networkx/readwrite/tests/test_gml.py | 744 +++++
 .../networkx/readwrite/tests/test_graph6.py | 181 ++
 .../networkx/readwrite/tests/test_graphml.py | 1531 ++++++++++
 .../networkx/readwrite/tests/test_leda.py | 30 +
 .../networkx/readwrite/tests/test_p2g.py | 62 +
 .../networkx/readwrite/tests/test_pajek.py | 126 +
 .../networkx/readwrite/tests/test_sparse6.py | 166 ++
 .../networkx/readwrite/tests/test_text.py | 1742 +++++++++++
 .../site-packages/networkx/readwrite/text.py | 851 ++++++
 .../python3.12/site-packages/networkx/relabel.py | 285 ++
 .../site-packages/networkx/tests/__init__.py | 0
 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 191 bytes
 .../test_all_random_functions.cpython-312.pyc | Bin 0 -> 14370 bytes
 .../tests/__pycache__/test_convert.cpython-312.pyc | Bin 0 -> 25242 bytes
 .../__pycache__/test_convert_numpy.cpython-312.pyc | Bin 0 -> 33918 bytes
 .../test_convert_pandas.cpython-312.pyc | Bin 0 -> 21722 bytes
 .../__pycache__/test_convert_scipy.cpython-312.pyc | Bin 0 -> 19696 bytes
 .../__pycache__/test_exceptions.cpython-312.pyc | Bin 0 -> 2501 bytes
 .../tests/__pycache__/test_import.cpython-312.pyc | Bin 0 -> 809 bytes
 .../__pycache__/test_lazy_imports.cpython-312.pyc | Bin 0 -> 4009 bytes
 .../tests/__pycache__/test_relabel.cpython-312.pyc | Bin 0 -> 27631 bytes
 .../networkx/tests/test_all_random_functions.py | 250 ++
 .../site-packages/networkx/tests/test_convert.py | 321 ++
 .../networkx/tests/test_convert_numpy.py | 531 ++++
 .../networkx/tests/test_convert_pandas.py | 349 +++
 .../networkx/tests/test_convert_scipy.py | 281 ++
 .../networkx/tests/test_exceptions.py | 40 +
 .../site-packages/networkx/tests/test_import.py | 11 +
 .../networkx/tests/test_lazy_imports.py | 96 +
 .../site-packages/networkx/tests/test_relabel.py | 347 +++
 .../site-packages/networkx/utils/__init__.py | 8 +
 .../utils/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 500 bytes
 .../utils/__pycache__/backends.cpython-312.pyc | Bin 0 -> 73020 bytes
 .../utils/__pycache__/configs.cpython-312.pyc | Bin 0 -> 20531 bytes
 .../utils/__pycache__/decorators.cpython-312.pyc | Bin 0 -> 45792 bytes
 .../utils/__pycache__/heaps.cpython-312.pyc | Bin 0 -> 11824 bytes
 .../utils/__pycache__/mapped_queue.cpython-312.pyc | Bin 0 -> 11860 bytes
 .../utils/__pycache__/misc.cpython-312.pyc | Bin 0 -> 26246 bytes
 .../__pycache__/random_sequence.cpython-312.pyc | Bin 0 -> 5471 bytes
 .../networkx/utils/__pycache__/rcm.cpython-312.pyc | Bin 0 -> 5840 bytes
 .../utils/__pycache__/union_find.cpython-312.pyc | Bin 0 -> 4403 bytes
 .../site-packages/networkx/utils/backends.py | 2143 ++++++++++++++
 .../site-packages/networkx/utils/configs.py | 391 +++
 .../site-packages/networkx/utils/decorators.py | 1233 ++++++++
 .../site-packages/networkx/utils/heaps.py | 338 +++
 .../site-packages/networkx/utils/mapped_queue.py | 297 ++
 .../site-packages/networkx/utils/misc.py | 651 ++++
 .../networkx/utils/random_sequence.py | 164 +
 .../python3.12/site-packages/networkx/utils/rcm.py | 159 +
 .../site-packages/networkx/utils/tests/__init__.py | 0
 .../tests/__pycache__/__init__.cpython-312.pyc | Bin 0 -> 197 bytes
 .../tests/__pycache__/test__init.cpython-312.pyc | Bin 0 -> 984 bytes
 .../__pycache__/test_backends.cpython-312.pyc | Bin 0 -> 9985 bytes
 .../tests/__pycache__/test_config.cpython-312.pyc | Bin 0 -> 18071 bytes
 .../__pycache__/test_decorators.cpython-312.pyc | Bin 0 -> 31724 bytes
 .../tests/__pycache__/test_heaps.cpython-312.pyc | Bin 0 -> 5488 bytes
 .../__pycache__/test_mapped_queue.cpython-312.pyc | Bin 0 -> 14549 bytes
 .../tests/__pycache__/test_misc.cpython-312.pyc | Bin 0 -> 15920 bytes
 .../test_random_sequence.cpython-312.pyc | Bin 0 -> 1923 bytes
 .../tests/__pycache__/test_rcm.cpython-312.pyc | Bin 0 -> 2031 bytes
 .../__pycache__/test_unionfind.cpython-312.pyc | Bin 0 -> 2670 bytes
 .../networkx/utils/tests/test__init.py | 11 +
 .../networkx/utils/tests/test_backends.py | 187 ++
 .../networkx/utils/tests/test_config.py | 263 ++
 .../networkx/utils/tests/test_decorators.py | 510 ++++
 .../networkx/utils/tests/test_heaps.py | 131 +
 .../networkx/utils/tests/test_mapped_queue.py | 268 ++
 .../networkx/utils/tests/test_misc.py | 268 ++
 .../networkx/utils/tests/test_random_sequence.py | 38 +
 .../site-packages/networkx/utils/tests/test_rcm.py | 63 +
 .../networkx/utils/tests/test_unionfind.py | 55 +
 .../site-packages/networkx/utils/union_find.py | 106 +
 1163 files changed, 187933 insertions(+)
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/boundary.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/bridges.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/broadcasting.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chains.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chordal.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/clique.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cluster.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/core.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/covering.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cuts.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cycles.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dag.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominance.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominating.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/euler.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graphical.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/isolate.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/matching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/mis.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/moral.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/node_classification.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planarity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/regular.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/richclub.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/similarity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smetric.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/summarization.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/swap.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/threshold.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/tournament.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/triads.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/vitality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/walks.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/wiener.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/density.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_density.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_density.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/link_analysis.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/link_analysis.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_link_analysis.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_link_analysis.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_voterank.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/chains.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/clique.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/leiden.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/local.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/leiden.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/local.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_leiden.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_local.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_leiden.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_local.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-312.pyc
 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-312.pyc
.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_connectivity.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_cuts.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcutsets.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/core.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/covering.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/dag.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/euler.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_gomory_hu.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow_large_graph.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_mincost.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_networksimplex.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-312.pyc create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_match_helpers.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tree_isomorphism.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/matching.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/mis.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/moral.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-312.pyc create mode 
100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_all.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_product.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_unary.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/regular.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_astar.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense_numpy.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_generic.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_unweighted.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_weighted.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/swap.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-312.pyc create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hybrid.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-312.pyc create mode 100644 
.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_triads.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_bridges.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_broadcasting.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py create mode 
100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py 
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_beamsearch.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_bfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_dfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgebfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgedfs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_branchings.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_coding.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_decomposition.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_mst.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_operations.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_recognition.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/triads.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/walks.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/coreviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/digraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/filters.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/function.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graphviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multidigraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multigraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/__pycache__/reportviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/coreviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/digraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/filters.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/function.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/graphviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/multidigraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/multigraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/reportviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/dispatch_interface.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/historical_tests.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_coreviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph_historical.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_filters.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_function.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph_historical.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_graphviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_multidigraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_multigraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_reportviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_special.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/classes/tests/test_subgraphviews.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/conftest.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/convert.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/convert_matrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/layout.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/layout.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/nx_agraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/nx_latex.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/nx_pydot.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/nx_pylab.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_complex.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_empty_graph.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_house_with_colors.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_labels_and_colors.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_shortest_path.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/test_agraph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/test_latex.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/test_layout.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pydot.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pylab.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/exception.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/atlas.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/classic.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/cographs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/community.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/degree_seq.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/directed.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/duplication.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/ego.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/expanders.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/geometric.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/harary_graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/intersection.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/interval_graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/lattice.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/line.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/mycielski.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_clustered.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_graphs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/small.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/social.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/stochastic.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/sudoku.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/time_series.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/trees.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/__pycache__/triads.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/atlas.dat.gz
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/atlas.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/classic.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/cographs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/community.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/degree_seq.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/directed.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/duplication.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/ego.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/expanders.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/geometric.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/harary_graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/internet_as_graphs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/intersection.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/interval_graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/joint_degree_seq.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/lattice.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/line.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/mycielski.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/nonisomorphic_trees.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/random_clustered.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/random_graphs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/small.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/social.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/spectral_graph_forge.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/stochastic.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/sudoku.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_community.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_degree_seq.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_directed.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_ego.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_expanders.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_geometric.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_harary_graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_joint_degree_seq.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_line.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_mycielski.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_clustered.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_spectral_graph_forge.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_sudoku.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_trees.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_triads.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_atlas.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_classic.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_cographs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_community.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_degree_seq.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_directed.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_duplication.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_ego.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_expanders.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_geometric.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_harary_graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_internet_as_graphs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_intersection.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_interval_graph.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_joint_degree_seq.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_lattice.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_line.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_mycielski.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_nonisomorphic_trees.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_clustered.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_graphs.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_small.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_spectral_graph_forge.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_stochastic.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_sudoku.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_time_series.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_trees.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/tests/test_triads.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/time_series.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/trees.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/generators/triads.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/lazy_imports.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/spectrum.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/algebraicconnectivity.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/attrmatrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/bethehessianmatrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/graphmatrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/laplacianmatrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/modularitymatrix.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/spectrum.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__init__.py
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_algebraic_connectivity.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_bethehessian.cpython-312.pyc
create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_graphmatrix.cpython-312.pyc
create mode 100644
.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_laplacian.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_modularity.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_spectrum.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_attrmatrix.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_bethehessian.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_graphmatrix.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_laplacian.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_modularity.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/linalg/tests/test_spectrum.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gexf.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gml.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graph6.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graphml.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/leda.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/p2g.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/pajek.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/text.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/adjlist.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/edgelist.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/gexf.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/gml.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/graph6.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/graphml.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-312.pyc create mode 100644 
.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/adjacency.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/cytoscape.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/node_link.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_adjacency.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_cytoscape.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_node_link.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_tree.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_adjacency.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_cytoscape.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_node_link.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_tree.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tree.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/leda.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/multiline_adjlist.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/p2g.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/pajek.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/sparse6.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_adjlist.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_edgelist.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gexf.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gml.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graph6.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graphml.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_leda.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_p2g.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_pajek.cpython-312.pyc create mode 100644 
.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_sparse6.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_text.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_adjlist.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_edgelist.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gexf.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gml.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_graph6.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_graphml.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_leda.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_p2g.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_pajek.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_sparse6.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_text.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/readwrite/text.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/relabel.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_import.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_relabel.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_convert.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_import.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__init__.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/backends.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/configs.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/decorators.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/heaps.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/misc.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/random_sequence.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/rcm.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/__pycache__/union_find.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/backends.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/configs.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/decorators.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/heaps.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/mapped_queue.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/misc.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/random_sequence.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/rcm.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__init__.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_unionfind.cpython-312.pyc create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test__init.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_backends.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_config.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_decorators.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_heaps.py create mode 100644 
.venv/lib/python3.12/site-packages/networkx/utils/tests/test_mapped_queue.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_misc.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_random_sequence.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_rcm.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/tests/test_unionfind.py create mode 100644 .venv/lib/python3.12/site-packages/networkx/utils/union_find.py
(limited to '.venv/lib/python3.12/site-packages/networkx')
diff --git a/.venv/lib/python3.12/site-packages/networkx/__init__.py b/.venv/lib/python3.12/site-packages/networkx/__init__.py
new file mode 100644
index 0000000..bed45be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/__init__.py
@@ -0,0 +1,53 @@
+"""
+NetworkX
+========
+
+NetworkX is a Python package for the creation, manipulation, and study of the
+structure, dynamics, and functions of complex networks.
+
+See https://networkx.org for complete documentation.
+"""
+
+__version__ = "3.5"
+
+
+# These are imported in order as listed
+from networkx.lazy_imports import _lazy_import
+
+from networkx.exception import *
+
+from networkx import utils
+from networkx.utils import _clear_cache, _dispatchable
+
+# load_and_call entry_points, set configs
+config = utils.backends._set_configs_from_environment()
+utils.config = utils.configs.config = config # type: ignore[attr-defined]
+
+from networkx import classes
+from networkx.classes import filters
+from networkx.classes import *
+
+from networkx import convert
+from networkx.convert import *
+
+from networkx import convert_matrix
+from networkx.convert_matrix import *
+
+from networkx import relabel
+from networkx.relabel import *
+
+from networkx import generators
+from networkx.generators import *
+
+from networkx import readwrite
+from networkx.readwrite import *
+
+# Need to test with SciPy, when available
+from networkx import algorithms
+from networkx.algorithms import *
+
+from networkx import linalg
+from networkx.linalg import *
+
+from networkx import drawing
+from networkx.drawing import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..cf3c8b8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc new file mode 100644 index 0000000..2668f9e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/conftest.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc new file mode 100644 index 0000000..f1900e5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc new file mode 100644 index 0000000..73f4e50 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/convert_matrix.cpython-312.pyc differ
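The `networkx/__init__.py` added above is what stitches the vendored package together: each `from networkx.<subpackage> import *` re-exports that subpackage's public names at the top level, and `_set_configs_from_environment()` wires up the backend-dispatch config before any algorithm module loads. A minimal smoke test of the resulting namespace (standard NetworkX API only; nothing here is specific to this patch):

    import networkx as nx

    print(nx.__version__)         # "3.5", as set in the hunk above
    G = nx.path_graph(4)          # re-exported from networkx.generators
    H = nx.relabel_nodes(G, str)  # re-exported from networkx.relabel
    print(sorted(H))              # ['0', '1', '2', '3']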
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc new file mode 100644 index 0000000..8ece7bc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/exception.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc new file mode 100644 index 0000000..97cee14 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/lazy_imports.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc new file mode 100644 index 0000000..6b85571 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/__pycache__/relabel.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py
new file mode 100644
index 0000000..56bfb14
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/__init__.py
@@ -0,0 +1,133 @@
+from networkx.algorithms.assortativity import *
+from networkx.algorithms.asteroidal import *
+from networkx.algorithms.boundary import *
+from networkx.algorithms.broadcasting import *
+from networkx.algorithms.bridges import *
+from networkx.algorithms.chains import *
+from networkx.algorithms.centrality import *
+from networkx.algorithms.chordal import *
+from networkx.algorithms.cluster import *
+from networkx.algorithms.clique import *
+from networkx.algorithms.communicability_alg import *
+from networkx.algorithms.components import *
+from networkx.algorithms.coloring import *
+from networkx.algorithms.core import *
+from networkx.algorithms.covering import *
+from networkx.algorithms.cycles import *
+from networkx.algorithms.cuts import *
+from networkx.algorithms.d_separation import *
+from networkx.algorithms.dag import *
+from networkx.algorithms.distance_measures import *
+from networkx.algorithms.distance_regular import *
+from networkx.algorithms.dominance import *
+from networkx.algorithms.dominating import *
+from networkx.algorithms.efficiency_measures import *
+from networkx.algorithms.euler import *
+from networkx.algorithms.graphical import *
+from networkx.algorithms.hierarchy import *
+from networkx.algorithms.hybrid import *
+from networkx.algorithms.link_analysis import *
+from networkx.algorithms.link_prediction import *
+from networkx.algorithms.lowest_common_ancestors import *
+from networkx.algorithms.isolate import *
+from networkx.algorithms.matching import *
+from networkx.algorithms.minors import *
+from networkx.algorithms.mis import *
+from networkx.algorithms.moral import *
+from networkx.algorithms.non_randomness import *
+from networkx.algorithms.operators import *
+from networkx.algorithms.planarity import *
+from networkx.algorithms.planar_drawing import *
+from networkx.algorithms.polynomials import *
+from networkx.algorithms.reciprocity import *
+from networkx.algorithms.regular import *
+from networkx.algorithms.richclub import *
+from networkx.algorithms.shortest_paths import *
+from networkx.algorithms.similarity import *
+from networkx.algorithms.graph_hashing import *
+from networkx.algorithms.simple_paths import *
+from networkx.algorithms.smallworld import *
+from networkx.algorithms.smetric import *
+from networkx.algorithms.structuralholes import *
+from networkx.algorithms.sparsifiers import *
+from networkx.algorithms.summarization import *
+from networkx.algorithms.swap import *
+from networkx.algorithms.time_dependent import *
+from networkx.algorithms.traversal import *
+from networkx.algorithms.triads import *
+from networkx.algorithms.vitality import *
+from networkx.algorithms.voronoi import *
+from networkx.algorithms.walks import *
+from networkx.algorithms.wiener import *
+
+# Make certain subpackages available to the user as direct imports from
+# the `networkx` namespace.
+from networkx.algorithms import approximation
+from networkx.algorithms import assortativity
+from networkx.algorithms import bipartite
+from networkx.algorithms import node_classification
+from networkx.algorithms import centrality
+from networkx.algorithms import chordal
+from networkx.algorithms import cluster
+from networkx.algorithms import clique
+from networkx.algorithms import components
+from networkx.algorithms import connectivity
+from networkx.algorithms import community
+from networkx.algorithms import coloring
+from networkx.algorithms import flow
+from networkx.algorithms import isomorphism
+from networkx.algorithms import link_analysis
+from networkx.algorithms import lowest_common_ancestors
+from networkx.algorithms import operators
+from networkx.algorithms import shortest_paths
+from networkx.algorithms import tournament
+from networkx.algorithms import traversal
+from networkx.algorithms import tree
+
+# Make certain functions from some of the previous subpackages available
+# to the user as direct imports from the `networkx` namespace.
+from networkx.algorithms.bipartite import complete_bipartite_graph
+from networkx.algorithms.bipartite import is_bipartite
+from networkx.algorithms.bipartite import projected_graph
+from networkx.algorithms.connectivity import all_pairs_node_connectivity
+from networkx.algorithms.connectivity import all_node_cuts
+from networkx.algorithms.connectivity import average_node_connectivity
+from networkx.algorithms.connectivity import edge_connectivity
+from networkx.algorithms.connectivity import edge_disjoint_paths
+from networkx.algorithms.connectivity import k_components
+from networkx.algorithms.connectivity import k_edge_components
+from networkx.algorithms.connectivity import k_edge_subgraphs
+from networkx.algorithms.connectivity import k_edge_augmentation
+from networkx.algorithms.connectivity import is_k_edge_connected
+from networkx.algorithms.connectivity import minimum_edge_cut
+from networkx.algorithms.connectivity import minimum_node_cut
+from networkx.algorithms.connectivity import node_connectivity
+from networkx.algorithms.connectivity import node_disjoint_paths
+from networkx.algorithms.connectivity import stoer_wagner
+from networkx.algorithms.flow import capacity_scaling
+from networkx.algorithms.flow import cost_of_flow
+from networkx.algorithms.flow import gomory_hu_tree
+from networkx.algorithms.flow import max_flow_min_cost
+from networkx.algorithms.flow import maximum_flow
+from networkx.algorithms.flow import maximum_flow_value
+from networkx.algorithms.flow import min_cost_flow
+from networkx.algorithms.flow import min_cost_flow_cost
+from networkx.algorithms.flow import minimum_cut
+from networkx.algorithms.flow import minimum_cut_value
+from networkx.algorithms.flow import network_simplex
+from networkx.algorithms.isomorphism import could_be_isomorphic
+from networkx.algorithms.isomorphism import fast_could_be_isomorphic
+from networkx.algorithms.isomorphism import faster_could_be_isomorphic
+from networkx.algorithms.isomorphism import is_isomorphic
+from networkx.algorithms.isomorphism.vf2pp import *
+from networkx.algorithms.tree.branchings import maximum_branching
+from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
+from networkx.algorithms.tree.branchings import minimum_branching
+from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
+from networkx.algorithms.tree.branchings import ArborescenceIterator
+from networkx.algorithms.tree.coding import *
+from networkx.algorithms.tree.decomposition import *
+from networkx.algorithms.tree.mst import *
+from networkx.algorithms.tree.operations import *
+from networkx.algorithms.tree.recognition import *
+from networkx.algorithms.tournament import is_tournament
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..add085d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-312.pyc new file mode 100644 index 0000000..5a8296a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/asteroidal.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/boundary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/boundary.cpython-312.pyc new file mode 100644 index 0000000..6cc35fb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/boundary.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/bridges.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/bridges.cpython-312.pyc new file mode 100644 index 0000000..ff01790 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/bridges.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/broadcasting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/broadcasting.cpython-312.pyc new file mode 100644 index 0000000..09fa4e3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/broadcasting.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chains.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chains.cpython-312.pyc new file mode 100644 index 0000000..ac33ea7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chains.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chordal.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chordal.cpython-312.pyc new file mode 100644 index 0000000..057ef97 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/chordal.cpython-312.pyc differ
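Worth noting in the `algorithms/__init__.py` hunk above: subpackages such as `flow` and `connectivity` are imported as modules rather than star-imported, so only the functions named in the explicit block at the end become top-level `nx.*` attributes. A short illustration (standard NetworkX API; the toy graph and printed value are just for demonstration):

    import networkx as nx

    # nx.maximum_flow resolves only because of the explicit
    # `from networkx.algorithms.flow import maximum_flow` above.
    G = nx.DiGraph()
    G.add_edge("s", "a", capacity=3)
    G.add_edge("a", "t", capacity=2)
    value, flow_dict = nx.maximum_flow(G, "s", "t")
    print(value)  # 2 -- the a->t edge is the bottleneck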
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/clique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/clique.cpython-312.pyc new file mode 100644 index 0000000..32144d8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/clique.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cluster.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cluster.cpython-312.pyc new file mode 100644 index 0000000..92872f5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cluster.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-312.pyc new file mode 100644 index 0000000..4f644c0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/communicability_alg.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000..98402bb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/core.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/covering.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/covering.cpython-312.pyc new file mode 100644 index 0000000..bd0cbee Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/covering.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cuts.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cuts.cpython-312.pyc new file mode 100644 index 0000000..4ee1dd2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cuts.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cycles.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cycles.cpython-312.pyc new file mode 100644 index 0000000..5029675 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/cycles.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-312.pyc new file mode 100644 index 0000000..a53d1a6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/d_separation.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dag.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dag.cpython-312.pyc new file mode 100644 index 0000000..7b8576f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dag.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-312.pyc new file mode
100644 index 0000000..ff1029e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_measures.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-312.pyc new file mode 100644 index 0000000..829b376 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/distance_regular.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominance.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominance.cpython-312.pyc new file mode 100644 index 0000000..7128df6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominance.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominating.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominating.cpython-312.pyc new file mode 100644 index 0000000..5633734 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/dominating.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-312.pyc new file mode 100644 index 0000000..4fbae28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/efficiency_measures.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/euler.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/euler.cpython-312.pyc new file mode 100644 index 0000000..c4fe65f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/euler.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-312.pyc new file mode 100644 index 0000000..469e35e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graph_hashing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graphical.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graphical.cpython-312.pyc new file mode 100644 index 0000000..abe31e0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/graphical.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-312.pyc new file mode 100644 index 0000000..ce9a787 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hierarchy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-312.pyc new file mode 100644 index 0000000..5aeff0d Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/hybrid.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/isolate.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/isolate.cpython-312.pyc new file mode 100644 index 0000000..d78b896 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/isolate.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-312.pyc new file mode 100644 index 0000000..82b4114 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/link_prediction.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-312.pyc new file mode 100644 index 0000000..2c2c01d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/lowest_common_ancestors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/matching.cpython-312.pyc new file mode 100644 index 0000000..c9316a0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/matching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/mis.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/mis.cpython-312.pyc new file mode 100644 index 0000000..3c1c8ce Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/mis.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/moral.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/moral.cpython-312.pyc new file mode 100644 index 0000000..0b5ae00 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/moral.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/node_classification.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/node_classification.cpython-312.pyc new file mode 100644 index 0000000..c023823 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/node_classification.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-312.pyc new file mode 100644 index 0000000..6e0798b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/non_randomness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-312.pyc new file mode 100644 index 0000000..026a588 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planar_drawing.cpython-312.pyc differ diff 
--git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planarity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planarity.cpython-312.pyc new file mode 100644 index 0000000..25c9afa Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/planarity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-312.pyc new file mode 100644 index 0000000..d25f863 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/polynomials.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-312.pyc new file mode 100644 index 0000000..9c439a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/reciprocity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/regular.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/regular.cpython-312.pyc new file mode 100644 index 0000000..ceac357 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/regular.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/richclub.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/richclub.cpython-312.pyc new file mode 100644 index 0000000..6038a8b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/richclub.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/similarity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/similarity.cpython-312.pyc new file mode 100644 index 0000000..6e4b450 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/similarity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-312.pyc new file mode 100644 index 0000000..4dfa8b5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/simple_paths.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-312.pyc new file mode 100644 index 0000000..35c1609 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smallworld.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smetric.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smetric.cpython-312.pyc new file mode 100644 index 0000000..1fd52f1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/smetric.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-312.pyc new file mode 100644 index 0000000..0fe826e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/sparsifiers.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-312.pyc new file mode 100644 index 0000000..de1a936 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/structuralholes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/summarization.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/summarization.cpython-312.pyc new file mode 100644 index 0000000..ff7191d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/summarization.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/swap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/swap.cpython-312.pyc new file mode 100644 index 0000000..9eacf1c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/swap.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/threshold.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/threshold.cpython-312.pyc new file mode 100644 index 0000000..edb9fcb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/threshold.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-312.pyc new file mode 100644 index 0000000..ce55937 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/time_dependent.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/tournament.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/tournament.cpython-312.pyc new file mode 100644 index 0000000..60c5b3f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/tournament.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/triads.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/triads.cpython-312.pyc new file mode 100644 index 0000000..c49a4d3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/triads.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/vitality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/vitality.cpython-312.pyc new file mode 100644 index 0000000..3892b1c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/vitality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-312.pyc new file mode 100644 index 0000000..916f901 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/voronoi.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/walks.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/walks.cpython-312.pyc new file mode 100644 index 0000000..d48b47e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/walks.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/wiener.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/wiener.cpython-312.pyc new file mode 100644 index 0000000..4e0af60 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/__pycache__/wiener.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py
new file mode 100644
index 0000000..b0b4015
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__init__.py
@@ -0,0 +1,26 @@
+"""Approximations of graph properties and heuristic methods for optimization.
+
+The functions in this package are not imported into the top-level ``networkx``
+namespace, so the easiest way to use them is with::
+
+    >>> from networkx.algorithms import approximation
+
+Another option is to import the specific function with
+``from networkx.algorithms.approximation import function_name``.
+
+"""
+
+from networkx.algorithms.approximation.clustering_coefficient import *
+from networkx.algorithms.approximation.clique import *
+from networkx.algorithms.approximation.connectivity import *
+from networkx.algorithms.approximation.distance_measures import *
+from networkx.algorithms.approximation.dominating_set import *
+from networkx.algorithms.approximation.kcomponents import *
+from networkx.algorithms.approximation.matching import *
+from networkx.algorithms.approximation.ramsey import *
+from networkx.algorithms.approximation.steinertree import *
+from networkx.algorithms.approximation.traveling_salesman import *
+from networkx.algorithms.approximation.treewidth import *
+from networkx.algorithms.approximation.vertex_cover import *
+from networkx.algorithms.approximation.maxcut import *
+from networkx.algorithms.approximation.density import *
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..402d9a2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-312.pyc new file mode 100644 index 0000000..b55667e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clique.cpython-312.pyc differ
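As the package docstring above says, these approximation routines stay out of the top-level star imports; they are reached through the subpackage (or, because `algorithms/__init__.py` earlier in this patch imports it, through the `nx.approximation` shorthand used in the doctests below). For example:

    import networkx as nx
    from networkx.algorithms import approximation

    G = nx.petersen_graph()
    clique = approximation.max_clique(G)  # same as nx.approximation.max_clique(G)
    iset = approximation.maximum_independent_set(G)
    print(len(clique), len(iset))  # sizes found by the heuristics; not guaranteed optimal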
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-312.pyc new file mode 100644 index 0000000..1f1b589 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/clustering_coefficient.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-312.pyc new file mode 100644 index 0000000..8322446 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/connectivity.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/density.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/density.cpython-312.pyc new file mode 100644 index 0000000..e3bd7ca Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/density.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-312.pyc new file mode 100644 index 0000000..d4e7565 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/distance_measures.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-312.pyc new file mode 100644 index 0000000..3095786 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/dominating_set.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-312.pyc new file mode 100644 index 0000000..183ce72 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/kcomponents.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-312.pyc new file mode 100644 index 0000000..3a4f150 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/matching.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-312.pyc new file mode 100644 index 0000000..7eace4d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/maxcut.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-312.pyc new file mode 100644 index 0000000..2e19871 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/ramsey.cpython-312.pyc differ
a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-312.pyc new file mode 100644 index 0000000..162e363 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/steinertree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-312.pyc new file mode 100644 index 0000000..22b41fc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/traveling_salesman.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-312.pyc new file mode 100644 index 0000000..8cffcef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/treewidth.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-312.pyc new file mode 100644 index 0000000..e7a2c29 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/__pycache__/vertex_cover.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py new file mode 100644 index 0000000..ed0f350 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clique.py @@ -0,0 +1,259 @@ +"""Functions for computing large cliques and maximum independent sets.""" + +import networkx as nx +from networkx.algorithms.approximation import ramsey +from networkx.utils import not_implemented_for + +__all__ = [ + "clique_removal", + "max_clique", + "large_clique_size", + "maximum_independent_set", +] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def maximum_independent_set(G): + """Returns an approximate maximum independent set. + + Independent set or stable set is a set of vertices in a graph, no two of + which are adjacent. That is, it is a set I of vertices such that for every + two vertices in I, there is no edge connecting the two. Equivalently, each + edge in the graph has at most one endpoint in I. The size of an independent + set is the number of vertices it contains [1]_. + + A maximum independent set is a largest independent set for a given graph G + and its size is denoted $\\alpha(G)$. The problem of finding such a set is called + the maximum independent set problem and is an NP-hard optimization problem. + As such, it is unlikely that there exists an efficient algorithm for finding + a maximum independent set of a graph. + + The Independent Set algorithm is based on [2]_. 
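A quick doctest-style check of the definition above against the function's output (a minimal editorial sketch; any returned set must be pairwise non-adjacent):

>>> import networkx as nx
>>> from networkx.algorithms import approximation as approx
>>> G = nx.cycle_graph(7)
>>> iset = approx.maximum_independent_set(G)
>>> any(G.has_edge(u, v) for u in iset for v in iset)  # no edge inside iset
False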
+ + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + iset : set + The approximate maximum independent set + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.maximum_independent_set(G) + {0, 2, 4, 6, 9} + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + Finds an $O(|V|/(\log|V|)^2)$ approximation of a maximum independent set in the worst case. + + References + ---------- + .. [1] `Wikipedia: Independent set <https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_ + .. [2] Boppana, R., & Halldórsson, M. M. (1992). + Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer. + """ + iset, _ = clique_removal(G) + return iset + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def max_clique(G): + r"""Find the Maximum Clique + + Finds an $O(|V|/(\log|V|)^2)$ approximation of a maximum clique/independent set + in the worst case. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + clique : set + The approximate maximum clique of the graph + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.max_clique(G) + {8, 9} + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + A clique in an undirected graph G = (V, E) is a subset of the vertex set + `C \subseteq V` such that for every two vertices in C there exists an edge + connecting the two. This is equivalent to saying that the subgraph + induced by C is complete (in some cases, the term clique may also refer + to the subgraph). + + A maximum clique is a clique of the largest possible size in a given graph. + The clique number `\omega(G)` of a graph G is the number of + vertices in a maximum clique in G. The intersection number of + G is the smallest number of cliques that together cover all edges of G. + + https://en.wikipedia.org/wiki/Maximum_clique + + References + ---------- + .. [1] Boppana, R., & Halldórsson, M. M. (1992). + Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer. + doi:10.1007/BF01994876 + """ + # finding the maximum clique in a graph is equivalent to finding + # the independent set in the complementary graph + cgraph = nx.complement(G) + iset, _ = clique_removal(cgraph) + return iset + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def clique_removal(G): + r"""Repeatedly remove cliques from the graph. + + Results in an $O(|V|/(\log |V|)^2)$ approximation of maximum clique + and independent set. Returns the largest independent set found, along + with found maximal cliques. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + max_ind_cliques : (set, list) tuple + 2-tuple of the maximal independent set and a list of maximal cliques (sets). + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.clique_removal(G) + ({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}]) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Boppana, R., & Halldórsson, M. M. (1992). + Approximating maximum independent sets by excluding subgraphs. + BIT Numerical Mathematics, 32(2), 180–196. Springer.
+ """ + graph = G.copy() + c_i, i_i = ramsey.ramsey_R2(graph) + cliques = [c_i] + isets = [i_i] + while graph: + graph.remove_nodes_from(c_i) + c_i, i_i = ramsey.ramsey_R2(graph) + if c_i: + cliques.append(c_i) + if i_i: + isets.append(i_i) + # Determine the largest independent set as measured by cardinality. + maxiset = max(isets, key=len) + return maxiset, cliques + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def large_clique_size(G): + """Find the size of a large clique in a graph. + + A *clique* is a subset of nodes in which each pair of nodes is + adjacent. This function is a heuristic for finding the size of a + large clique in the graph. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + k: integer + The size of a large clique in the graph. + + Examples + -------- + >>> G = nx.path_graph(10) + >>> nx.approximation.large_clique_size(G) + 2 + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + Notes + ----- + This implementation is from [1]_. Its worst case time complexity is + :math:`O(n d^2)`, where *n* is the number of nodes in the graph and + *d* is the maximum degree. + + This function is a heuristic, which means it may work well in + practice, but there is no rigorous mathematical guarantee on the + ratio between the returned number and the actual largest clique size + in the graph. + + References + ---------- + .. [1] Pattabiraman, Bharath, et al. + "Fast Algorithms for the Maximum Clique Problem on Massive Graphs + with Applications to Overlapping Community Detection." + *Internet Mathematics* 11.4-5 (2015): 421--448. + + + See also + -------- + + :func:`networkx.algorithms.approximation.clique.max_clique` + A function that returns an approximate maximum clique with a + guarantee on the approximation ratio. + + :mod:`networkx.algorithms.clique` + Functions for finding the exact maximum clique in a graph. + + """ + degrees = G.degree + + def _clique_heuristic(G, U, size, best_size): + if not U: + return max(best_size, size) + u = max(U, key=degrees) + U.remove(u) + N_prime = {v for v in G[u] if degrees[v] >= best_size} + return _clique_heuristic(G, U & N_prime, size + 1, best_size) + + best_size = 0 + nodes = (u for u in G if degrees[u] >= best_size) + for u in nodes: + neighbors = {v for v in G[u] if degrees[v] >= best_size} + best_size = _clique_heuristic(G, neighbors, 1, best_size) + return best_size diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py new file mode 100644 index 0000000..545fc65 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/clustering_coefficient.py @@ -0,0 +1,71 @@ +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["average_clustering"] + + +@not_implemented_for("directed") +@py_random_state(2) +@nx._dispatchable(name="approximate_average_clustering") +def average_clustering(G, trials=1000, seed=None): + r"""Estimates the average clustering coefficient of G. + + The local clustering of each node in `G` is the fraction of triangles + that actually exist over all possible triangles in its neighborhood. + The average clustering coefficient of a graph `G` is the mean of + local clusterings. 
+ + This function finds an approximate average clustering coefficient + for G by repeating `n` times (defined in `trials`) the following + experiment: choose a node at random, choose two of its neighbors + at random, and check if they are connected. The approximate + coefficient is the fraction of triangles found over the number + of trials [1]_. + + Parameters + ---------- + G : NetworkX graph + + trials : integer + Number of trials to perform (default 1000). + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + c : float + Approximated average clustering coefficient. + + Examples + -------- + >>> from networkx.algorithms import approximation + >>> G = nx.erdos_renyi_graph(10, 0.2, seed=10) + >>> approximation.average_clustering(G, trials=1000, seed=10) + 0.214 + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + References + ---------- + .. [1] Schank, Thomas, and Dorothea Wagner. Approximating clustering + coefficient and transitivity. Universität Karlsruhe, Fakultät für + Informatik, 2004. + https://doi.org/10.5445/IR/1000001239 + + """ + n = len(G) + triangles = 0 + nodes = list(G) + for i in [int(seed.random() * n) for i in range(trials)]: + nbrs = list(G[nodes[i]]) + if len(nbrs) < 2: + continue + u, v = seed.sample(nbrs, 2) + if u in G[v]: + triangles += 1 + return triangles / trials diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py new file mode 100644 index 0000000..0b596fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/connectivity.py @@ -0,0 +1,412 @@ +"""Fast approximation for node connectivity""" + +import itertools +from operator import itemgetter + +import networkx as nx + +__all__ = [ + "local_node_connectivity", + "node_connectivity", + "all_pairs_node_connectivity", +] + + +@nx._dispatchable(name="approximate_local_node_connectivity") +def local_node_connectivity(G, source, target, cutoff=None): + """Compute node connectivity between source and target. + + Pairwise or local node connectivity between two distinct and nonadjacent + nodes is the minimum number of nodes that must be removed (minimum + separating cutset) to disconnect them. By Menger's theorem, this is equal + to the number of node independent paths (paths that share no nodes other + than source and target), which is what we compute in this function. + + This algorithm is a fast approximation that gives a strict lower + bound on the actual number of node independent paths between two nodes [1]_. + It works for both directed and undirected graphs. + + Parameters + ---------- + + G : NetworkX graph + + source : node + Starting node for node connectivity + + target : node + Ending node for node connectivity + + cutoff : integer + Maximum node connectivity to consider. If None, the minimum degree + of source or target is used as a cutoff. Default value None.
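A small deterministic sketch (node labels as assigned by ``complete_bipartite_graph``): between two nodes on the same side of $K_{3,3}$ every shortest path runs through a distinct node of the other side, so the heuristic recovers all three independent paths:

>>> import networkx as nx
>>> from networkx.algorithms import approximation as approx
>>> G = nx.complete_bipartite_graph(3, 3)  # sides {0, 1, 2} and {3, 4, 5}
>>> approx.local_node_connectivity(G, 0, 1)
3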
+ + Returns + ------- + k : integer + Pairwise node connectivity + + Examples + -------- + >>> # Platonic octahedral graph has node connectivity 4 + >>> # for each non-adjacent node pair + >>> from networkx.algorithms import approximation as approx + >>> G = nx.octahedral_graph() + >>> approx.local_node_connectivity(G, 0, 5) + 4 + + Notes + ----- + This algorithm [1]_ finds node independent paths between two nodes by + computing their shortest path using BFS, marking the nodes of the path + found as 'used' and then searching other shortest paths excluding the + nodes marked as used until no more paths exist. It is not exact because + a shortest path could use nodes that, if the path were longer, may belong + to two different node independent paths. Thus it only guarantees a + strict lower bound on node connectivity. + + Note that the authors propose a further refinement, losing accuracy and + gaining speed, which is not implemented yet. + + See also + -------- + all_pairs_node_connectivity + node_connectivity + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + + """ + if target == source: + raise nx.NetworkXError("source and target have to be different nodes.") + + # Maximum possible node independent paths + if G.is_directed(): + possible = min(G.out_degree(source), G.in_degree(target)) + else: + possible = min(G.degree(source), G.degree(target)) + + K = 0 + if not possible: + return K + + if cutoff is None: + cutoff = float("inf") + + exclude = set() + for i in range(min(possible, cutoff)): + try: + path = _bidirectional_shortest_path(G, source, target, exclude) + exclude.update(set(path)) + K += 1 + except nx.NetworkXNoPath: + break + + return K + + +@nx._dispatchable(name="approximate_node_connectivity") +def node_connectivity(G, s=None, t=None): + r"""Returns an approximation for node connectivity for a graph or digraph G. + + Node connectivity is equal to the minimum number of nodes that + must be removed to disconnect G or render it trivial. By Menger's theorem, + this is equal to the number of node independent paths (paths that + share no nodes other than source and target). + + If source and target nodes are provided, this function returns the + local node connectivity: the minimum number of nodes that must be + removed to break all paths from source to target in G. + + This algorithm is based on a fast approximation that gives a strict lower + bound on the actual number of node independent paths between two nodes [1]_. + It works for both directed and undirected graphs. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + s : node + Source node. Optional. Default value: None. + + t : node + Target node. Optional. Default value: None. + + Returns + ------- + K : integer + Node connectivity of G, or local node connectivity if source + and target are provided. + + Examples + -------- + >>> # Platonic octahedral graph is 4-node-connected + >>> from networkx.algorithms import approximation as approx + >>> G = nx.octahedral_graph() + >>> approx.node_connectivity(G) + 4 + + Notes + ----- + This algorithm [1]_ finds node independent paths between two nodes by + computing their shortest path using BFS, marking the nodes of the path + found as 'used' and then searching other shortest paths excluding the + nodes marked as used until no more paths exist.
It is not exact because + a shortest path could use nodes that, if the path were longer, may belong + to two different node independent paths. Thus it only guarantees a + strict lower bound on node connectivity. + + See also + -------- + all_pairs_node_connectivity + local_node_connectivity + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + + """ + if (s is not None and t is None) or (s is None and t is not None): + raise nx.NetworkXError("Both source and target must be specified.") + + # Local node connectivity + if s is not None and t is not None: + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + return local_node_connectivity(G, s, t) + + # Global node connectivity + if G.is_directed(): + connected_func = nx.is_weakly_connected + iter_func = itertools.permutations + + def neighbors(v): + return itertools.chain(G.predecessors(v), G.successors(v)) + + else: + connected_func = nx.is_connected + iter_func = itertools.combinations + neighbors = G.neighbors + + if not connected_func(G): + return 0 + + # Choose a node with minimum degree + v, minimum_degree = min(G.degree(), key=itemgetter(1)) + # Node connectivity is bounded by minimum degree + K = minimum_degree + # compute local node connectivity with all non-neighbor nodes + # and store the minimum + for w in set(G) - set(neighbors(v)) - {v}: + K = min(K, local_node_connectivity(G, v, w, cutoff=K)) + # Same for non-adjacent pairs of neighbors of v + for x, y in iter_func(neighbors(v), 2): + if y not in G[x] and x != y: + K = min(K, local_node_connectivity(G, x, y, cutoff=K)) + return K + + +@nx._dispatchable(name="approximate_all_pairs_node_connectivity") +def all_pairs_node_connectivity(G, nbunch=None, cutoff=None): + """Compute node connectivity between all pairs of nodes. + + Pairwise or local node connectivity between two distinct and nonadjacent + nodes is the minimum number of nodes that must be removed (minimum + separating cutset) to disconnect them. By Menger's theorem, this is equal + to the number of node independent paths (paths that share no nodes other + than source and target), which is what we compute in this function. + + This algorithm is a fast approximation that gives a strict lower + bound on the actual number of node independent paths between two nodes [1]_. + It works for both directed and undirected graphs. + + + Parameters + ---------- + G : NetworkX graph + + nbunch : container + Container of nodes. If provided, node connectivity will be computed + only over pairs of nodes in nbunch. + + cutoff : integer + Maximum node connectivity to consider. If None, the minimum degree + of source or target is used as a cutoff in each pair of nodes. + Default value None.
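Because these routines only ever undercount independent paths, their results can be bounded by the exact computation from the main ``networkx`` namespace (a minimal sketch, assuming the exact ``nx.node_connectivity`` is available):

>>> import networkx as nx
>>> from networkx.algorithms import approximation as approx
>>> G = nx.icosahedral_graph()  # exactly 5-node-connected
>>> approx.node_connectivity(G) <= nx.node_connectivity(G)
True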
+ + Returns + ------- + K : dictionary + Dictionary, keyed by source and target, of pairwise node connectivity + + Examples + -------- + A 3 node cycle with one extra node attached has connectivity 2 between all + nodes in the cycle and connectivity 1 between the extra node and the rest: + + >>> G = nx.cycle_graph(3) + >>> G.add_edge(2, 3) + >>> import pprint # for nice dictionary formatting + >>> pprint.pprint(nx.all_pairs_node_connectivity(G)) + {0: {1: 2, 2: 2, 3: 1}, + 1: {0: 2, 2: 2, 3: 1}, + 2: {0: 2, 1: 2, 3: 1}, + 3: {0: 1, 1: 1, 2: 1}} + + See Also + -------- + local_node_connectivity + node_connectivity + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + """ + if nbunch is None: + nbunch = G + else: + nbunch = set(nbunch) + + directed = G.is_directed() + if directed: + iter_func = itertools.permutations + else: + iter_func = itertools.combinations + + all_pairs = {n: {} for n in nbunch} + + for u, v in iter_func(nbunch, 2): + k = local_node_connectivity(G, u, v, cutoff=cutoff) + all_pairs[u][v] = k + if not directed: + all_pairs[v][u] = k + + return all_pairs + + +def _bidirectional_shortest_path(G, source, target, exclude): + """Returns shortest path between source and target ignoring nodes in the + container 'exclude'. + + Parameters + ---------- + + G : NetworkX graph + + source : node + Starting node for path + + target : node + Ending node for path + + exclude: container + Container for nodes to exclude from the search for shortest paths + + Returns + ------- + path: list + Shortest path between source and target ignoring nodes in 'exclude' + + Raises + ------ + NetworkXNoPath + If there is no path or if nodes are adjacent and have only one path + between them + + Notes + ----- + This function and its helper are originally from + networkx.algorithms.shortest_paths.unweighted and are modified to + accept the extra parameter 'exclude', which is a container for nodes + already used in other paths that should be ignored. + + References + ---------- + .. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for + Node-Independent Paths. 
Santa Fe Institute Working Paper #01-07-035 + http://eclectic.ss.uci.edu/~drwhite/working.pdf + + """ + # call helper to do the real work + results = _bidirectional_pred_succ(G, source, target, exclude) + pred, succ, w = results + + # build path from pred+w+succ + path = [] + # from source to w + while w is not None: + path.append(w) + w = pred[w] + path.reverse() + # from w to target + w = succ[path[-1]] + while w is not None: + path.append(w) + w = succ[w] + + return path + + +def _bidirectional_pred_succ(G, source, target, exclude): + # does BFS from both source and target and meets in the middle + # excludes nodes in the container "exclude" from the search + + # handle either directed or undirected + if G.is_directed(): + Gpred = G.predecessors + Gsucc = G.successors + else: + Gpred = G.neighbors + Gsucc = G.neighbors + + # predecessor and successors in search + pred = {source: None} + succ = {target: None} + + # initialize fringes, start with forward + forward_fringe = [source] + reverse_fringe = [target] + + level = 0 + + while forward_fringe and reverse_fringe: + # Make sure that we iterate one step forward and one step backwards + # thus source and target will only trigger "found path" when they are + # adjacent and then they can be safely included in the container 'exclude' + level += 1 + if level % 2 != 0: + this_level = forward_fringe + forward_fringe = [] + for v in this_level: + for w in Gsucc(v): + if w in exclude: + continue + if w not in pred: + forward_fringe.append(w) + pred[w] = v + if w in succ: + return pred, succ, w # found path + else: + this_level = reverse_fringe + reverse_fringe = [] + for v in this_level: + for w in Gpred(v): + if w in exclude: + continue + if w not in succ: + succ[w] = v + reverse_fringe.append(w) + if w in pred: + return pred, succ, w # found path + + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py new file mode 100644 index 0000000..7eb744c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/density.py @@ -0,0 +1,396 @@ +"""Fast algorithms for the densest subgraph problem""" + +import math + +import networkx as nx + +__all__ = ["densest_subgraph"] + + +def _greedy_plus_plus(G, iterations): + if G.number_of_edges() == 0: + return 0.0, set() + if iterations < 1: + raise ValueError( + f"The number of iterations must be an integer >= 1. Provided: {iterations}" + ) + + loads = dict.fromkeys(G.nodes, 0) # Load vector for Greedy++. + best_density = 0.0 # Highest density encountered. + best_subgraph = set() # Nodes of the best subgraph found. + + for _ in range(iterations): + # Initialize heap for fast access to minimum weighted degree. + heap = nx.utils.BinaryHeap() + + # Compute initial weighted degrees and add nodes to the heap. + for node, degree in G.degree: + heap.insert(node, loads[node] + degree) + # Set up tracking for current graph state. + remaining_nodes = set(G.nodes) + num_edges = G.number_of_edges() + current_degrees = dict(G.degree) + + while remaining_nodes: + num_nodes = len(remaining_nodes) + + # Current density of the (implicit) graph + current_density = num_edges / num_nodes + + # Update the best density. + if current_density > best_density: + best_density = current_density + best_subgraph = set(remaining_nodes) + + # Pop the node with the smallest weighted degree. 
+ node, _ = heap.pop() + if node not in remaining_nodes: + continue # Skip nodes already removed. + + # Update the load of the popped node. + loads[node] += current_degrees[node] + + # Update neighbors' degrees and the heap. + for neighbor in G.neighbors(node): + if neighbor in remaining_nodes: + current_degrees[neighbor] -= 1 + num_edges -= 1 + heap.insert(neighbor, loads[neighbor] + current_degrees[neighbor]) + + # Remove the node from the remaining nodes. + remaining_nodes.remove(node) + + return best_density, best_subgraph + + +def _fractional_peeling(G, b, x, node_to_idx, edge_to_idx): + """ + Optimized fractional peeling using NumPy arrays. + + Parameters + ---------- + G : networkx.Graph + The input graph. + b : numpy.ndarray + Induced load vector. + x : numpy.ndarray + Fractional edge values. + node_to_idx : dict + Mapping from node to index. + edge_to_idx : dict + Mapping from edge to index. + + Returns + ------- + best_density : float + The best density found. + best_subgraph : set + The subset of nodes defining the densest subgraph. + """ + heap = nx.utils.BinaryHeap() + + remaining_nodes = set(G.nodes) + + # Initialize heap with b values + for idx in remaining_nodes: + heap.insert(idx, b[idx]) + + num_edges = G.number_of_edges() + + best_density = 0.0 + best_subgraph = set() + + while remaining_nodes: + num_nodes = len(remaining_nodes) + current_density = num_edges / num_nodes + + if current_density > best_density: + best_density = current_density + best_subgraph = set(remaining_nodes) + + # Pop the node with the smallest b + node, _ = heap.pop() + while node not in remaining_nodes: + node, _ = heap.pop() # Clean the heap from stale values + + # Update neighbors b values by subtracting fractional x value + for neighbor in G.neighbors(node): + if neighbor in remaining_nodes: + neighbor_idx = node_to_idx[neighbor] + # Take off fractional value + b[neighbor_idx] -= x[edge_to_idx[(neighbor, node)]] + num_edges -= 1 + heap.insert(neighbor, b[neighbor_idx]) + + remaining_nodes.remove(node) # peel off node + + return best_density, best_subgraph + + +def _fista(G, iterations): + if G.number_of_edges() == 0: + return 0.0, set() + if iterations < 1: + raise ValueError( + f"The number of iterations must be an integer >= 1. Provided: {iterations}" + ) + import numpy as np + + # 1. Node Mapping: Assign a unique index to each node and edge + node_to_idx = {node: idx for idx, node in enumerate(G)} + num_nodes = G.number_of_nodes() + num_undirected_edges = G.number_of_edges() + + # 2. Edge Mapping: Assign a unique index to each bidirectional edge + bidirectional_edges = [(u, v) for u, v in G.edges] + [(v, u) for u, v in G.edges] + edge_to_idx = {edge: idx for idx, edge in enumerate(bidirectional_edges)} + + num_edges = len(bidirectional_edges) + + # 3. Reverse Edge Mapping: Map each (bidirectional) edge to its reverse edge index + reverse_edge_idx = np.empty(num_edges, dtype=np.int32) + for idx in range(num_undirected_edges): + reverse_edge_idx[idx] = num_undirected_edges + idx + for idx in range(num_undirected_edges, 2 * num_undirected_edges): + reverse_edge_idx[idx] = idx - num_undirected_edges + + # 4. Initialize Variables as NumPy Arrays + x = np.full(num_edges, 0.5, dtype=np.float32) + y = x.copy() + z = np.zeros(num_edges, dtype=np.float32) + b = np.zeros(num_nodes, dtype=np.float32) # Induced load vector + tk = 1.0 # Momentum term + + # 5. Precompute Edge Source Indices + edge_src_indices = np.array( + [node_to_idx[u] for u, _ in bidirectional_edges], dtype=np.int32 + ) + + # 6. 
Compute Learning Rate + max_degree = max(deg for _, deg in G.degree) + # 0.9 for floating point errs when max_degree is very large + learning_rate = 0.9 / max_degree + + # 7. Iterative Updates + for _ in range(iterations): + # 7a. Update b: sum y over outgoing edges for each node + b[:] = 0.0 # Reset b to zero + np.add.at(b, edge_src_indices, y) # b_u = \sum_{v : (u,v) \in E(G)} y_{uv} + + # 7b. Compute z, z_{uv} = y_{uv} - 2 * learning_rate * b_u + z = y - 2.0 * learning_rate * b[edge_src_indices] + + # 7c. Update Momentum Term + tknew = (1.0 + math.sqrt(1 + 4.0 * tk**2)) / 2.0 + + # 7d. Update x in a vectorized manner, x_{uv} = (z_{uv} - z_{vu} + 1.0) / 2.0 + new_xuv = (z - z[reverse_edge_idx] + 1.0) / 2.0 + clamped_x = np.clip(new_xuv, 0.0, 1.0) # Clamp x_{uv} between 0 and 1 + + # Update y using the FISTA update formula (similar to gradient descent) + y = ( + clamped_x + + ((tk - 1.0) / tknew) * (clamped_x - x) + + (tk / tknew) * (clamped_x - y) + ) + + # Update x + x = clamped_x + + # Update tk, the momemntum term + tk = tknew + + # Rebalance the b values! Otherwise performance is a bit suboptimal. + b[:] = 0.0 + np.add.at(b, edge_src_indices, x) # b_u = \sum_{v : (u,v) \in E(G)} x_{uv} + + # Extract the actual (approximate) dense subgraph. + return _fractional_peeling(G, b, x, node_to_idx, edge_to_idx) + + +ALGORITHMS = {"greedy++": _greedy_plus_plus, "fista": _fista} + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable +def densest_subgraph(G, iterations=1, *, method="fista"): + r"""Returns an approximate densest subgraph for a graph `G`. + + This function runs an iterative algorithm to find the densest subgraph, + and returns both the density and the subgraph. For a discussion on the + notion of density used and the different algorithms available on + networkx, please see the Notes section below. + + Parameters + ---------- + G : NetworkX graph + Undirected graph. + + iterations : int, optional (default=1) + Number of iterations to use for the iterative algorithm. Can be + specified positionally or as a keyword argument. + + method : string, optional (default='fista') + The algorithm to use to approximate the densest subgraph. Supported + options: 'greedy++' by Boob et al. [2]_ and 'fista' by Harb et al. [3]_. + Must be specified as a keyword argument. Other inputs produce a + ValueError. + + Returns + ------- + d : float + The density of the approximate subgraph found. + + S : set + The subset of nodes defining the approximate densest subgraph. + + Examples + -------- + >>> G = nx.star_graph(4) + >>> nx.approximation.densest_subgraph(G, iterations=1) + (0.8, {0, 1, 2, 3, 4}) + + Notes + ----- + **Problem Definition:** + The densest subgraph problem (DSG) asks to find the subgraph + $S \subseteq V(G)$ with maximum density. For a subset of the nodes of + $G$, $S \subseteq V(G)$, define $E(S) = \{ (u,v) : (u,v)\in E(G), + u\in S, v\in S \}$ as the set of edges with both endpoints in $S$. + The density of $S$ is defined as $|E(S)|/|S|$, the ratio between the + edges in the subgraph $G[S]$ and the number of nodes in that subgraph. + Note that this is different from the standard graph theoretic definition + of density, defined as $\frac{2|E(S)|}{|S|(|S|-1)}$, for historical + reasons. + + **Exact Algorithms:** + The densest subgraph problem is polynomial time solvable using maximum + flow, commonly referred to as Goldberg's algorithm. However, the + algorithm is quite involved. 
It first binary searches on the optimal + density, $d^\ast$. For a guess of the density $d$, it sets up a flow + network $G'$ with size $O(m)$. The maximum flow solution either + informs the algorithm that no subgraph with density $d$ exists, or it + provides a subgraph with density at least $d$. However, this is + inherently bottlenecked by the maximum flow algorithm. For example, [2]_ + notes that Goldberg’s algorithm was not feasible on many large graphs + even though they used a highly optimized maximum flow library. + + **Charikar's Greedy Peeling:** + While exact solution algorithms are quite involved, there are several + known approximation algorithms for the densest subgraph problem. + + Charikar [1]_ described a very simple 1/2-approximation algorithm for DSG + known as the greedy "peeling" algorithm. The algorithm creates an + ordering of the nodes as follows. The first node $v_1$ is the one with + the smallest degree in $G$ (ties broken arbitrarily). It selects + $v_2$ to be the smallest degree node in $G \setminus v_1$. Letting + $G_i$ be the graph after removing $v_1, ..., v_i$ (with $G_0=G$), + the algorithm returns the graph among $G_0, ..., G_n$ with the highest + density. + + **Greedy++:** + Boob et al. [2]_ generalized this algorithm into Greedy++, an iterative + algorithm that runs several rounds of "peeling". In fact, Greedy++ with 1 + iteration is precisely Charikar's algorithm. The algorithm converges to a + $(1-\epsilon)$ approximate densest subgraph in $O(\Delta(G)\log + n/\epsilon^2)$ iterations, where $\Delta(G)$ is the maximum degree, + and $n$ is the number of nodes in $G$. The algorithm also has other + desirable properties as shown by [4]_ and [5]_. + + **FISTA Algorithm:** + Harb et al. [3]_ gave a faster and more scalable algorithm using ideas + from quadratic programming for the densest subgraph, which is based on a + fast iterative shrinkage-thresholding algorithm (FISTA) algorithm. It is + known that computing the densest subgraph can be formulated as the + following convex optimization problem: + + Minimize $\sum_{u \in V(G)} b_u^2$ + + Subject to: + + $b_u = \sum_{v: \{u,v\} \in E(G)} x_{uv}$ for all $u \in V(G)$ + + $x_{uv} + x_{vu} = 1.0$ for all $\{u,v\} \in E(G)$ + + $x_{uv} \geq 0, x_{vu} \geq 0$ for all $\{u,v\} \in E(G)$ + + Here, $x_{uv}$ represents the fraction of edge $\{u,v\}$ assigned to + $u$, and $x_{vu}$ to $v$. + + The FISTA algorithm efficiently solves this convex program using gradient + descent with projections. For a learning rate $\alpha$, the algorithm + does: + + 1. **Initialization**: Set $x^{(0)}_{uv} = x^{(0)}_{vu} = 0.5$ for all + edges as a feasible solution. + + 2. **Gradient Update**: For iteration $k\geq 1$, set + $x^{(k+1)}_{uv} = x^{(k)}_{uv} - 2 \alpha \sum_{v: \{u,v\} \in E(G)} + x^{(k)}_{uv}$. However, now $x^{(k+1)}_{uv}$ might be infeasible! + To ensure feasibility, we project $x^{(k+1)}_{uv}$. + + 3. **Projection to the Feasible Set**: Compute + $b^{(k+1)}_u = \sum_{v: \{u,v\} \in E(G)} x^{(k)}_{uv}$ for all + nodes $u$. Define $z^{(k+1)}_{uv} = x^{(k+1)}_{uv} - 2 \alpha + b^{(k+1)}_u$. Update $x^{(k+1)}_{uv} = + CLAMP((z^{(k+1)}_{uv} - z^{(k+1)}_{vu} + 1.0) / 2.0)$, where + $CLAMP(x) = \max(0, \min(1, x))$. + + With a learning rate of $\alpha=1/\Delta(G)$, where $\Delta(G)$ is + the maximum degree, the algorithm converges to the optimum solution of + the convex program. 
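A minimal sketch on a graph whose optimum is known in closed form: the densest subgraph of $K_4$ is the whole graph, with density $6/4 = 1.5$ (``greedy++`` is shown here because its answer on $K_4$ does not depend on tie-breaking):

>>> import networkx as nx
>>> G = nx.complete_graph(4)
>>> nx.approximation.densest_subgraph(G, iterations=3, method="greedy++")
(1.5, {0, 1, 2, 3})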
+ + **Fractional Peeling:** + To obtain a **discrete** subgraph, we use fractional peeling, an + adaptation of the standard peeling algorithm which peels the minimum + degree vertex in each iteration, and returns the densest subgraph found + along the way. Here, we instead peel the vertex with the smallest + induced load $b_u$: + + 1. Compute $b_u$ and $x_{uv}$. + + 2. Iteratively remove the vertex with the smallest $b_u$, updating its + neighbors' load by $x_{vu}$. + + Fractional peeling transforms the approximately optimal fractional + values $b_u, x_{uv}$ into a discrete subgraph. Unlike traditional + peeling, which removes the lowest-degree node, this method accounts for + fractional edge contributions from the convex program. + + This approach is both scalable and theoretically sound, ensuring a quick + approximation of the densest subgraph while leveraging fractional load + balancing. + + References + ---------- + .. [1] Charikar, Moses. "Greedy approximation algorithms for finding dense + components in a graph." In International workshop on approximation + algorithms for combinatorial optimization, pp. 84-95. Berlin, Heidelberg: + Springer Berlin Heidelberg, 2000. + + .. [2] Boob, Digvijay, Yu Gao, Richard Peng, Saurabh Sawlani, Charalampos + Tsourakakis, Di Wang, and Junxing Wang. "Flowless: Extracting densest + subgraphs without flow computations." In Proceedings of The Web Conference + 2020, pp. 573-583. 2020. + + .. [3] Harb, Elfarouk, Kent Quanrud, and Chandra Chekuri. "Faster and scalable + algorithms for densest subgraph and decomposition." Advances in Neural + Information Processing Systems 35 (2022): 26966-26979. + + .. [4] Harb, Elfarouk, Kent Quanrud, and Chandra Chekuri. "Convergence to + lexicographically optimal base in a (contra) polymatroid and applications + to densest subgraph and tree packing." arXiv preprint arXiv:2305.02987 + (2023). + + .. [5] Chekuri, Chandra, Kent Quanrud, and Manuel R. Torres. "Densest + subgraph: Supermodularity, iterative peeling, and flow." In Proceedings of + the 2022 Annual ACM-SIAM Symposium on Discrete Algorithms (SODA), pp. + 1531-1555. Society for Industrial and Applied Mathematics, 2022. + """ + try: + algo = ALGORITHMS[method] + except KeyError as e: + raise ValueError(f"{method} is not a valid choice for an algorithm.") from e + + return algo(G, iterations) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py new file mode 100644 index 0000000..d5847e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/distance_measures.py @@ -0,0 +1,150 @@ +"""Distance measures approximated metrics.""" + +import networkx as nx +from networkx.utils.decorators import py_random_state + +__all__ = ["diameter"] + + +@py_random_state(1) +@nx._dispatchable(name="approximate_diameter") +def diameter(G, seed=None): + """Returns a lower bound on the diameter of the graph G. + + The function computes a lower bound on the diameter (i.e., the maximum eccentricity) + of a directed or undirected graph G. The procedure used varies depending on the graph + being directed or not. + + If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_. + The main idea is to pick the farthest node from a random node and return its eccentricity. 
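On a path graph the farthest node from any start vertex is an endpoint, whose eccentricity equals the true diameter, so for the undirected case just described the 2-sweep lower bound is tight for every seed (a minimal sketch):

>>> import networkx as nx
>>> from networkx.algorithms import approximation as approx
>>> G = nx.path_graph(10)
>>> approx.diameter(G, seed=42)
9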
+ + Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_, + The procedure starts by selecting a random source node $s$ from which it performs a + forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and + backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using + a backward BFS and the forward eccentricity of $a_2$ using a forward BFS. + Finally, it returns the best lower bound between the two. + + In both cases, the time complexity is linear with respect to the size of G. + + Parameters + ---------- + G : NetworkX graph + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + d : integer + Lower Bound on the Diameter of G + + Examples + -------- + >>> G = nx.path_graph(10) # undirected graph + >>> nx.diameter(G) + 9 + >>> G = nx.cycle_graph(3, create_using=nx.DiGraph) # directed graph + >>> nx.diameter(G) + 2 + + Raises + ------ + NetworkXError + If the graph is empty or + If the graph is undirected and not connected or + If the graph is directed and not strongly connected. + + See Also + -------- + networkx.algorithms.distance_measures.diameter + + References + ---------- + .. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib. + *Fast computation of empirically tight bounds for the diameter of massive graphs.* + Journal of Experimental Algorithmics (JEA), 2009. + https://arxiv.org/pdf/0904.2728.pdf + .. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino. + *On computing the diameter of real-world directed (weighted) graphs.* + International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012. + https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf + """ + # if G is empty + if not G: + raise nx.NetworkXError("Expected non-empty NetworkX graph!") + # if there's only a node + if G.number_of_nodes() == 1: + return 0 + # if G is directed + if G.is_directed(): + return _two_sweep_directed(G, seed) + # else if G is undirected + return _two_sweep_undirected(G, seed) + + +def _two_sweep_undirected(G, seed): + """Helper function for finding a lower bound on the diameter + for undirected Graphs. + + The idea is to pick the farthest node from a random node + and return its eccentricity. + + ``G`` is a NetworkX undirected graph. + + .. note:: + + ``seed`` is a random.Random or numpy.random.RandomState instance + """ + # select a random source node + source = seed.choice(list(G)) + # get the distances to the other nodes + distances = nx.shortest_path_length(G, source) + # if some nodes have not been visited, then the graph is not connected + if len(distances) != len(G): + raise nx.NetworkXError("Graph not connected.") + # take a node that is (one of) the farthest nodes from the source + *_, node = distances + # return the eccentricity of the node + return nx.eccentricity(G, node) + + +def _two_sweep_directed(G, seed): + """Helper function for finding a lower bound on the diameter + for directed Graphs. + + It implements 2-dSweep, the directed version of the 2-sweep algorithm. + The algorithm follows the following steps. + 1. Select a source node $s$ at random. + 2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum + distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$. + 3. 
Perform a backward BFS from $s$ to select a node $a_2$ at the maximum + distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$. + 4. Return the maximum between $LB_1$ and $LB_2$. + + ``G`` is a NetworkX directed graph. + + .. note:: + + ``seed`` is a random.Random or numpy.random.RandomState instance + """ + # get a new digraph G' with the edges reversed in the opposite direction + G_reversed = G.reverse() + # select a random source node + source = seed.choice(list(G)) + # compute forward distances from source + forward_distances = nx.shortest_path_length(G, source) + # compute backward distances from source + backward_distances = nx.shortest_path_length(G_reversed, source) + # if either the source can't reach every node or not every node + # can reach the source, then the graph is not strongly connected + n = len(G) + if len(forward_distances) != n or len(backward_distances) != n: + raise nx.NetworkXError("DiGraph not strongly connected.") + # take a node a_1 at the maximum distance from the source in G + *_, a_1 = forward_distances + # take a node a_2 at the maximum distance from the source in G_reversed + *_, a_2 = backward_distances + # return the max between the backward eccentricity of a_1 and the forward eccentricity of a_2 + return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py new file mode 100644 index 0000000..e568a82 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/dominating_set.py @@ -0,0 +1,149 @@ +"""Functions for finding node and edge dominating sets. + +A `dominating set`_ for an undirected graph *G* with vertex set *V* +and edge set *E* is a subset *D* of *V* such that every vertex not in +*D* is adjacent to at least one member of *D*. An `edge dominating set`_ +is a subset *F* of *E* such that every edge not in *F* is +incident to an endpoint of at least one edge in *F*. + +.. _dominating set: https://en.wikipedia.org/wiki/Dominating_set +.. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set + +""" + +import networkx as nx + +from ...utils import not_implemented_for +from ..matching import maximal_matching + +__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] + + +# TODO Why doesn't this algorithm work for directed graphs? +@not_implemented_for("directed") +@nx._dispatchable(node_attrs="weight") +def min_weighted_dominating_set(G, weight=None): + r"""Returns a dominating set that approximates the minimum weight node + dominating set. + + Parameters + ---------- + G : NetworkX graph + Undirected graph. + + weight : string + The node attribute storing the weight of an node. If provided, + the node attribute with this key must be a number for each + node. If not provided, each node is assumed to have weight one. + + Returns + ------- + min_weight_dominating_set : set + A set of nodes, the sum of whose weights is no more than `(\log + w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of + each node in the graph and `w(V^*)` denotes the sum of the + weights of each node in the minimum weight dominating set. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)]) + >>> nx.approximation.min_weighted_dominating_set(G) + {1, 2, 4} + + Raises + ------ + NetworkXNotImplemented + If G is directed. 
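Any returned set can be validated directly against the dominating-set definition above (a minimal editorial sketch):

>>> import networkx as nx
>>> G = nx.path_graph(5)
>>> D = nx.approximation.min_weighted_dominating_set(G)
>>> all(v in D or any(u in D for u in G[v]) for v in G)  # every node dominated
True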
+ + Notes + ----- + This algorithm computes an approximate minimum weighted dominating + set for the graph `G`. The returned solution has weight `(\log + w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each + node in the graph and `w(V^*)` denotes the sum of the weights of + each node in the minimum weight dominating set for the graph. + + This implementation of the algorithm runs in $O(m)$ time, where $m$ + is the number of edges in the graph. + + References + ---------- + .. [1] Vazirani, Vijay V. + *Approximation Algorithms*. + Springer Science & Business Media, 2001. + + """ + # The unique dominating set for the null graph is the empty set. + if len(G) == 0: + return set() + + # This is the dominating set that will eventually be returned. + dom_set = set() + + def _cost(node_and_neighborhood): + """Returns the cost-effectiveness of greedily choosing the given + node. + + `node_and_neighborhood` is a two-tuple comprising a node and its + closed neighborhood. + + """ + v, neighborhood = node_and_neighborhood + return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set) + + # This is a set of all vertices not already covered by the + # dominating set. + vertices = set(G) + # This is a dictionary mapping each node to the closed neighborhood + # of that node. + neighborhoods = {v: {v} | set(G[v]) for v in G} + + # Continue until all vertices are adjacent to some node in the + # dominating set. + while vertices: + # Find the most cost-effective node to add, along with its + # closed neighborhood. + dom_node, min_set = min(neighborhoods.items(), key=_cost) + # Add the node to the dominating set and reduce the remaining + # set of nodes to cover. + dom_set.add(dom_node) + del neighborhoods[dom_node] + vertices -= min_set + + return dom_set + + +@nx._dispatchable +def min_edge_dominating_set(G): + r"""Returns minimum cardinality edge dominating set. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + min_edge_dominating_set : set + Returns a set of dominating edges whose size is no more than 2 * OPT. + + Examples + -------- + >>> G = nx.petersen_graph() + >>> nx.approximation.min_edge_dominating_set(G) + {(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)} + + Raises + ------ + ValueError + If the input graph `G` is empty. + + Notes + ----- + The algorithm computes an approximate solution to the edge dominating set + problem. The result is no more than 2 * OPT in terms of size of the set. + Runtime of the algorithm is $O(|E|)$. 
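Because the result is a maximal matching, every edge of the graph must share an endpoint with it; a minimal sketch of that check:

>>> import networkx as nx
>>> G = nx.cycle_graph(6)
>>> F = nx.approximation.min_edge_dominating_set(G)
>>> all(any(u in e or v in e for e in F) for u, v in G.edges)  # every edge dominated
True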
+ """ + if not G: + raise ValueError("Expected non-empty NetworkX graph!") + return maximal_matching(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py new file mode 100644 index 0000000..fe52e39 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/kcomponents.py @@ -0,0 +1,367 @@ +"""Fast approximation for k-component structure""" + +import itertools +from collections import defaultdict +from collections.abc import Mapping +from functools import cached_property + +import networkx as nx +from networkx.algorithms.approximation import local_node_connectivity +from networkx.exception import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = ["k_components"] + + +@not_implemented_for("directed") +@nx._dispatchable(name="approximate_k_components") +def k_components(G, min_density=0.95): + r"""Returns the approximate k-component structure of a graph G. + + A `k`-component is a maximal subgraph of a graph G that has, at least, + node connectivity `k`: we need to remove at least `k` nodes to break it + into more components. `k`-components have an inherent hierarchical + structure because they are nested in terms of connectivity: a connected + graph can contain several 2-components, each of which can contain + one or more 3-components, and so forth. + + This implementation is based on the fast heuristics to approximate + the `k`-component structure of a graph [1]_. Which, in turn, it is based on + a fast approximation algorithm for finding good lower bounds of the number + of node independent paths between two nodes [2]_. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + min_density : Float + Density relaxation threshold. Default value 0.95 + + Returns + ------- + k_components : dict + Dictionary with connectivity level `k` as key and a list of + sets of nodes that form a k-component of level `k` as values. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + >>> # Petersen graph has 10 nodes and it is triconnected, thus all + >>> # nodes are in a single component on all three connectivity levels + >>> from networkx.algorithms import approximation as apxa + >>> G = nx.petersen_graph() + >>> k_components = apxa.k_components(G) + + Notes + ----- + The logic of the approximation algorithm for computing the `k`-component + structure [1]_ is based on repeatedly applying simple and fast algorithms + for `k`-cores and biconnected components in order to narrow down the + number of pairs of nodes over which we have to compute White and Newman's + approximation algorithm for finding node independent paths [2]_. More + formally, this algorithm is based on Whitney's theorem, which states + an inclusion relation among node connectivity, edge connectivity, and + minimum degree for any graph G. This theorem implies that every + `k`-component is nested inside a `k`-edge-component, which in turn, + is contained in a `k`-core. Thus, this algorithm computes node independent + paths among pairs of nodes in each biconnected part of each `k`-core, + and repeats this procedure for each `k` from 3 to the maximal core number + of a node in the input graph. + + Because, in practice, many nodes of the core of level `k` inside a + bicomponent actually are part of a component of level k, the auxiliary + graph needed for the algorithm is likely to be very dense. 
Thus, we use + a complement graph data structure (see `AntiGraph`) to save memory. + AntiGraph only stores information of the edges that are *not* present + in the actual auxiliary graph. When applying algorithms to this + complement graph data structure, it behaves as if it were the dense + version. + + See also + -------- + k_components + + References + ---------- + .. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion: + Visualization and Heuristics for Fast Computation. + https://arxiv.org/pdf/1503.04476v1 + + .. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for + Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035 + https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind + + .. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness: + A hierarchical conception of social groups. + American Sociological Review 68(1), 103--28. + https://doi.org/10.2307/3088904 + + """ + # Dictionary with connectivity level (k) as keys and a list of + # sets of nodes that form a k-component as values + k_components = defaultdict(list) + # make a few functions local for speed + node_connectivity = local_node_connectivity + k_core = nx.k_core + core_number = nx.core_number + biconnected_components = nx.biconnected_components + combinations = itertools.combinations + # Exact solution for k = {1,2} + # There is a linear time algorithm for triconnectivity, if we had an + # implementation available we could start from k = 4. + for component in nx.connected_components(G): + # isolated nodes have connectivity 0 + comp = set(component) + if len(comp) > 1: + k_components[1].append(comp) + for bicomponent in nx.biconnected_components(G): + # avoid considering dyads as bicomponents + bicomp = set(bicomponent) + if len(bicomp) > 2: + k_components[2].append(bicomp) + # There is no k-component of k > maximum core number + # \kappa(G) <= \lambda(G) <= \delta(G) + g_cnumber = core_number(G) + max_core = max(g_cnumber.values()) + for k in range(3, max_core + 1): + C = k_core(G, k, core_number=g_cnumber) + for nodes in biconnected_components(C): + # Build a subgraph SG induced by the nodes that are part of + # each biconnected component of the k-core subgraph C. 
+ if len(nodes) < k: + continue + SG = G.subgraph(nodes) + # Build auxiliary graph + H = _AntiGraph() + H.add_nodes_from(SG.nodes()) + for u, v in combinations(SG, 2): + K = node_connectivity(SG, u, v, cutoff=k) + if k > K: + H.add_edge(u, v) + for h_nodes in biconnected_components(H): + if len(h_nodes) <= k: + continue + SH = H.subgraph(h_nodes) + for Gc in _cliques_heuristic(SG, SH, k, min_density): + for k_nodes in biconnected_components(Gc): + Gk = nx.k_core(SG.subgraph(k_nodes), k) + if len(Gk) <= k: + continue + k_components[k].append(set(Gk)) + return k_components + + +def _cliques_heuristic(G, H, k, min_density): + h_cnumber = nx.core_number(H) + for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)): + cands = {n for n, c in h_cnumber.items() if c == c_value} + # Skip checking for overlap for the highest core value + if i == 0: + overlap = False + else: + overlap = set.intersection( + *[{x for x in H[n] if x not in cands} for n in cands] + ) + if overlap and len(overlap) < k: + SH = H.subgraph(cands | overlap) + else: + SH = H.subgraph(cands) + sh_cnumber = nx.core_number(SH) + SG = nx.k_core(G.subgraph(SH), k) + while not (_same(sh_cnumber) and nx.density(SH) >= min_density): + # This subgraph must be writable => .copy() + SH = H.subgraph(SG).copy() + if len(SH) <= k: + break + sh_cnumber = nx.core_number(SH) + sh_deg = dict(SH.degree()) + min_deg = min(sh_deg.values()) + SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg) + SG = nx.k_core(G.subgraph(SH), k) + else: + yield SG + + +def _same(measure, tol=0): + vals = set(measure.values()) + if (max(vals) - min(vals)) <= tol: + return True + return False + + +class _AntiGraph(nx.Graph): + """ + Class for complement graphs. + + The main goal is to be able to work with big and dense graphs with + a low memory footprint. + + In this class you add the edges that *do not exist* in the dense graph, + the report methods of the class return the neighbors, the edges and + the degree as if it was the dense graph. Thus it's possible to use + an instance of this class with some of NetworkX functions. In this + case we only use k-core, connected_components, and biconnected_components. + """ + + all_edge_dict = {"weight": 1} + + def single_edge_dict(self): + return self.all_edge_dict + + edge_attr_dict_factory = single_edge_dict # type: ignore[assignment] + + def __getitem__(self, n): + """Returns a dict of neighbors of node n in the dense graph. + + Parameters + ---------- + n : node + A node in the graph. + + Returns + ------- + adj_dict : dictionary + The adjacency dictionary for nodes connected to n. + + """ + all_edge_dict = self.all_edge_dict + return dict.fromkeys(set(self._adj) - set(self._adj[n]) - {n}, all_edge_dict) + + def neighbors(self, n): + """Returns an iterator over all neighbors of node n in the + dense graph. 
+ """ + try: + return iter(set(self._adj) - set(self._adj[n]) - {n}) + except KeyError as err: + raise NetworkXError(f"The node {n} is not in the graph.") from err + + class AntiAtlasView(Mapping): + """An adjacency inner dict for AntiGraph""" + + def __init__(self, graph, node): + self._graph = graph + self._atlas = graph._adj[node] + self._node = node + + def __len__(self): + return len(self._graph) - len(self._atlas) - 1 + + def __iter__(self): + return (n for n in self._graph if n not in self._atlas and n != self._node) + + def __getitem__(self, nbr): + nbrs = set(self._graph._adj) - set(self._atlas) - {self._node} + if nbr in nbrs: + return self._graph.all_edge_dict + raise KeyError(nbr) + + class AntiAdjacencyView(AntiAtlasView): + """An adjacency outer dict for AntiGraph""" + + def __init__(self, graph): + self._graph = graph + self._atlas = graph._adj + + def __len__(self): + return len(self._atlas) + + def __iter__(self): + return iter(self._graph) + + def __getitem__(self, node): + if node not in self._graph: + raise KeyError(node) + return self._graph.AntiAtlasView(self._graph, node) + + @cached_property + def adj(self): + return self.AntiAdjacencyView(self) + + def subgraph(self, nodes): + """This subgraph method returns a full AntiGraph. Not a View""" + nodes = set(nodes) + G = _AntiGraph() + G.add_nodes_from(nodes) + for n in G: + Gnbrs = G.adjlist_inner_dict_factory() + G._adj[n] = Gnbrs + for nbr, d in self._adj[n].items(): + if nbr in G._adj: + Gnbrs[nbr] = d + G._adj[nbr][n] = d + G.graph = self.graph + return G + + class AntiDegreeView(nx.reportviews.DegreeView): + def __iter__(self): + all_nodes = set(self._succ) + for n in self._nodes: + nbrs = all_nodes - set(self._succ[n]) - {n} + yield (n, len(nbrs)) + + def __getitem__(self, n): + nbrs = set(self._succ) - set(self._succ[n]) - {n} + # AntiGraph is a ThinGraph so all edges have weight 1 + return len(nbrs) + (n in nbrs) + + @cached_property + def degree(self): + """Returns an iterator for (node, degree) and degree for single node. + + The node degree is the number of edges adjacent to the node. + + Parameters + ---------- + nbunch : iterable container, optional (default=all nodes) + A container of nodes. The container will be iterated + through once. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + deg: + Degree of the node, if a single node is passed as argument. + nd_iter : an iterator + The iterator returns two-tuples of (node, degree). + + See Also + -------- + degree + + Examples + -------- + >>> G = nx.path_graph(4) + >>> G.degree(0) # node 0 with degree 1 + 1 + >>> list(G.degree([0, 1])) + [(0, 1), (1, 2)] + + """ + return self.AntiDegreeView(self) + + def adjacency(self): + """Returns an iterator of (node, adjacency set) tuples for all nodes + in the dense graph. + + This is the fastest way to look at every edge. + For directed graphs, only outgoing adjacencies are included. + + Returns + ------- + adj_iter : iterator + An iterator of (node, adjacency set) for all nodes in + the graph. 
+ + """ + for n in self._adj: + yield (n, set(self._adj) - set(self._adj[n]) - {n}) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py new file mode 100644 index 0000000..dc08919 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/matching.py @@ -0,0 +1,44 @@ +""" +************** +Graph Matching +************** + +Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent +edges; that is, no two edges share a common vertex. + +`Wikipedia: Matching `_ +""" + +import networkx as nx + +__all__ = ["min_maximal_matching"] + + +@nx._dispatchable +def min_maximal_matching(G): + r"""Returns the minimum maximal matching of G. That is, out of all maximal + matchings of the graph G, the smallest is returned. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + min_maximal_matching : set + Returns a set of edges such that no two edges share a common endpoint + and every edge not in the set shares some common endpoint in the set. + Cardinality will be 2*OPT in the worst case. + + Notes + ----- + The algorithm computes an approximate solution for the minimum maximal + cardinality matching problem. The solution is no more than 2 * OPT in size. + Runtime is $O(|E|)$. + + References + ---------- + .. [1] Vazirani, Vijay Approximation Algorithms (2001) + """ + return nx.maximal_matching(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py new file mode 100644 index 0000000..f4e1da8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/maxcut.py @@ -0,0 +1,143 @@ +import networkx as nx +from networkx.utils.decorators import not_implemented_for, py_random_state + +__all__ = ["randomized_partitioning", "one_exchange"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(1) +@nx._dispatchable(edge_attrs="weight") +def randomized_partitioning(G, seed=None, p=0.5, weight=None): + """Compute a random partitioning of the graph nodes and its cut value. + + A partitioning is calculated by observing each node + and deciding to add it to the partition with probability `p`, + returning a random cut and its corresponding value (the + sum of weights of edges connecting different partitions). + + Parameters + ---------- + G : NetworkX graph + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + p : scalar + Probability for each node to be part of the first partition. + Should be in [0,1] + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + cut_size : scalar + Value of the minimum cut. + + partition : pair of node sets + A partitioning of the nodes that defines a minimum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1) + >>> cut_size + 6 + >>> partition + ({0, 3, 4}, {1, 2}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. 
+ """ + cut = {node for node in G.nodes() if seed.random() < p} + cut_size = nx.algorithms.cut_size(G, cut, weight=weight) + partition = (cut, G.nodes - cut) + return cut_size, partition + + +def _swap_node_partition(cut, node): + return cut - {node} if node in cut else cut.union({node}) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(2) +@nx._dispatchable(edge_attrs="weight") +def one_exchange(G, initial_cut=None, seed=None, weight=None): + """Compute a partitioning of the graphs nodes and the corresponding cut value. + + Use a greedy one exchange strategy to find a locally maximal cut + and its value, it works by finding the best node (one that gives + the highest gain to the cut value) to add to the current cut + and repeats this process until no improvement can be made. + + Parameters + ---------- + G : networkx Graph + Graph to find a maximum cut for. + + initial_cut : set + Cut to use as a starting point. If not supplied the algorithm + starts with an empty cut. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + cut_value : scalar + Value of the maximum cut. + + partition : pair of node sets + A partitioning of the nodes that defines a maximum cut. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1) + >>> curr_cut_size + 6 + >>> partition + ({0, 2}, {1, 3, 4}) + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + """ + if initial_cut is None: + initial_cut = set() + cut = set(initial_cut) + current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight) + while True: + nodes = list(G.nodes()) + # Shuffling the nodes ensures random tie-breaks in the following call to max + seed.shuffle(nodes) + best_node_to_swap = max( + nodes, + key=lambda v: nx.algorithms.cut_size( + G, _swap_node_partition(cut, v), weight=weight + ), + default=None, + ) + potential_cut = _swap_node_partition(cut, best_node_to_swap) + potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight) + + if potential_cut_size > current_cut_size: + cut = potential_cut + current_cut_size = potential_cut_size + else: + break + + partition = (cut, G.nodes - cut) + return current_cut_size, partition diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py new file mode 100644 index 0000000..0552e4a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/ramsey.py @@ -0,0 +1,53 @@ +""" +Ramsey numbers. +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +from ...utils import arbitrary_element + +__all__ = ["ramsey_R2"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def ramsey_R2(G): + r"""Compute the largest clique and largest independent set in `G`. + + This can be used to estimate bounds for the 2-color + Ramsey number `R(2;s,t)` for `G`. + + This is a recursive implementation which could run into trouble + for large recursions. Note that self-loop edges are ignored. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + max_pair : (set, set) tuple + Maximum clique, Maximum independent set. 
+ + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + """ + if not G: + return set(), set() + + node = arbitrary_element(G) + nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node) + nnbrs = nx.non_neighbors(G, node) + c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy()) + c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy()) + + c_1.add(node) + i_2.add(node) + # Choose the larger of the two cliques and the larger of the two + # independent sets, according to cardinality. + return max(c_1, c_2, key=len), max(i_1, i_2, key=len) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py new file mode 100644 index 0000000..4bee11c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/steinertree.py @@ -0,0 +1,248 @@ +from itertools import chain + +import networkx as nx +from networkx.utils import not_implemented_for, pairwise + +__all__ = ["metric_closure", "steiner_tree"] + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight", returns_graph=True) +def metric_closure(G, weight="weight"): + """Return the metric closure of a graph. + + The metric closure of a graph *G* is the complete graph in which each edge + is weighted by the shortest path distance between the nodes in *G* . + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + NetworkX graph + Metric closure of the graph `G`. + + """ + M = nx.Graph() + + Gnodes = set(G) + + # check for connected graph while processing first node + all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight) + u, (distance, path) = next(all_paths_iter) + if len(G) != len(distance): + msg = "G is not a connected graph. metric_closure is not defined." + raise nx.NetworkXError(msg) + Gnodes.remove(u) + for v in Gnodes: + M.add_edge(u, v, distance=distance[v], path=path[v]) + + # first node done -- now process the rest + for u, (distance, path) in all_paths_iter: + Gnodes.remove(u) + for v in Gnodes: + M.add_edge(u, v, distance=distance[v], path=path[v]) + + return M + + +def _mehlhorn_steiner_tree(G, terminal_nodes, weight): + paths = nx.multi_source_dijkstra_path(G, terminal_nodes) + + d_1 = {} + s = {} + for v in G.nodes(): + s[v] = paths[v][0] + d_1[(v, s[v])] = len(paths[v]) - 1 + + # G1-G4 names match those from the Mehlhorn 1988 paper. 
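+    # Sketch of the stages: G1' connects the nearest terminals s[u], s[v]
+    # through every original edge (u, v), keeping the cheapest such route;
+    # G2 is a minimum spanning tree of G1'; G3 expands each G2 edge back
+    # into a shortest path in G; the MST of G3, with non-terminal leaves
+    # pruned, is the returned approximation (G4).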
+ G_1_prime = nx.Graph() + for u, v, data in G.edges(data=True): + su, sv = s[u], s[v] + weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)] + if not G_1_prime.has_edge(su, sv): + G_1_prime.add_edge(su, sv, weight=weight_here) + else: + new_weight = min(weight_here, G_1_prime[su][sv]["weight"]) + G_1_prime.add_edge(su, sv, weight=new_weight) + + G_2 = nx.minimum_spanning_edges(G_1_prime, data=True) + + G_3 = nx.Graph() + for u, v, d in G_2: + path = nx.shortest_path(G, u, v, weight) + for n1, n2 in pairwise(path): + G_3.add_edge(n1, n2) + + G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False)) + if G.is_multigraph(): + G_3_mst = ( + (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst + ) + G_4 = G.edge_subgraph(G_3_mst).copy() + _remove_nonterminal_leaves(G_4, terminal_nodes) + return G_4.edges() + + +def _kou_steiner_tree(G, terminal_nodes, weight): + # Compute the metric closure only for terminal nodes + # Create a complete graph H from the metric edges + H = nx.Graph() + unvisited_terminals = set(terminal_nodes) + + # check for connected graph while processing first node + u = unvisited_terminals.pop() + distances, paths = nx.single_source_dijkstra(G, source=u, weight=weight) + if len(G) != len(distances): + msg = "G is not a connected graph." + raise nx.NetworkXError(msg) + for v in unvisited_terminals: + H.add_edge(u, v, distance=distances[v], path=paths[v]) + + # first node done -- now process the rest + for u in unvisited_terminals.copy(): + distances, paths = nx.single_source_dijkstra(G, source=u, weight=weight) + unvisited_terminals.remove(u) + for v in unvisited_terminals: + H.add_edge(u, v, distance=distances[v], path=paths[v]) + + # Use the 'distance' attribute of each edge provided by H. + mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True) + + # Create an iterator over each edge in each shortest path; repeats are okay + mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges) + if G.is_multigraph(): + mst_all_edges = ( + (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) + for u, v in mst_all_edges + ) + + # Find the MST again, over this new set of edges + G_S = G.edge_subgraph(mst_all_edges) + T_S = nx.minimum_spanning_edges(G_S, weight="weight", data=False) + + # Leaf nodes that are not terminal might still remain; remove them here + T_H = G.edge_subgraph(T_S).copy() + _remove_nonterminal_leaves(T_H, terminal_nodes) + + return T_H.edges() + + +def _remove_nonterminal_leaves(G, terminals): + terminal_set = set(terminals) + leaves = {n for n in G if len(set(G[n]) - {n}) == 1} + nonterminal_leaves = leaves - terminal_set + + while nonterminal_leaves: + # Removing a node may create new non-terminal leaves, so we limit + # search for candidate non-terminal nodes to neighbors of current + # non-terminal nodes + candidate_leaves = set.union(*(set(G[n]) for n in nonterminal_leaves)) + candidate_leaves -= nonterminal_leaves | terminal_set + # Remove current set of non-terminal nodes + G.remove_nodes_from(nonterminal_leaves) + # Find any new non-terminal nodes from the set of candidates + leaves = {n for n in candidate_leaves if len(set(G[n]) - {n}) == 1} + nonterminal_leaves = leaves - terminal_set + + +ALGORITHMS = { + "kou": _kou_steiner_tree, + "mehlhorn": _mehlhorn_steiner_tree, +} + + +@not_implemented_for("directed") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def steiner_tree(G, terminal_nodes, weight="weight", method=None): + r"""Return an approximation to the minimum 
Steiner tree of a graph.
+
+    The minimum Steiner tree of `G` w.r.t. a set of `terminal_nodes` (also *S*)
+    is a tree within `G` that spans those nodes and has minimum size (sum of
+    edge weights) among all such trees.
+
+    The approximation algorithm is specified with the `method` keyword
+    argument. Both available algorithms produce a tree whose weight is
+    within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
+    where ``l`` is the minimum number of leaf nodes across all possible Steiner
+    trees.
+
+    * ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
+      the subgraph of the metric closure of *G* induced by the terminal nodes,
+      where the metric closure of *G* is the complete graph in which each edge is
+      weighted by the shortest path distance between the nodes in *G*.
+
+    * ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
+      algorithm, beginning by finding the closest terminal node for each
+      non-terminal. This data is used to create a complete graph containing only
+      the terminal nodes, in which each edge is weighted with the shortest path
+      distance between them. The algorithm then proceeds in the same way as Kou
+      et al.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    terminal_nodes : list
+        A list of terminal nodes for which the minimum Steiner tree is
+        to be found.
+
+    weight : string (default = 'weight')
+        Use the edge attribute specified by this string as the edge weight.
+        Any edge attribute not present defaults to 1.
+
+    method : string, optional (default = 'mehlhorn')
+        The algorithm to use to approximate the Steiner tree.
+        Supported options: 'kou', 'mehlhorn'.
+        Other inputs produce a ValueError.
+
+    Returns
+    -------
+    NetworkX graph
+        Approximation to the minimum Steiner tree of `G` induced by
+        `terminal_nodes`.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is directed.
+
+    ValueError
+        If the specified `method` is not supported.
+
+    Notes
+    -----
+    For multigraphs, the edge between two nodes with minimum weight is the
+    edge put into the Steiner tree.
+
+    References
+    ----------
+    .. [1] Steiner_tree_problem on Wikipedia.
+       https://en.wikipedia.org/wiki/Steiner_tree_problem
+    .. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
+       ‘A Fast Algorithm for Steiner Trees’.
+       Acta Informatica 15 (2): 141–45.
+       https://doi.org/10.1007/BF00288961.
+    .. [3] Mehlhorn, Kurt. 1988.
+       ‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
+       Information Processing Letters 27 (3): 125–28.
+       https://doi.org/10.1016/0020-0190(88)90066-X.
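+
+    Examples
+    --------
+    A small sketch: with unit weights, the Steiner tree for the two ends
+    of a path is the path itself.
+
+    >>> G = nx.path_graph(5)
+    >>> T = steiner_tree(G, [0, 4])
+    >>> sorted(T.edges())
+    [(0, 1), (1, 2), (2, 3), (3, 4)]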
+ """ + if method is None: + method = "mehlhorn" + + try: + algo = ALGORITHMS[method] + except KeyError as e: + raise ValueError(f"{method} is not a valid choice for an algorithm.") from e + + edges = algo(G, terminal_nodes, weight) + # For multigraph we should add the minimal weight edge keys + if G.is_multigraph(): + edges = ( + (u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges + ) + T = G.edge_subgraph(edges) + return T diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b1f63d1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-312.pyc new file mode 100644 index 0000000..008b3b7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_approx_clust_coeff.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-312.pyc new file mode 100644 index 0000000..be73c6b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_clique.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-312.pyc new file mode 100644 index 0000000..586f21b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_density.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_density.cpython-312.pyc new file mode 100644 index 0000000..abc1758 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_density.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-312.pyc new file mode 100644 index 0000000..8172c9d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_distance_measures.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-312.pyc new file mode 100644 index 0000000..6323a73 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_dominating_set.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-312.pyc new file mode 100644 index 0000000..db785cb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_kcomponents.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-312.pyc new file mode 100644 index 0000000..ce3d8b4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_matching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-312.pyc new file mode 100644 index 0000000..18824e5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_maxcut.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-312.pyc new file mode 100644 index 0000000..3009c3b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_ramsey.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-312.pyc new file mode 100644 index 0000000..adde4dd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_steinertree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-312.pyc new file mode 100644 index 0000000..5d07e2b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_traveling_salesman.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-312.pyc new file mode 100644 index 0000000..a6a7b59 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_treewidth.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-312.pyc
new file mode 100644
index 0000000..4ee18de
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/__pycache__/test_vertex_cover.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
new file mode 100644
index 0000000..5eab5c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_approx_clust_coeff.py
@@ -0,0 +1,41 @@
+import networkx as nx
+from networkx.algorithms.approximation import average_clustering
+
+# This approximation has to be exact in regular graphs
+# with no triangles or with all possible triangles.
+
+
+def test_petersen():
+    # Actual coefficient is 0
+    G = nx.petersen_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_petersen_seed():
+    # Actual coefficient is 0
+    G = nx.petersen_graph()
+    assert average_clustering(G, trials=len(G) // 2, seed=1) == nx.average_clustering(G)
+
+
+def test_tetrahedral():
+    # Actual coefficient is 1
+    G = nx.tetrahedral_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_dodecahedral():
+    # Actual coefficient is 0
+    G = nx.dodecahedral_graph()
+    assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
+
+
+def test_empty():
+    G = nx.empty_graph(5)
+    assert average_clustering(G, trials=len(G) // 2) == 0
+
+
+def test_complete():
+    G = nx.complete_graph(5)
+    assert average_clustering(G, trials=len(G) // 2) == 1
+    G = nx.complete_graph(7)
+    assert average_clustering(G, trials=len(G) // 2) == 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py
new file mode 100644
index 0000000..b40dcb9
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_clique.py
@@ -0,0 +1,112 @@
+"""Unit tests for the :mod:`networkx.algorithms.approximation.clique` module."""
+
+import networkx as nx
+from networkx.algorithms.approximation import (
+    clique_removal,
+    large_clique_size,
+    max_clique,
+    maximum_independent_set,
+)
+
+
+def is_independent_set(G, nodes):
+    """Returns True if and only if `nodes` is an independent set
+    in `G`.
+
+    `G` is a NetworkX graph. `nodes` is an iterable of nodes in
+    `G`.
+
+    """
+    return G.subgraph(nodes).number_of_edges() == 0
+
+
+def is_clique(G, nodes):
+    """Returns True if and only if `nodes` is a clique in `G`.
+
+    `G` is an undirected simple graph. `nodes` is an iterable of
+    nodes in `G`.
+
+    """
+    H = G.subgraph(nodes)
+    n = len(H)
+    return H.number_of_edges() == n * (n - 1) // 2
+
+
+class TestCliqueRemoval:
+    """Unit tests for the
+    :func:`~networkx.algorithms.approximation.clique_removal` function.
+
+    """
+
+    def test_trivial_graph(self):
+        G = nx.trivial_graph()
+        independent_set, cliques = clique_removal(G)
+        assert is_independent_set(G, independent_set)
+        assert all(is_clique(G, clique) for clique in cliques)
+        # In fact, we should only have 1-cliques, that is, singleton nodes.
+ assert all(len(clique) == 1 for clique in cliques) + + def test_complete_graph(self): + G = nx.complete_graph(10) + independent_set, cliques = clique_removal(G) + assert is_independent_set(G, independent_set) + assert all(is_clique(G, clique) for clique in cliques) + + def test_barbell_graph(self): + G = nx.barbell_graph(10, 5) + independent_set, cliques = clique_removal(G) + assert is_independent_set(G, independent_set) + assert all(is_clique(G, clique) for clique in cliques) + + +class TestMaxClique: + """Unit tests for the :func:`networkx.algorithms.approximation.max_clique` + function. + + """ + + def test_null_graph(self): + G = nx.null_graph() + assert len(max_clique(G)) == 0 + + def test_complete_graph(self): + graph = nx.complete_graph(30) + # this should return the entire graph + mc = max_clique(graph) + assert 30 == len(mc) + + def test_maximal_by_cardinality(self): + """Tests that the maximal clique is computed according to maximum + cardinality of the sets. + + For more information, see pull request #1531. + + """ + G = nx.complete_graph(5) + G.add_edge(4, 5) + clique = max_clique(G) + assert len(clique) > 1 + + G = nx.lollipop_graph(30, 2) + clique = max_clique(G) + assert len(clique) > 2 + + +def test_large_clique_size(): + G = nx.complete_graph(9) + nx.add_cycle(G, [9, 10, 11]) + G.add_edge(8, 9) + G.add_edge(1, 12) + G.add_node(13) + + assert large_clique_size(G) == 9 + G.remove_node(5) + assert large_clique_size(G) == 8 + G.remove_edge(2, 3) + assert large_clique_size(G) == 7 + + +def test_independent_set(): + # smoke test + G = nx.Graph() + assert len(maximum_independent_set(G)) == 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py new file mode 100644 index 0000000..887db20 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_connectivity.py @@ -0,0 +1,199 @@ +import pytest + +import networkx as nx +from networkx.algorithms import approximation as approx + + +def test_global_node_connectivity(): + # Figure 1 chapter on Connectivity + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) + assert 2 == approx.local_node_connectivity(G, 1, 11) + assert 2 == approx.node_connectivity(G) + assert 2 == approx.node_connectivity(G, 1, 11) + + +def test_white_harary1(): + # Figure 1b white and harary (2001) + # A graph with high adhesion (edge connectivity) and low cohesion + # (node connectivity) + G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) + G.remove_node(7) + for i in range(4, 7): + G.add_edge(0, i) + G = nx.disjoint_union(G, nx.complete_graph(4)) + G.remove_node(G.order() - 1) + for i in range(7, 10): + G.add_edge(0, i) + assert 1 == approx.node_connectivity(G) + + +def test_complete_graphs(): + for n in range(5, 25, 5): + G = nx.complete_graph(n) + assert n - 1 == approx.node_connectivity(G) + assert n - 1 == approx.node_connectivity(G, 0, 3) + + +def test_empty_graphs(): + for k in range(5, 25, 5): + G = nx.empty_graph(k) + assert 0 == approx.node_connectivity(G) + assert 0 == approx.node_connectivity(G, 0, 3) + + +def test_petersen(): + G = nx.petersen_graph() + assert 3 == approx.node_connectivity(G) + assert 3 == approx.node_connectivity(G, 0, 5) + + +# 
Approximation fails with tutte graph +# def test_tutte(): +# G = nx.tutte_graph() +# assert_equal(3, approx.node_connectivity(G)) + + +def test_dodecahedral(): + G = nx.dodecahedral_graph() + assert 3 == approx.node_connectivity(G) + assert 3 == approx.node_connectivity(G, 0, 5) + + +def test_octahedral(): + G = nx.octahedral_graph() + assert 4 == approx.node_connectivity(G) + assert 4 == approx.node_connectivity(G, 0, 5) + + +# Approximation can fail with icosahedral graph depending +# on iteration order. +# def test_icosahedral(): +# G=nx.icosahedral_graph() +# assert_equal(5, approx.node_connectivity(G)) +# assert_equal(5, approx.node_connectivity(G, 0, 5)) + + +def test_only_source(): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, s=0) + + +def test_only_target(): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, t=0) + + +def test_missing_source(): + G = nx.path_graph(4) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1) + + +def test_missing_target(): + G = nx.path_graph(4) + pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10) + + +def test_source_equals_target(): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0) + + +def test_directed_node_connectivity(): + G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction + D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges + assert 1 == approx.node_connectivity(G) + assert 1 == approx.node_connectivity(G, 1, 4) + assert 2 == approx.node_connectivity(D) + assert 2 == approx.node_connectivity(D, 1, 4) + + +class TestAllPairsNodeConnectivityApprox: + @classmethod + def setup_class(cls): + cls.path = nx.path_graph(7) + cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.gnp = nx.gnp_random_graph(30, 0.1) + cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True) + cls.K20 = nx.complete_graph(20) + cls.K10 = nx.complete_graph(10) + cls.K5 = nx.complete_graph(5) + cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] + + def test_cycles(self): + K_undir = approx.all_pairs_node_connectivity(self.cycle) + for source in K_undir: + for target, k in K_undir[source].items(): + assert k == 2 + K_dir = approx.all_pairs_node_connectivity(self.directed_cycle) + for source in K_dir: + for target, k in K_dir[source].items(): + assert k == 1 + + def test_complete(self): + for G in [self.K10, self.K5, self.K20]: + K = approx.all_pairs_node_connectivity(G) + for source in K: + for target, k in K[source].items(): + assert k == len(G) - 1 + + def test_paths(self): + K_undir = approx.all_pairs_node_connectivity(self.path) + for source in K_undir: + for target, k in K_undir[source].items(): + assert k == 1 + K_dir = approx.all_pairs_node_connectivity(self.directed_path) + for source in K_dir: + for target, k in K_dir[source].items(): + if source < target: + assert k == 1 + else: + assert k == 0 + + def test_cutoff(self): + for G in [self.K10, self.K5, self.K20]: + for mp in [2, 3, 4]: + paths = approx.all_pairs_node_connectivity(G, cutoff=mp) + for source in paths: + for target, K in paths[source].items(): + assert K == mp + + def test_all_pairs_connectivity_nbunch(self): + G = nx.complete_graph(5) + nbunch = [0, 2, 3] + C = approx.all_pairs_node_connectivity(G, 
nbunch=nbunch)
+        assert len(C) == len(nbunch)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_density.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_density.py
new file mode 100644
index 0000000..5cf1dc5
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_density.py
@@ -0,0 +1,138 @@
+import pytest
+
+import networkx as nx
+import networkx.algorithms.approximation as approx
+
+
+def close_cliques_example(d=12, D=300, h=24, k=2):
+    """
+    Hard example from Harb, Elfarouk, Kent Quanrud, and Chandra Chekuri.
+    "Faster and scalable algorithms for densest subgraph and decomposition."
+    Advances in Neural Information Processing Systems 35 (2022): 26966-26979.
+    """
+    Kh = nx.complete_graph(h)
+    KdD = nx.complete_bipartite_graph(d, D)
+    G = nx.disjoint_union_all([KdD] + [Kh for _ in range(k)])
+    best_density = d * D / (d + D)  # of the complete bipartite graph
+    return G, best_density, set(KdD.nodes)
+
+
+@pytest.mark.parametrize("iterations", (1, 3))
+@pytest.mark.parametrize("n", range(4, 7))
+@pytest.mark.parametrize("method", ("greedy++", "fista"))
+def test_star(n, iterations, method):
+    if method == "fista":
+        pytest.importorskip("numpy")
+
+    G = nx.star_graph(n)
+    # The densest subgraph of a star network is the entire graph.
+    # The peeling algorithm would peel all the vertices with degree 1,
+    # and so should discover the densest subgraph in one iteration!
+    d, S = approx.densest_subgraph(G, iterations=iterations, method=method)
+
+    assert d == pytest.approx(G.number_of_edges() / G.number_of_nodes())
+    assert S == set(G)  # The entire graph!
+
+
+@pytest.mark.parametrize("method", ("greedy++", "fista"))
+def test_greedy_plus_plus_complete_graph(method):
+    if method == "fista":
+        pytest.importorskip("numpy")
+
+    G = nx.complete_graph(4)
+    # The densest subgraph of a complete graph is the entire graph, with
+    # density C(4, 2)/4 where C(n, 2) is n*(n-1)//2. The peeling algorithm
+    # would find the densest subgraph in one iteration!
+    d, S = approx.densest_subgraph(G, iterations=1, method=method)
+
+    assert d == pytest.approx(6 / 4)  # The density: C(4, 2)/4 = 6/4 = 1.5.
+    assert S == {0, 1, 2, 3}  # The entire graph!
+
+
+def test_greedy_plus_plus_close_cliques():
+    G, best_density, densest_set = close_cliques_example()
+    # NOTE: iterations=185 fails to ID the densest subgraph
+    greedy_pp, S_pp = approx.densest_subgraph(G, iterations=186, method="greedy++")
+
+    assert greedy_pp == pytest.approx(best_density)
+    assert S_pp == densest_set
+
+
+def test_fista_close_cliques():
+    pytest.importorskip("numpy")
+    G, best_density, best_set = close_cliques_example()
+    # NOTE: iterations=12 fails to ID the densest subgraph
+    density, dense_set = approx.densest_subgraph(G, iterations=13, method="fista")
+
+    assert density == pytest.approx(best_density)
+    assert dense_set == best_set
+
+
+def bipartite_and_clique_example(d=5, D=200, k=2):
+    """
+    Hard example from: Boob, Digvijay, Yu Gao, Richard Peng, Saurabh Sawlani,
+    Charalampos Tsourakakis, Di Wang, and Junxing Wang. "Flowless: Extracting
+    densest subgraphs without flow computations." In Proceedings of The Web
+    Conference 2020, pp. 573-583. 2020.
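+
+    With the defaults (d=5, D=200, k=2) the densest subgraph is the
+    bipartite block, with density d*D/(d+D) ~ 4.88, while each K_{d+2}
+    clique only reaches density (d+1)/2 = 3; a single peeling round is
+    fooled into returning the whole graph.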
+ """ + B = nx.complete_bipartite_graph(d, D) + H = [nx.complete_graph(d + 2) for _ in range(k)] + G = nx.disjoint_union_all([B] + H) + + best_density = d * D / (d + D) # of the complete bipartite graph + correct_one_round_density = (2 * d * D + (d + 1) * (d + 2) * k) / ( + 2 * d + 2 * D + 2 * k * (d + 2) + ) + best_subgraph = set(B.nodes) + return G, best_density, best_subgraph, correct_one_round_density + + +def test_greedy_plus_plus_bipartite_and_clique(): + G, best_density, best_subgraph, correct_one_iter_density = ( + bipartite_and_clique_example() + ) + one_round_density, S_one = approx.densest_subgraph( + G, iterations=1, method="greedy++" + ) + assert one_round_density == pytest.approx(correct_one_iter_density) + assert S_one == set(G.nodes) + + ten_round_density, S_ten = approx.densest_subgraph( + G, iterations=10, method="greedy++" + ) + assert ten_round_density == pytest.approx(best_density) + assert S_ten == best_subgraph + + +def test_fista_bipartite_and_clique(): + pytest.importorskip("numpy") + G, best_density, best_subgraph, _ = bipartite_and_clique_example() + + ten_round_density, S_ten = approx.densest_subgraph(G, iterations=10, method="fista") + assert ten_round_density == pytest.approx(best_density) + assert S_ten == best_subgraph + + +def test_fista_big_dataset(): + pytest.importorskip("numpy") + G, best_density, best_subgraph = close_cliques_example(d=30, D=2000, h=60, k=20) + + # Note: iterations=12 fails to identify densest subgraph + density, dense_set = approx.densest_subgraph(G, iterations=13, method="fista") + + assert density == pytest.approx(best_density) + assert dense_set == best_subgraph + + +@pytest.mark.parametrize("iterations", (1, 3)) +def test_greedy_plus_plus_edgeless_cornercase(iterations): + G = nx.Graph() + assert approx.densest_subgraph(G, iterations=iterations, method="greedy++") == ( + 0, + set(), + ) + G.add_nodes_from(range(4)) + assert approx.densest_subgraph(G, iterations=iterations, method="greedy++") == ( + 0, + set(), + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py new file mode 100644 index 0000000..3809a8f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_distance_measures.py @@ -0,0 +1,59 @@ +"""Unit tests for the :mod:`networkx.algorithms.approximation.distance_measures` module.""" + +import pytest + +import networkx as nx +from networkx.algorithms.approximation import diameter + + +class TestDiameter: + """Unit tests for the approximate diameter function + :func:`~networkx.algorithms.approximation.distance_measures.diameter`. + """ + + def test_null_graph(self): + """Test empty graph.""" + G = nx.null_graph() + with pytest.raises( + nx.NetworkXError, match="Expected non-empty NetworkX graph!" 
+ ): + diameter(G) + + def test_undirected_non_connected(self): + """Test an undirected disconnected graph.""" + graph = nx.path_graph(10) + graph.remove_edge(3, 4) + with pytest.raises(nx.NetworkXError, match="Graph not connected."): + diameter(graph) + + def test_directed_non_strongly_connected(self): + """Test a directed non strongly connected graph.""" + graph = nx.path_graph(10, create_using=nx.DiGraph()) + with pytest.raises(nx.NetworkXError, match="DiGraph not strongly connected."): + diameter(graph) + + def test_complete_undirected_graph(self): + """Test a complete undirected graph.""" + graph = nx.complete_graph(10) + assert diameter(graph) == 1 + + def test_complete_directed_graph(self): + """Test a complete directed graph.""" + graph = nx.complete_graph(10, create_using=nx.DiGraph()) + assert diameter(graph) == 1 + + def test_undirected_path_graph(self): + """Test an undirected path graph with 10 nodes.""" + graph = nx.path_graph(10) + assert diameter(graph) == 9 + + def test_directed_path_graph(self): + """Test a directed path graph with 10 nodes.""" + graph = nx.path_graph(10).to_directed() + assert diameter(graph) == 9 + + def test_single_node(self): + """Test a graph which contains just a node.""" + graph = nx.Graph() + graph.add_node(1) + assert diameter(graph) == 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py new file mode 100644 index 0000000..6b90d85 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_dominating_set.py @@ -0,0 +1,78 @@ +import pytest + +import networkx as nx +from networkx.algorithms.approximation import ( + min_edge_dominating_set, + min_weighted_dominating_set, +) + + +class TestMinWeightDominatingSet: + def test_min_weighted_dominating_set(self): + graph = nx.Graph() + graph.add_edge(1, 2) + graph.add_edge(1, 5) + graph.add_edge(2, 3) + graph.add_edge(2, 5) + graph.add_edge(3, 4) + graph.add_edge(3, 6) + graph.add_edge(5, 6) + + vertices = {1, 2, 3, 4, 5, 6} + # due to ties, this might be hard to test tight bounds + dom_set = min_weighted_dominating_set(graph) + for vertex in vertices - dom_set: + neighbors = set(graph.neighbors(vertex)) + assert len(neighbors & dom_set) > 0, "Non dominating set found!" + + def test_star_graph(self): + """Tests that an approximate dominating set for the star graph, + even when the center node does not have the smallest integer + label, gives just the center node. + + For more information, see #1527. + + """ + # Create a star graph in which the center node has the highest + # label instead of the lowest. + G = nx.star_graph(10) + G = nx.relabel_nodes(G, {0: 9, 9: 0}) + assert min_weighted_dominating_set(G) == {9} + + def test_null_graph(self): + """Tests that the unique dominating set for the null graph is an empty set""" + G = nx.Graph() + assert min_weighted_dominating_set(G) == set() + + def test_min_edge_dominating_set(self): + graph = nx.path_graph(5) + dom_set = min_edge_dominating_set(graph) + + # this is a crappy way to test, but good enough for now. + for edge in graph.edges(): + if edge in dom_set: + continue + else: + u, v = edge + found = False + for dom_edge in dom_set: + found |= u == dom_edge[0] or u == dom_edge[1] + assert found, "Non adjacent edge found!" + + graph = nx.complete_graph(10) + dom_set = min_edge_dominating_set(graph) + + # this is a crappy way to test, but good enough for now. 
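+        # Edge domination check: every edge outside dom_set should share an
+        # endpoint with some chosen edge (only endpoint u is tested here,
+        # which is why this is a weak check).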
+ for edge in graph.edges(): + if edge in dom_set: + continue + else: + u, v = edge + found = False + for dom_edge in dom_set: + found |= u == dom_edge[0] or u == dom_edge[1] + assert found, "Non adjacent edge found!" + + graph = nx.Graph() # empty Networkx graph + with pytest.raises(ValueError, match="Expected non-empty NetworkX graph!"): + min_edge_dominating_set(graph) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py new file mode 100644 index 0000000..65ba802 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_kcomponents.py @@ -0,0 +1,303 @@ +# Test for approximation to k-components algorithm +import pytest + +import networkx as nx +from networkx.algorithms.approximation import k_components +from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same + + +def build_k_number_dict(k_components): + k_num = {} + for k, comps in sorted(k_components.items()): + for comp in comps: + for node in comp: + k_num[node] = k + return k_num + + +## +# Some nice synthetic graphs +## + + +def graph_example_1(): + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") + labels = {v: k for k, v in rlabels.items()} + + for nodes in [ + (labels[(0, 0)], labels[(1, 0)]), + (labels[(0, 4)], labels[(1, 4)]), + (labels[(3, 0)], labels[(4, 0)]), + (labels[(3, 4)], labels[(4, 4)]), + ]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing a node + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + G.add_edge(new_node + 16, new_node + 5) + return G + + +def torrents_and_ferraro_graph(): + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") + labels = {v: k for k, v in rlabels.items()} + + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing a node + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + # Commenting this makes the graph not biconnected !! 
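+        # (i.e. the edge below reattaches the second K5 so the whole
+        # construction stays biconnected, which the k-component tests assume)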
+ # This stupid mistake make one reviewer very angry :P + G.add_edge(new_node + 16, new_node + 8) + + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing two nodes + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + nbrs2 = G[new_node + 9] + G.remove_node(new_node + 9) + for nbr in nbrs2: + G.add_edge(new_node + 18, nbr) + return G + + +# Helper function + + +def _check_connectivity(G): + result = k_components(G) + for k, components in result.items(): + if k < 3: + continue + for component in components: + C = G.subgraph(component) + K = nx.node_connectivity(C) + assert K >= k + + +def test_torrents_and_ferraro_graph(): + G = torrents_and_ferraro_graph() + _check_connectivity(G) + + +def test_example_1(): + G = graph_example_1() + _check_connectivity(G) + + +def test_karate_0(): + G = nx.karate_club_graph() + _check_connectivity(G) + + +def test_karate_1(): + karate_k_num = { + 0: 4, + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 3, + 7: 4, + 8: 4, + 9: 2, + 10: 3, + 11: 1, + 12: 2, + 13: 4, + 14: 2, + 15: 2, + 16: 2, + 17: 2, + 18: 2, + 19: 3, + 20: 2, + 21: 2, + 22: 2, + 23: 3, + 24: 3, + 25: 3, + 26: 2, + 27: 3, + 28: 3, + 29: 3, + 30: 4, + 31: 3, + 32: 4, + 33: 4, + } + approx_karate_k_num = karate_k_num.copy() + approx_karate_k_num[24] = 2 + approx_karate_k_num[25] = 2 + G = nx.karate_club_graph() + k_comps = k_components(G) + k_num = build_k_number_dict(k_comps) + assert k_num in (karate_k_num, approx_karate_k_num) + + +def test_example_1_detail_3_and_4(): + G = graph_example_1() + result = k_components(G) + # In this example graph there are 8 3-components, 4 with 15 nodes + # and 4 with 5 nodes. + assert len(result[3]) == 8 + assert len([c for c in result[3] if len(c) == 15]) == 4 + assert len([c for c in result[3] if len(c) == 5]) == 4 + # There are also 8 4-components all with 5 nodes. + assert len(result[4]) == 8 + assert all(len(c) == 5 for c in result[4]) + # Finally check that the k-components detected have actually node + # connectivity >= k. 
+ for k, components in result.items(): + if k < 3: + continue + for component in components: + K = nx.node_connectivity(G.subgraph(component)) + assert K >= k + + +def test_directed(): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.gnp_random_graph(10, 0.4, directed=True) + kc = k_components(G) + + +def test_same(): + equal = {"A": 2, "B": 2, "C": 2} + slightly_different = {"A": 2, "B": 1, "C": 2} + different = {"A": 2, "B": 8, "C": 18} + assert _same(equal) + assert not _same(slightly_different) + assert _same(slightly_different, tol=1) + assert not _same(different) + assert not _same(different, tol=4) + + +class TestAntiGraph: + @classmethod + def setup_class(cls): + cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42) + cls.Anp = _AntiGraph(nx.complement(cls.Gnp)) + cls.Gd = nx.davis_southern_women_graph() + cls.Ad = _AntiGraph(nx.complement(cls.Gd)) + cls.Gk = nx.karate_club_graph() + cls.Ak = _AntiGraph(nx.complement(cls.Gk)) + cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)] + + def test_size(self): + for G, A in self.GA: + n = G.order() + s = len(list(G.edges())) + len(list(A.edges())) + assert s == (n * (n - 1)) / 2 + + def test_degree(self): + for G, A in self.GA: + assert sorted(G.degree()) == sorted(A.degree()) + + def test_core_number(self): + for G, A in self.GA: + assert nx.core_number(G) == nx.core_number(A) + + def test_connected_components(self): + # ccs are same unless isolated nodes or any node has degree=len(G)-1 + # graphs in self.GA avoid this problem + for G, A in self.GA: + gc = [set(c) for c in nx.connected_components(G)] + ac = [set(c) for c in nx.connected_components(A)] + for comp in ac: + assert comp in gc + + def test_adj(self): + for G, A in self.GA: + for n, nbrs in G.adj.items(): + a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items()) + g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items()) + assert a_adj == g_adj + + def test_adjacency(self): + for G, A in self.GA: + a_adj = list(A.adjacency()) + for n, nbrs in G.adjacency(): + assert (n, set(nbrs)) in a_adj + + def test_neighbors(self): + for G, A in self.GA: + node = list(G.nodes())[0] + assert set(G.neighbors(node)) == set(A.neighbors(node)) + + def test_node_not_in_graph(self): + for G, A in self.GA: + node = "non_existent_node" + pytest.raises(nx.NetworkXError, A.neighbors, node) + pytest.raises(nx.NetworkXError, G.neighbors, node) + + def test_degree_thingraph(self): + for G, A in self.GA: + node = list(G.nodes())[0] + nodes = list(G.nodes())[1:4] + assert G.degree(node) == A.degree(node) + assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree()) + # AntiGraph is a ThinGraph, so all the weights are 1 + assert sum(d for n, d in A.degree()) == sum( + d for n, d in A.degree(weight="weight") + ) + assert sum(d for n, d in G.degree(nodes)) == sum( + d for n, d in A.degree(nodes) + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py new file mode 100644 index 0000000..f50da3d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_matching.py @@ -0,0 +1,8 @@ +import networkx as nx +import networkx.algorithms.approximation as a + + +def test_min_maximal_matching(): + # smoke test + G = nx.Graph() + assert len(a.min_maximal_matching(G)) == 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py new file mode 100644 index 0000000..ef04244 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_maxcut.py @@ -0,0 +1,94 @@ +import random + +import pytest + +import networkx as nx +from networkx.algorithms.approximation import maxcut + + +@pytest.mark.parametrize( + "f", (nx.approximation.randomized_partitioning, nx.approximation.one_exchange) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + +def _is_valid_cut(G, set1, set2): + union = set1.union(set2) + assert union == set(G.nodes) + assert len(set1) + len(set2) == G.number_of_nodes() + + +def _cut_is_locally_optimal(G, cut_size, set1): + # test if cut can be locally improved + for i, node in enumerate(set1): + cut_size_without_node = nx.algorithms.cut_size( + G, set1 - {node}, weight="weight" + ) + assert cut_size_without_node <= cut_size + + +def test_random_partitioning(): + G = nx.complete_graph(5) + _, (set1, set2) = maxcut.randomized_partitioning(G, seed=5) + _is_valid_cut(G, set1, set2) + + +def test_random_partitioning_all_to_one(): + G = nx.complete_graph(5) + _, (set1, set2) = maxcut.randomized_partitioning(G, p=1) + _is_valid_cut(G, set1, set2) + assert len(set1) == G.number_of_nodes() + assert len(set2) == 0 + + +def test_one_exchange_basic(): + G = nx.complete_graph(5) + random.seed(5) + for u, v, w in G.edges(data=True): + w["weight"] = random.randrange(-100, 100, 1) / 10 + + initial_cut = set(random.sample(sorted(G.nodes()), k=5)) + cut_size, (set1, set2) = maxcut.one_exchange( + G, initial_cut, weight="weight", seed=5 + ) + + _is_valid_cut(G, set1, set2) + _cut_is_locally_optimal(G, cut_size, set1) + + +def test_one_exchange_optimal(): + # Greedy one exchange should find the optimal solution for this graph (14) + G = nx.Graph() + G.add_edge(1, 2, weight=3) + G.add_edge(1, 3, weight=3) + G.add_edge(1, 4, weight=3) + G.add_edge(1, 5, weight=3) + G.add_edge(2, 3, weight=5) + + cut_size, (set1, set2) = maxcut.one_exchange(G, weight="weight", seed=5) + + _is_valid_cut(G, set1, set2) + _cut_is_locally_optimal(G, cut_size, set1) + # check global optimality + assert cut_size == 14 + + +def test_negative_weights(): + G = nx.complete_graph(5) + random.seed(5) + for u, v, w in G.edges(data=True): + w["weight"] = -1 * random.random() + + initial_cut = set(random.sample(sorted(G.nodes()), k=5)) + cut_size, (set1, set2) = maxcut.one_exchange(G, initial_cut, weight="weight") + + # make sure it is a valid cut + _is_valid_cut(G, set1, set2) + # check local optimality + _cut_is_locally_optimal(G, cut_size, set1) + # test that all nodes are in the same partition + assert len(set1) == len(G.nodes) or len(set2) == len(G.nodes) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py new file mode 100644 index 0000000..32fe1fb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_ramsey.py @@ -0,0 +1,31 @@ +import networkx as nx +import networkx.algorithms.approximation as apxa + + +def test_ramsey(): + # this should only find the complete graph + graph = nx.complete_graph(10) + c, i = apxa.ramsey_R2(graph) + cdens = 
nx.density(graph.subgraph(c)) + assert cdens == 1.0, "clique not correctly found by ramsey!" + idens = nx.density(graph.subgraph(i)) + assert idens == 0.0, "i-set not correctly found by ramsey!" + + # this trivial graph has no cliques. should just find i-sets + graph = nx.trivial_graph() + c, i = apxa.ramsey_R2(graph) + assert c == {0}, "clique not correctly found by ramsey!" + assert i == {0}, "i-set not correctly found by ramsey!" + + graph = nx.barbell_graph(10, 5, nx.Graph()) + c, i = apxa.ramsey_R2(graph) + cdens = nx.density(graph.subgraph(c)) + assert cdens == 1.0, "clique not correctly found by ramsey!" + idens = nx.density(graph.subgraph(i)) + assert idens == 0.0, "i-set not correctly found by ramsey!" + + # add self-loops and test again + graph.add_edges_from([(n, n) for n in range(0, len(graph), 2)]) + cc, ii = apxa.ramsey_R2(graph) + assert cc == c + assert ii == i diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py new file mode 100644 index 0000000..246e6f8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_steinertree.py @@ -0,0 +1,265 @@ +import pytest + +import networkx as nx +from networkx.algorithms.approximation.steinertree import ( + _remove_nonterminal_leaves, + metric_closure, + steiner_tree, +) +from networkx.utils import edges_equal + + +class TestSteinerTree: + @classmethod + def setup_class(cls): + G1 = nx.Graph() + G1.add_edge(1, 2, weight=10) + G1.add_edge(2, 3, weight=10) + G1.add_edge(3, 4, weight=10) + G1.add_edge(4, 5, weight=10) + G1.add_edge(5, 6, weight=10) + G1.add_edge(2, 7, weight=1) + G1.add_edge(7, 5, weight=1) + + G2 = nx.Graph() + G2.add_edge(0, 5, weight=6) + G2.add_edge(1, 2, weight=2) + G2.add_edge(1, 5, weight=3) + G2.add_edge(2, 4, weight=4) + G2.add_edge(3, 5, weight=5) + G2.add_edge(4, 5, weight=1) + + G3 = nx.Graph() + G3.add_edge(1, 2, weight=8) + G3.add_edge(1, 9, weight=3) + G3.add_edge(1, 8, weight=6) + G3.add_edge(1, 10, weight=2) + G3.add_edge(1, 14, weight=3) + G3.add_edge(2, 3, weight=6) + G3.add_edge(3, 4, weight=3) + G3.add_edge(3, 10, weight=2) + G3.add_edge(3, 11, weight=1) + G3.add_edge(4, 5, weight=1) + G3.add_edge(4, 11, weight=1) + G3.add_edge(5, 6, weight=4) + G3.add_edge(5, 11, weight=2) + G3.add_edge(5, 12, weight=1) + G3.add_edge(5, 13, weight=3) + G3.add_edge(6, 7, weight=2) + G3.add_edge(6, 12, weight=3) + G3.add_edge(6, 13, weight=1) + G3.add_edge(7, 8, weight=3) + G3.add_edge(7, 9, weight=3) + G3.add_edge(7, 11, weight=5) + G3.add_edge(7, 13, weight=2) + G3.add_edge(7, 14, weight=4) + G3.add_edge(8, 9, weight=2) + G3.add_edge(9, 14, weight=1) + G3.add_edge(10, 11, weight=2) + G3.add_edge(10, 14, weight=1) + G3.add_edge(11, 12, weight=1) + G3.add_edge(11, 14, weight=7) + G3.add_edge(12, 14, weight=3) + G3.add_edge(12, 15, weight=1) + G3.add_edge(13, 14, weight=4) + G3.add_edge(13, 15, weight=1) + G3.add_edge(14, 15, weight=2) + + cls.G1 = G1 + cls.G2 = G2 + cls.G3 = G3 + cls.G1_term_nodes = [1, 2, 3, 4, 5] + cls.G2_term_nodes = [0, 2, 3] + cls.G3_term_nodes = [1, 3, 5, 6, 8, 10, 11, 12, 13] + + cls.methods = ["kou", "mehlhorn"] + + def test_connected_metric_closure(self): + G = self.G1.copy() + G.add_node(100) + pytest.raises(nx.NetworkXError, metric_closure, G) + + def test_metric_closure(self): + M = metric_closure(self.G1) + mc = [ + (1, 2, {"distance": 10, "path": [1, 2]}), + (1, 3, {"distance": 20, "path": [1, 2, 
3]}), + (1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}), + (1, 5, {"distance": 12, "path": [1, 2, 7, 5]}), + (1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}), + (1, 7, {"distance": 11, "path": [1, 2, 7]}), + (2, 3, {"distance": 10, "path": [2, 3]}), + (2, 4, {"distance": 12, "path": [2, 7, 5, 4]}), + (2, 5, {"distance": 2, "path": [2, 7, 5]}), + (2, 6, {"distance": 12, "path": [2, 7, 5, 6]}), + (2, 7, {"distance": 1, "path": [2, 7]}), + (3, 4, {"distance": 10, "path": [3, 4]}), + (3, 5, {"distance": 12, "path": [3, 2, 7, 5]}), + (3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}), + (3, 7, {"distance": 11, "path": [3, 2, 7]}), + (4, 5, {"distance": 10, "path": [4, 5]}), + (4, 6, {"distance": 20, "path": [4, 5, 6]}), + (4, 7, {"distance": 11, "path": [4, 5, 7]}), + (5, 6, {"distance": 10, "path": [5, 6]}), + (5, 7, {"distance": 1, "path": [5, 7]}), + (6, 7, {"distance": 11, "path": [6, 5, 7]}), + ] + assert edges_equal(list(M.edges(data=True)), mc) + + def test_steiner_tree(self): + valid_steiner_trees = [ + [ + [ + (1, 2, {"weight": 10}), + (2, 3, {"weight": 10}), + (2, 7, {"weight": 1}), + (3, 4, {"weight": 10}), + (5, 7, {"weight": 1}), + ], + [ + (1, 2, {"weight": 10}), + (2, 7, {"weight": 1}), + (3, 4, {"weight": 10}), + (4, 5, {"weight": 10}), + (5, 7, {"weight": 1}), + ], + [ + (1, 2, {"weight": 10}), + (2, 3, {"weight": 10}), + (2, 7, {"weight": 1}), + (4, 5, {"weight": 10}), + (5, 7, {"weight": 1}), + ], + ], + [ + [ + (0, 5, {"weight": 6}), + (1, 2, {"weight": 2}), + (1, 5, {"weight": 3}), + (3, 5, {"weight": 5}), + ], + [ + (0, 5, {"weight": 6}), + (4, 2, {"weight": 4}), + (4, 5, {"weight": 1}), + (3, 5, {"weight": 5}), + ], + ], + [ + [ + (1, 10, {"weight": 2}), + (3, 10, {"weight": 2}), + (3, 11, {"weight": 1}), + (5, 12, {"weight": 1}), + (6, 13, {"weight": 1}), + (8, 9, {"weight": 2}), + (9, 14, {"weight": 1}), + (10, 14, {"weight": 1}), + (11, 12, {"weight": 1}), + (12, 15, {"weight": 1}), + (13, 15, {"weight": 1}), + ] + ], + ] + for method in self.methods: + for G, term_nodes, valid_trees in zip( + [self.G1, self.G2, self.G3], + [self.G1_term_nodes, self.G2_term_nodes, self.G3_term_nodes], + valid_steiner_trees, + ): + S = steiner_tree(G, term_nodes, method=method) + assert any( + edges_equal(list(S.edges(data=True)), valid_tree) + for valid_tree in valid_trees + ) + + def test_multigraph_steiner_tree(self): + G = nx.MultiGraph() + G.add_edges_from( + [ + (1, 2, 0, {"weight": 1}), + (2, 3, 0, {"weight": 999}), + (2, 3, 1, {"weight": 1}), + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), + ] + ) + terminal_nodes = [2, 4, 5] + expected_edges = [ + (2, 3, 1, {"weight": 1}), # edge with key 1 has lower weight + (3, 4, 0, {"weight": 1}), + (3, 5, 0, {"weight": 1}), + ] + for method in self.methods: + S = steiner_tree(G, terminal_nodes, method=method) + assert edges_equal(S.edges(data=True, keys=True), expected_edges) + + def test_remove_nonterminal_leaves(self): + G = nx.path_graph(10) + _remove_nonterminal_leaves(G, [4, 5, 6]) + + assert list(G) == [4, 5, 6] # only the terminal nodes are left + + +@pytest.mark.parametrize("method", ("kou", "mehlhorn")) +def test_steiner_tree_weight_attribute(method): + G = nx.star_graph(4) + # Add an edge attribute that is named something other than "weight" + nx.set_edge_attributes(G, dict.fromkeys(G.edges, 10), name="distance") + H = nx.approximation.steiner_tree(G, [1, 3], method=method, weight="distance") + assert nx.utils.edges_equal(H.edges, [(0, 1), (0, 3)]) + + +@pytest.mark.parametrize("method", ("kou", "mehlhorn")) +def 
test_steiner_tree_multigraph_weight_attribute(method): + G = nx.cycle_graph(3, create_using=nx.MultiGraph) + nx.set_edge_attributes(G, dict.fromkeys(G.edges, 10), name="distance") + G.add_edge(2, 0, distance=5) + H = nx.approximation.steiner_tree(G, list(G), method=method, weight="distance") + assert len(H.edges) == 2 and H.has_edge(2, 0, key=1) + assert sum(dist for *_, dist in H.edges(data="distance")) == 15 + + +@pytest.mark.parametrize("method", (None, "mehlhorn", "kou")) +def test_steiner_tree_methods(method): + G = nx.star_graph(4) + expected = nx.Graph([(0, 1), (0, 3)]) + st = nx.approximation.steiner_tree(G, [1, 3], method=method) + assert nx.utils.edges_equal(st.edges, expected.edges) + + +def test_steiner_tree_method_invalid(): + G = nx.star_graph(4) + with pytest.raises( + ValueError, match="invalid_method is not a valid choice for an algorithm." + ): + nx.approximation.steiner_tree(G, terminal_nodes=[1, 3], method="invalid_method") + + +def test_steiner_tree_remove_non_terminal_leaves_self_loop_edges(): + # To verify that the last step of the steiner tree approximation + # behaves in the case where a non-terminal leaf has a self loop edge + G = nx.path_graph(10) + + # Add self loops to the terminal nodes + G.add_edges_from([(2, 2), (3, 3), (4, 4), (7, 7), (8, 8)]) + + # Remove non-terminal leaves + _remove_nonterminal_leaves(G, [4, 5, 6, 7]) + + # The terminal nodes should be left + assert list(G) == [4, 5, 6, 7] # only the terminal nodes are left + + +def test_steiner_tree_non_terminal_leaves_multigraph_self_loop_edges(): + # To verify that the last step of the steiner tree approximation + # behaves in the case where a non-terminal leaf has a self loop edge + G = nx.MultiGraph() + G.add_edges_from([(i, i + 1) for i in range(10)]) + G.add_edges_from([(2, 2), (3, 3), (4, 4), (4, 4), (7, 7)]) + + # Remove non-terminal leaves + _remove_nonterminal_leaves(G, [4, 5, 6, 7]) + + # Only the terminal nodes should be left + assert list(G) == [4, 5, 6, 7] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py new file mode 100644 index 0000000..d512428 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_traveling_salesman.py @@ -0,0 +1,1013 @@ +"""Unit tests for the traveling_salesman module.""" + +import random + +import pytest + +import networkx as nx +import networkx.algorithms.approximation as nx_app + +pairwise = nx.utils.pairwise + + +def test_christofides_hamiltonian(): + random.seed(42) + G = nx.complete_graph(20) + for u, v in G.edges(): + G[u][v]["weight"] = random.randint(0, 10) + + H = nx.Graph() + H.add_edges_from(pairwise(nx_app.christofides(G))) + H.remove_edges_from(nx.find_cycle(H)) + assert len(H.edges) == 0 + + tree = nx.minimum_spanning_tree(G, weight="weight") + H = nx.Graph() + H.add_edges_from(pairwise(nx_app.christofides(G, tree))) + H.remove_edges_from(nx.find_cycle(H)) + assert len(H.edges) == 0 + + +def test_christofides_incomplete_graph(): + G = nx.complete_graph(10) + G.remove_edge(0, 1) + pytest.raises(nx.NetworkXError, nx_app.christofides, G) + + +def test_christofides_ignore_selfloops(): + G = nx.complete_graph(5) + G.add_edge(3, 3) + cycle = nx_app.christofides(G) + assert len(cycle) - 1 == len(G) == len(set(cycle)) + + +# set up graphs for other tests +class TestBase: + @classmethod + def setup_class(cls): + cls.DG = nx.DiGraph() + 
cls.DG.add_weighted_edges_from( + { + ("A", "B", 3), + ("A", "C", 17), + ("A", "D", 14), + ("B", "A", 3), + ("B", "C", 12), + ("B", "D", 16), + ("C", "A", 13), + ("C", "B", 12), + ("C", "D", 4), + ("D", "A", 14), + ("D", "B", 15), + ("D", "C", 2), + } + ) + cls.DG_cycle = ["D", "C", "B", "A", "D"] + cls.DG_cost = 31.0 + + cls.DG2 = nx.DiGraph() + cls.DG2.add_weighted_edges_from( + { + ("A", "B", 3), + ("A", "C", 17), + ("A", "D", 14), + ("B", "A", 30), + ("B", "C", 2), + ("B", "D", 16), + ("C", "A", 33), + ("C", "B", 32), + ("C", "D", 34), + ("D", "A", 14), + ("D", "B", 15), + ("D", "C", 2), + } + ) + cls.DG2_cycle = ["D", "A", "B", "C", "D"] + cls.DG2_cost = 53.0 + + cls.unweightedUG = nx.complete_graph(5, nx.Graph()) + cls.unweightedDG = nx.complete_graph(5, nx.DiGraph()) + + cls.incompleteUG = nx.Graph() + cls.incompleteUG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)}) + cls.incompleteDG = nx.DiGraph() + cls.incompleteDG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)}) + + cls.UG = nx.Graph() + cls.UG.add_weighted_edges_from( + { + ("A", "B", 3), + ("A", "C", 17), + ("A", "D", 14), + ("B", "C", 12), + ("B", "D", 16), + ("C", "D", 4), + } + ) + cls.UG_cycle = ["D", "C", "B", "A", "D"] + cls.UG_cost = 33.0 + + cls.UG2 = nx.Graph() + cls.UG2.add_weighted_edges_from( + { + ("A", "B", 1), + ("A", "C", 15), + ("A", "D", 5), + ("B", "C", 16), + ("B", "D", 8), + ("C", "D", 3), + } + ) + cls.UG2_cycle = ["D", "C", "B", "A", "D"] + cls.UG2_cost = 25.0 + + +def validate_solution(soln, cost, exp_soln, exp_cost): + assert soln == exp_soln + assert cost == exp_cost + + +def validate_symmetric_solution(soln, cost, exp_soln, exp_cost): + assert soln == exp_soln or soln == exp_soln[::-1] + assert cost == exp_cost + + +class TestGreedyTSP(TestBase): + def test_greedy(self): + cycle = nx_app.greedy_tsp(self.DG, source="D") + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 31.0) + + cycle = nx_app.greedy_tsp(self.DG2, source="D") + cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 78.0) + + cycle = nx_app.greedy_tsp(self.UG, source="D") + cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 33.0) + + cycle = nx_app.greedy_tsp(self.UG2, source="D") + cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, ["D", "C", "A", "B", "D"], 27.0) + + def test_not_complete_graph(self): + pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteUG) + pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteDG) + + def test_not_weighted_graph(self): + nx_app.greedy_tsp(self.unweightedUG) + nx_app.greedy_tsp(self.unweightedDG) + + def test_two_nodes(self): + G = nx.Graph() + G.add_weighted_edges_from({(1, 2, 1)}) + cycle = nx_app.greedy_tsp(G) + cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, [1, 2, 1], 2) + + def test_ignore_selfloops(self): + G = nx.complete_graph(5) + G.add_edge(3, 3) + cycle = nx_app.greedy_tsp(G) + assert len(cycle) - 1 == len(G) == len(set(cycle)) + + +class TestSimulatedAnnealingTSP(TestBase): + tsp = staticmethod(nx_app.simulated_annealing_tsp) + + def test_simulated_annealing_directed(self): + cycle = self.tsp(self.DG, "greedy", source="D", seed=42) + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, 
self.DG_cycle, self.DG_cost) + + initial_sol = ["D", "B", "A", "C", "D"] + cycle = self.tsp(self.DG, initial_sol, source="D", seed=42) + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, self.DG_cycle, self.DG_cost) + + initial_sol = ["D", "A", "C", "B", "D"] + cycle = self.tsp(self.DG, initial_sol, move="1-0", source="D", seed=42) + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, self.DG_cycle, self.DG_cost) + + cycle = self.tsp(self.DG2, "greedy", source="D", seed=42) + cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost) + + cycle = self.tsp(self.DG2, "greedy", move="1-0", source="D", seed=42) + cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost) + + def test_simulated_annealing_undirected(self): + cycle = self.tsp(self.UG, "greedy", source="D", seed=42) + cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, self.UG_cycle, self.UG_cost) + + cycle = self.tsp(self.UG2, "greedy", source="D", seed=42) + cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost) + + cycle = self.tsp(self.UG2, "greedy", move="1-0", source="D", seed=42) + cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost) + + def test_error_on_input_order_mistake(self): + # see issue #4846 https://github.com/networkx/networkx/issues/4846 + pytest.raises(TypeError, self.tsp, self.UG, weight="weight") + pytest.raises(nx.NetworkXError, self.tsp, self.UG, "weight") + + def test_not_complete_graph(self): + pytest.raises(nx.NetworkXError, self.tsp, self.incompleteUG, "greedy", source=0) + pytest.raises(nx.NetworkXError, self.tsp, self.incompleteDG, "greedy", source=0) + + def test_ignore_selfloops(self): + G = nx.complete_graph(5) + G.add_edge(3, 3) + cycle = self.tsp(G, "greedy") + assert len(cycle) - 1 == len(G) == len(set(cycle)) + + def test_not_weighted_graph(self): + self.tsp(self.unweightedUG, "greedy") + self.tsp(self.unweightedDG, "greedy") + + def test_two_nodes(self): + G = nx.Graph() + G.add_weighted_edges_from({(1, 2, 1)}) + + cycle = self.tsp(G, "greedy", source=1, seed=42) + cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, [1, 2, 1], 2) + + cycle = self.tsp(G, [1, 2, 1], source=1, seed=42) + cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + validate_solution(cycle, cost, [1, 2, 1], 2) + + def test_failure_of_costs_too_high_when_iterations_low(self): + # Simulated Annealing Version: + # set number of moves low and alpha high + cycle = self.tsp( + self.DG2, "greedy", source="D", move="1-0", alpha=1, N_inner=1, seed=42 + ) + cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + assert cost > self.DG2_cost + + # Try with an incorrect initial guess + initial_sol = ["D", "A", "B", "C", "D"] + cycle = self.tsp( + self.DG, + initial_sol, + source="D", + move="1-0", + alpha=0.1, + N_inner=1, + max_iterations=1, + seed=42, + ) + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + assert cost > self.DG_cost + + +class TestThresholdAcceptingTSP(TestSimulatedAnnealingTSP): + tsp = staticmethod(nx_app.threshold_accepting_tsp) + + def 
test_failure_of_costs_too_high_when_iterations_low(self): + # Threshold Version: + # set number of moves low and number of iterations low + cycle = self.tsp( + self.DG2, + "greedy", + source="D", + move="1-0", + N_inner=1, + max_iterations=1, + seed=4, + ) + cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + assert cost > self.DG2_cost + + # set threshold too low + initial_sol = ["D", "A", "B", "C", "D"] + cycle = self.tsp( + self.DG, initial_sol, source="D", move="1-0", threshold=-3, seed=42 + ) + cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle)) + assert cost > self.DG_cost + + +# Tests for function traveling_salesman_problem +def test_TSP_method(): + G = nx.cycle_graph(9) + G[4][5]["weight"] = 10 + + # Test using the old currying method + def sa_tsp(G, weight): + return nx_app.simulated_annealing_tsp(G, "greedy", weight, source=4, seed=1) + + path = nx_app.traveling_salesman_problem( + G, + method=sa_tsp, + cycle=False, + ) + assert path == [4, 3, 2, 1, 0, 8, 7, 6, 5] + + +def test_TSP_unweighted(): + G = nx.cycle_graph(9) + path = nx_app.traveling_salesman_problem(G, nodes=[3, 6], cycle=False) + assert path in ([3, 4, 5, 6], [6, 5, 4, 3]) + + cycle = nx_app.traveling_salesman_problem(G, nodes=[3, 6]) + assert cycle in ([3, 4, 5, 6, 5, 4, 3], [6, 5, 4, 3, 4, 5, 6]) + + +def test_TSP_weighted(): + G = nx.cycle_graph(9) + G[0][1]["weight"] = 2 + G[1][2]["weight"] = 2 + G[2][3]["weight"] = 2 + G[3][4]["weight"] = 4 + G[4][5]["weight"] = 5 + G[5][6]["weight"] = 4 + G[6][7]["weight"] = 2 + G[7][8]["weight"] = 2 + G[8][0]["weight"] = 2 + tsp = nx_app.traveling_salesman_problem + + # path between 3 and 6 + expected_paths = ([3, 2, 1, 0, 8, 7, 6], [6, 7, 8, 0, 1, 2, 3]) + # cycle between 3 and 6 + expected_cycles = ( + [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3], + [6, 7, 8, 0, 1, 2, 3, 2, 1, 0, 8, 7, 6], + ) + # path through all nodes + expected_tourpaths = ([5, 6, 7, 8, 0, 1, 2, 3, 4], [4, 3, 2, 1, 0, 8, 7, 6, 5]) + + # Check default method + cycle = tsp(G, nodes=[3, 6], weight="weight") + assert cycle in expected_cycles + + path = tsp(G, nodes=[3, 6], weight="weight", cycle=False) + assert path in expected_paths + + tourpath = tsp(G, weight="weight", cycle=False) + assert tourpath in expected_tourpaths + + # Check all methods + methods = [ + (nx_app.christofides, {}), + (nx_app.greedy_tsp, {}), + ( + nx_app.simulated_annealing_tsp, + {"init_cycle": "greedy"}, + ), + ( + nx_app.threshold_accepting_tsp, + {"init_cycle": "greedy"}, + ), + ] + for method, kwargs in methods: + cycle = tsp(G, nodes=[3, 6], weight="weight", method=method, **kwargs) + assert cycle in expected_cycles + + path = tsp( + G, nodes=[3, 6], weight="weight", method=method, cycle=False, **kwargs + ) + assert path in expected_paths + + tourpath = tsp(G, weight="weight", method=method, cycle=False, **kwargs) + assert tourpath in expected_tourpaths + + +def test_TSP_incomplete_graph_short_path(): + G = nx.cycle_graph(9) + G.add_edges_from([(4, 9), (9, 10), (10, 11), (11, 0)]) + G[4][5]["weight"] = 5 + + cycle = nx_app.traveling_salesman_problem(G) + assert len(cycle) == 17 and len(set(cycle)) == 12 + + # make sure that cutting one edge out of complete graph formulation + # cuts out many edges out of the path of the TSP + path = nx_app.traveling_salesman_problem(G, cycle=False) + assert len(path) == 13 and len(set(path)) == 12 + + +def test_TSP_alternate_weight(): + G = nx.complete_graph(9) + G[0][1]["weight"] = 2 + G[1][2]["weight"] = 2 + G[2][3]["weight"] = 2 + G[3][4]["weight"] = 4 + 
G[4][5]["weight"] = 5 + G[5][6]["weight"] = 4 + G[6][7]["weight"] = 2 + G[7][8]["weight"] = 2 + G[8][0]["weight"] = 2 + + H = nx.complete_graph(9) + H[0][1]["distance"] = 2 + H[1][2]["distance"] = 2 + H[2][3]["distance"] = 2 + H[3][4]["distance"] = 4 + H[4][5]["distance"] = 5 + H[5][6]["distance"] = 4 + H[6][7]["distance"] = 2 + H[7][8]["distance"] = 2 + H[8][0]["distance"] = 2 + + assert nx_app.traveling_salesman_problem( + G, weight="weight" + ) == nx_app.traveling_salesman_problem(H, weight="distance") + + +def test_held_karp_ascent(): + """ + Test the Held-Karp relaxation with the ascent method + """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + # Adjacency matrix from page 1153 of the 1970 Held and Karp paper + # which have been edited to be directional, but also symmetric + G_array = np.array( + [ + [0, 97, 60, 73, 17, 52], + [97, 0, 41, 52, 90, 30], + [60, 41, 0, 21, 35, 41], + [73, 52, 21, 0, 95, 46], + [17, 90, 35, 95, 0, 81], + [52, 30, 41, 46, 81, 0], + ] + ) + + solution_edges = [(1, 3), (2, 4), (3, 2), (4, 0), (5, 1), (0, 5)] + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + # Check that the optimal weights are the same + assert round(opt_hk, 2) == 207.00 + # Check that the z_stars are the same + solution = nx.DiGraph() + solution.add_edges_from(solution_edges) + assert nx.utils.edges_equal(z_star.edges, solution.edges) + + +def test_ascent_fractional_solution(): + """ + Test the ascent method using a modified version of Figure 2 on page 1140 + in 'The Traveling Salesman Problem and Minimum Spanning Trees' by Held and + Karp + """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + # This version of Figure 2 has all of the edge weights multiplied by 100 + # and is a complete directed graph with infinite edge weights for the + # edges not listed in the original graph + G_array = np.array( + [ + [0, 100, 100, 100000, 100000, 1], + [100, 0, 100, 100000, 1, 100000], + [100, 100, 0, 1, 100000, 100000], + [100000, 100000, 1, 0, 100, 100], + [100000, 1, 100000, 100, 0, 100], + [1, 100000, 100000, 100, 100, 0], + ] + ) + + solution_z_star = { + (0, 1): 5 / 12, + (0, 2): 5 / 12, + (0, 5): 5 / 6, + (1, 0): 5 / 12, + (1, 2): 1 / 3, + (1, 4): 5 / 6, + (2, 0): 5 / 12, + (2, 1): 1 / 3, + (2, 3): 5 / 6, + (3, 2): 5 / 6, + (3, 4): 1 / 3, + (3, 5): 1 / 2, + (4, 1): 5 / 6, + (4, 3): 1 / 3, + (4, 5): 1 / 2, + (5, 0): 5 / 6, + (5, 3): 1 / 2, + (5, 4): 1 / 2, + } + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + # Check that the optimal weights are the same + assert round(opt_hk, 2) == 303.00 + # Check that the z_stars are the same + assert {key: round(z_star[key], 4) for key in z_star} == { + key: round(solution_z_star[key], 4) for key in solution_z_star + } + + +def test_ascent_method_asymmetric(): + """ + Tests the ascent method using a truly asymmetric graph for which the + solution has been brute forced + """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + [0, 26, 63, 59, 69, 31, 41], + [62, 0, 91, 53, 75, 87, 47], + [47, 82, 0, 90, 15, 9, 18], + [68, 19, 5, 0, 58, 34, 93], + [11, 58, 53, 55, 0, 61, 79], + [88, 75, 13, 76, 98, 0, 40], + [41, 61, 55, 88, 46, 45, 0], + ] + ) + + 
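+    # Editorial sketch, not part of the original suite: the docstring says the
+    # optimum was brute forced. For an instance this small, that reference
+    # value can be reproduced by enumerating every directed tour:
+    import itertools
+
+    brute_force_opt = min(
+        sum(G_array[t[i], t[(i + 1) % len(t)]] for i in range(len(t)))
+        for t in itertools.permutations(range(len(G_array)))
+    )
+    assert brute_force_opt == 190
+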
solution_edges = [(0, 1), (1, 3), (3, 2), (2, 5), (5, 6), (4, 0), (6, 4)] + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + # Check that the optimal weights are the same + assert round(opt_hk, 2) == 190.00 + # Check that the z_stars match. + solution = nx.DiGraph() + solution.add_edges_from(solution_edges) + assert nx.utils.edges_equal(z_star.edges, solution.edges) + + +def test_ascent_method_asymmetric_2(): + """ + Tests the ascent method using a truly asymmetric graph for which the + solution has been brute forced. + """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + [0, 45, 39, 92, 29, 31], + [72, 0, 4, 12, 21, 60], + [81, 6, 0, 98, 70, 53], + [49, 71, 59, 0, 98, 94], + [74, 95, 24, 43, 0, 47], + [56, 43, 3, 65, 22, 0], + ] + ) + + solution_edges = [(0, 5), (5, 4), (1, 3), (3, 0), (2, 1), (4, 2)] + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + # Check that the optimal weights are the same + assert round(opt_hk, 2) == 144.00 + # Check that the z_stars match. + solution = nx.DiGraph() + solution.add_edges_from(solution_edges) + assert nx.utils.edges_equal(z_star.edges, solution.edges) + + +def test_held_karp_ascent_asymmetric_3(): + """ + Tests the ascent method using a truly asymmetric graph with a fractional + solution for which the solution has been brute forced. + + In this graph there are two different optimal, integral solutions (which + are also the overall ATSP solutions) to the Held-Karp relaxation. Because + this particular graph has two different tours of optimal value, and the + candidate solutions in the held_karp_ascent function are not stored in an + ordered data structure, either optimal tour may be returned.
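+
+    Both accepted solutions are tours of weight 13:
+    0 -> 3 -> 1 -> 4 -> 2 -> 5 -> 0 (2 + 2 + 1 + 4 + 1 + 3) and
+    0 -> 3 -> 1 -> 4 -> 5 -> 2 -> 0 (2 + 2 + 1 + 3 + 1 + 4).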
+ """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + [0, 1, 5, 2, 7, 4], + [7, 0, 7, 7, 1, 4], + [4, 7, 0, 9, 2, 1], + [7, 2, 7, 0, 4, 4], + [5, 5, 4, 4, 0, 3], + [3, 9, 1, 3, 4, 0], + ] + ) + + solution1_edges = [(0, 3), (1, 4), (2, 5), (3, 1), (4, 2), (5, 0)] + + solution2_edges = [(0, 3), (3, 1), (1, 4), (4, 5), (2, 0), (5, 2)] + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + assert round(opt_hk, 2) == 13.00 + # Check that the z_stars are the same + solution1 = nx.DiGraph() + solution1.add_edges_from(solution1_edges) + solution2 = nx.DiGraph() + solution2.add_edges_from(solution2_edges) + assert nx.utils.edges_equal(z_star.edges, solution1.edges) or nx.utils.edges_equal( + z_star.edges, solution2.edges + ) + + +def test_held_karp_ascent_fractional_asymmetric(): + """ + Tests the ascent method using a truly asymmetric graph with a fractional + solution for which the solution has been brute forced + """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + [0, 100, 150, 100000, 100000, 1], + [150, 0, 100, 100000, 1, 100000], + [100, 150, 0, 1, 100000, 100000], + [100000, 100000, 1, 0, 150, 100], + [100000, 2, 100000, 100, 0, 150], + [2, 100000, 100000, 150, 100, 0], + ] + ) + + solution_z_star = { + (0, 1): 5 / 12, + (0, 2): 5 / 12, + (0, 5): 5 / 6, + (1, 0): 5 / 12, + (1, 2): 5 / 12, + (1, 4): 5 / 6, + (2, 0): 5 / 12, + (2, 1): 5 / 12, + (2, 3): 5 / 6, + (3, 2): 5 / 6, + (3, 4): 5 / 12, + (3, 5): 5 / 12, + (4, 1): 5 / 6, + (4, 3): 5 / 12, + (4, 5): 5 / 12, + (5, 0): 5 / 6, + (5, 3): 5 / 12, + (5, 4): 5 / 12, + } + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + opt_hk, z_star = tsp.held_karp_ascent(G) + + # Check that the optimal weights are the same + assert round(opt_hk, 2) == 304.00 + # Check that the z_stars are the same + assert {key: round(z_star[key], 4) for key in z_star} == { + key: round(solution_z_star[key], 4) for key in solution_z_star + } + + +def test_spanning_tree_distribution(): + """ + Test that we can create an exponential distribution of spanning trees such + that the probability of each tree is proportional to the product of edge + weights. + + Results of this test have been confirmed with hypothesis testing from the + created distribution. + + This test uses the symmetric, fractional Held Karp solution. 
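+
+    As an editorial illustration (a sketch on a small hypothetical graph, not
+    executed by this test), the sampled property can be checked empirically::
+
+        import collections
+
+        H = nx.cycle_graph(4)
+        nx.set_edge_attributes(H, {e: i + 1 for i, e in enumerate(H.edges)}, "weight")
+        counts = collections.Counter(
+            frozenset(nx.random_spanning_tree(H, weight="weight", seed=s).edges)
+            for s in range(5000)
+        )
+        # The four spanning trees have weight products 24, 12, 8 and 6, so
+        # their relative frequencies should approach 24/50, 12/50, 8/50, 6/50.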
+ """ + import networkx.algorithms.approximation.traveling_salesman as tsp + + pytest.importorskip("numpy") + pytest.importorskip("scipy") + + z_star = { + (0, 1): 5 / 12, + (0, 2): 5 / 12, + (0, 5): 5 / 6, + (1, 0): 5 / 12, + (1, 2): 1 / 3, + (1, 4): 5 / 6, + (2, 0): 5 / 12, + (2, 1): 1 / 3, + (2, 3): 5 / 6, + (3, 2): 5 / 6, + (3, 4): 1 / 3, + (3, 5): 1 / 2, + (4, 1): 5 / 6, + (4, 3): 1 / 3, + (4, 5): 1 / 2, + (5, 0): 5 / 6, + (5, 3): 1 / 2, + (5, 4): 1 / 2, + } + + solution_gamma = { + (0, 1): -0.6383, + (0, 2): -0.6827, + (0, 5): 0, + (1, 2): -1.0781, + (1, 4): 0, + (2, 3): 0, + (5, 3): -0.2820, + (5, 4): -0.3327, + (4, 3): -0.9927, + } + + # The undirected support of z_star + G = nx.MultiGraph() + for u, v in z_star: + if (u, v) in G.edges or (v, u) in G.edges: + continue + G.add_edge(u, v) + + gamma = tsp.spanning_tree_distribution(G, z_star) + + assert {key: round(gamma[key], 4) for key in gamma} == solution_gamma + + +def test_asadpour_tsp(): + """ + Test the complete Asadpour TSP algorithm with the fractional, symmetric + Held-Karp solution. This test also uses an incomplete graph as input. + """ + # This version of Figure 2 has all of the edge weights multiplied by 100 + # and the 0 weight edges have a weight of 1. + pytest.importorskip("numpy") + pytest.importorskip("scipy") + + edge_list = [ + (0, 1, 100), + (0, 2, 100), + (0, 5, 1), + (1, 2, 100), + (1, 4, 1), + (2, 3, 1), + (3, 4, 100), + (3, 5, 100), + (4, 5, 100), + (1, 0, 100), + (2, 0, 100), + (5, 0, 1), + (2, 1, 100), + (4, 1, 1), + (3, 2, 1), + (4, 3, 100), + (5, 3, 100), + (5, 4, 100), + ] + + G = nx.DiGraph() + G.add_weighted_edges_from(edge_list) + + tour = nx_app.traveling_salesman_problem( + G, weight="weight", method=nx_app.asadpour_atsp, seed=19 + ) + + # Check that the returned list is a valid tour. Because this is an + # incomplete graph, the conditions are not as strict. We need the tour to + # + # Start and end at the same node + # Pass through every vertex at least once + # Have a total cost at most ln(6) / ln(ln(6)) = 3.0723 times the optimal + # + # For the second condition it is possible to have the tour pass through the + # same vertex more than once. Imagine that the tour on the complete version + # takes an edge not in the original graph. In the output this is substituted + # with the shortest path between those vertices, allowing vertices to appear + # more than once. + # + # Even though we are using a fixed seed, multiple tours have been known to + # be returned. The first two are from the original development of this test, + # and the third one from issue #5913 on GitHub. If other tours are returned, + # add them to the list of expected tours. + expected_tours = [ + [1, 4, 5, 0, 2, 3, 2, 1], + [3, 2, 0, 1, 4, 5, 3], + [3, 2, 1, 0, 5, 4, 3], + ] + + assert tour in expected_tours + + +def test_asadpour_real_world(): + """ + This test uses airline prices between the six largest cities in the US. + + * New York City -> JFK + * Los Angeles -> LAX + * Chicago -> ORD + * Houston -> IAH + * Phoenix -> PHX + * Philadelphia -> PHL + + Flight prices from August 2021 using Delta or American airlines to get + nonstop flights. The brute force solution found the optimal tour to cost + $872. + + This test also uses the `source` keyword argument to ensure that the tour + always starts at city 0.
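+
+    (Editorial note: "city 0" is "JFK", since ``nodelist`` maps matrix row 0
+    to ``node_list[0]``. The $872 optimum corresponds to the second expected
+    tour, JFK -> ORD -> PHX -> LAX -> IAH -> PHL -> JFK, with cost
+    199 + 123 + 127 + 123 + 117 + 183 = 872.)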
+ """ + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + # JFK LAX ORD IAH PHX PHL + [0, 243, 199, 208, 169, 183], # JFK + [277, 0, 217, 123, 127, 252], # LAX + [297, 197, 0, 197, 123, 177], # ORD + [303, 169, 197, 0, 117, 117], # IAH + [257, 127, 160, 117, 0, 319], # PHX + [183, 332, 217, 117, 319, 0], # PHL + ] + ) + + node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"] + + expected_tours = [ + ["JFK", "LAX", "PHX", "ORD", "IAH", "PHL", "JFK"], + ["JFK", "ORD", "PHX", "LAX", "IAH", "PHL", "JFK"], + ] + + G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph) + + tour = nx_app.traveling_salesman_problem( + G, weight="weight", method=nx_app.asadpour_atsp, seed=37, source="JFK" + ) + + assert tour in expected_tours + + +def test_asadpour_real_world_path(): + """ + This test uses airline prices between the six largest cities in the US. This + time using a path, not a cycle. + + * New York City -> JFK + * Los Angeles -> LAX + * Chicago -> ORD + * Houston -> IAH + * Phoenix -> PHX + * Philadelphia -> PHL + + Flight prices from August 2021 using Delta or American airlines to get + nonstop flights. The brute force solution found the optimal tour to cost + $872. + """ + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + G_array = np.array( + [ + # JFK LAX ORD IAH PHX PHL + [0, 243, 199, 208, 169, 183], # JFK + [277, 0, 217, 123, 127, 252], # LAX + [297, 197, 0, 197, 123, 177], # ORD + [303, 169, 197, 0, 117, 117], # IAH + [257, 127, 160, 117, 0, 319], # PHX + [183, 332, 217, 117, 319, 0], # PHL + ] + ) + + node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"] + + expected_paths = [ + ["ORD", "PHX", "LAX", "IAH", "PHL", "JFK"], + ["JFK", "PHL", "IAH", "ORD", "PHX", "LAX"], + ] + + G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph) + + path = nx_app.traveling_salesman_problem( + G, weight="weight", cycle=False, method=nx_app.asadpour_atsp, seed=56 + ) + + assert path in expected_paths + + +def test_asadpour_disconnected_graph(): + """ + Test that the proper exception is raised when asadpour_atsp is given a + disconnected graph.
+ """ + + G = nx.complete_graph(4, create_using=nx.DiGraph) + # have to set edge weights so that if the exception is not raised, the + # function will complete and we will fail the test + nx.set_edge_attributes(G, 1, "weight") + G.add_node(5) + + pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G) + + +def test_asadpour_incomplete_graph(): + """ + Test that the proper exception is raised when asadpour_atsp is given an + incomplete graph + """ + + G = nx.complete_graph(4, create_using=nx.DiGraph) + # have to set edge weights so that if the exception is not raised, the + # function will complete and we will fail the test + nx.set_edge_attributes(G, 1, "weight") + G.remove_edge(0, 1) + + pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G) + + +def test_asadpour_empty_graph(): + """ + Test the asadpour_atsp function with an empty graph + """ + G = nx.DiGraph() + + pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G) + + +def test_asadpour_small_graphs(): + # 1 node + G = nx.path_graph(1, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError, match="at least two nodes"): + nx_app.asadpour_atsp(G) + + # 2 nodes + G = nx.DiGraph() + G.add_weighted_edges_from([(0, 1, 7), (1, 0, 8)]) + assert nx_app.asadpour_atsp(G) in [[0, 1], [1, 0]] + assert nx_app.asadpour_atsp(G, source=1) == [1, 0] + assert nx_app.asadpour_atsp(G, source=0) == [0, 1] + + +@pytest.mark.slow +def test_asadpour_integral_held_karp(): + """ + This test uses an integral Held-Karp solution, so the Held-Karp function + returns a graph rather than a dict, bypassing most of the Asadpour + algorithm. + + At first glance, this test probably doesn't look like it ensures that we + skip the rest of the Asadpour algorithm, but it does. We are not fixing a + seed for the random number generator, so if any spanning trees were + sampled, the returned approximation would differ on essentially every run. + It does not, because Held-Karp is deterministic here and we never reach + the portion of the code that depends on random numbers.
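+
+    (Editorial note: the expected tour below, [1, 3, 2, 5, 2, 6, 4, 0, 1],
+    legitimately visits node 2 twice. The instance violates the triangle
+    inequality: the direct arc 5 -> 6 costs 40 while the path 5 -> 2 -> 6
+    costs 13 + 18 = 31, so traveling_salesman_problem substitutes the
+    shortest path when mapping the tour back onto G.)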
+ """ + np = pytest.importorskip("numpy") + + G_array = np.array( + [ + [0, 26, 63, 59, 69, 31, 41], + [62, 0, 91, 53, 75, 87, 47], + [47, 82, 0, 90, 15, 9, 18], + [68, 19, 5, 0, 58, 34, 93], + [11, 58, 53, 55, 0, 61, 79], + [88, 75, 13, 76, 98, 0, 40], + [41, 61, 55, 88, 46, 45, 0], + ] + ) + + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + + for _ in range(2): + tour = nx_app.traveling_salesman_problem(G, method=nx_app.asadpour_atsp) + + assert [1, 3, 2, 5, 2, 6, 4, 0, 1] == tour + + +def test_directed_tsp_impossible(): + """ + Test the asadpour algorithm with a graph without a hamiltonian circuit + """ + pytest.importorskip("numpy") + + # In this graph, once we leave node 0 we cannot return + edges = [ + (0, 1, 10), + (0, 2, 11), + (0, 3, 12), + (1, 2, 4), + (1, 3, 6), + (2, 1, 3), + (2, 3, 2), + (3, 1, 5), + (3, 2, 1), + ] + + G = nx.DiGraph() + G.add_weighted_edges_from(edges) + + pytest.raises(nx.NetworkXError, nx_app.traveling_salesman_problem, G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py new file mode 100644 index 0000000..c40910e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_treewidth.py @@ -0,0 +1,274 @@ +import itertools + +import networkx as nx +from networkx.algorithms.approximation import ( + treewidth_min_degree, + treewidth_min_fill_in, +) +from networkx.algorithms.approximation.treewidth import ( + MinDegreeHeuristic, + min_fill_in_heuristic, +) + + +def is_tree_decomp(graph, decomp): + """Check if the given tree decomposition is valid.""" + for x in graph.nodes(): + appear_once = False + for bag in decomp.nodes(): + if x in bag: + appear_once = True + break + assert appear_once + + # Check if each connected pair of nodes are at least once together in a bag + for x, y in graph.edges(): + appear_together = False + for bag in decomp.nodes(): + if x in bag and y in bag: + appear_together = True + break + assert appear_together + + # Check if the nodes associated with vertex v form a connected subset of T + for v in graph.nodes(): + subset = [] + for bag in decomp.nodes(): + if v in bag: + subset.append(bag) + sub_graph = decomp.subgraph(subset) + assert nx.is_connected(sub_graph) + + +class TestTreewidthMinDegree: + """Unit tests for the min_degree function""" + + @classmethod + def setup_class(cls): + """Setup for different kinds of trees""" + cls.complete = nx.Graph() + cls.complete.add_edge(1, 2) + cls.complete.add_edge(2, 3) + cls.complete.add_edge(1, 3) + + cls.small_tree = nx.Graph() + cls.small_tree.add_edge(1, 3) + cls.small_tree.add_edge(4, 3) + cls.small_tree.add_edge(2, 3) + cls.small_tree.add_edge(3, 5) + cls.small_tree.add_edge(5, 6) + cls.small_tree.add_edge(5, 7) + cls.small_tree.add_edge(6, 7) + + cls.deterministic_graph = nx.Graph() + cls.deterministic_graph.add_edge(0, 1) # deg(0) = 1 + + cls.deterministic_graph.add_edge(1, 2) # deg(1) = 2 + + cls.deterministic_graph.add_edge(2, 3) + cls.deterministic_graph.add_edge(2, 4) # deg(2) = 3 + + cls.deterministic_graph.add_edge(3, 4) + cls.deterministic_graph.add_edge(3, 5) + cls.deterministic_graph.add_edge(3, 6) # deg(3) = 4 + + cls.deterministic_graph.add_edge(4, 5) + cls.deterministic_graph.add_edge(4, 6) + cls.deterministic_graph.add_edge(4, 7) # deg(4) = 5 + + cls.deterministic_graph.add_edge(5, 6) + cls.deterministic_graph.add_edge(5, 7) + cls.deterministic_graph.add_edge(5, 8) + 
cls.deterministic_graph.add_edge(5, 9) # deg(5) = 6 + + cls.deterministic_graph.add_edge(6, 7) + cls.deterministic_graph.add_edge(6, 8) + cls.deterministic_graph.add_edge(6, 9) # deg(6) = 6 + + cls.deterministic_graph.add_edge(7, 8) + cls.deterministic_graph.add_edge(7, 9) # deg(7) = 5 + + cls.deterministic_graph.add_edge(8, 9) # deg(8) = 4 + + def test_petersen_graph(self): + """Test Petersen graph tree decomposition result""" + G = nx.petersen_graph() + _, decomp = treewidth_min_degree(G) + is_tree_decomp(G, decomp) + + def test_small_tree_treewidth(self): + """Test small tree + + Test if the computed treewidth of the known self.small_tree is 2. + As we know which value we can expect from our heuristic, values other + than two are regressions + """ + G = self.small_tree + # the order of removal should be [1,2,4]3[5,6,7] + # (with [] denoting any order of the containing nodes) + # resulting in treewidth 2 for the heuristic + treewidth, _ = treewidth_min_fill_in(G) + assert treewidth == 2 + + def test_heuristic_abort(self): + """Test heuristic abort condition for fully connected graph""" + graph = {} + for u in self.complete: + graph[u] = set() + for v in self.complete[u]: + if u != v: # ignore self-loop + graph[u].add(v) + + deg_heuristic = MinDegreeHeuristic(graph) + node = deg_heuristic.best_node(graph) + if node is None: + pass + else: + assert False + + def test_empty_graph(self): + """Test empty graph""" + G = nx.Graph() + _, _ = treewidth_min_degree(G) + + def test_two_component_graph(self): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + treewidth, _ = treewidth_min_degree(G) + assert treewidth == 0 + + def test_not_sortable_nodes(self): + G = nx.Graph([(0, "a")]) + treewidth_min_degree(G) + + def test_heuristic_first_steps(self): + """Test first steps of min_degree heuristic""" + graph = { + n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph + } + deg_heuristic = MinDegreeHeuristic(graph) + elim_node = deg_heuristic.best_node(graph) + steps = [] + + while elim_node is not None: + steps.append(elim_node) + nbrs = graph[elim_node] + + for u, v in itertools.permutations(nbrs, 2): + if v not in graph[u]: + graph[u].add(v) + + for u in graph: + if elim_node in graph[u]: + graph[u].remove(elim_node) + + del graph[elim_node] + elim_node = deg_heuristic.best_node(graph) + + # check only the first 5 elements for equality + assert steps[:5] == [0, 1, 2, 3, 4] + + +class TestTreewidthMinFillIn: + """Unit tests for the treewidth_min_fill_in function.""" + + @classmethod + def setup_class(cls): + """Setup for different kinds of trees""" + cls.complete = nx.Graph() + cls.complete.add_edge(1, 2) + cls.complete.add_edge(2, 3) + cls.complete.add_edge(1, 3) + + cls.small_tree = nx.Graph() + cls.small_tree.add_edge(1, 2) + cls.small_tree.add_edge(2, 3) + cls.small_tree.add_edge(3, 4) + cls.small_tree.add_edge(1, 4) + cls.small_tree.add_edge(2, 4) + cls.small_tree.add_edge(4, 5) + cls.small_tree.add_edge(5, 6) + cls.small_tree.add_edge(5, 7) + cls.small_tree.add_edge(6, 7) + + cls.deterministic_graph = nx.Graph() + cls.deterministic_graph.add_edge(1, 2) + cls.deterministic_graph.add_edge(1, 3) + cls.deterministic_graph.add_edge(3, 4) + cls.deterministic_graph.add_edge(2, 4) + cls.deterministic_graph.add_edge(3, 5) + cls.deterministic_graph.add_edge(4, 5) + cls.deterministic_graph.add_edge(3, 6) + cls.deterministic_graph.add_edge(5, 6) + + def test_petersen_graph(self): + """Test Petersen graph tree decomposition result""" + G = nx.petersen_graph() + _, decomp = 
treewidth_min_fill_in(G) + is_tree_decomp(G, decomp) + + def test_small_tree_treewidth(self): + """Test if the computed treewidth of the known self.small_tree is 2""" + G = self.small_tree + # the order of removal should be [1,2,4]3[5,6,7] + # (with [] denoting any order of the containing nodes) + # resulting in treewidth 2 for the heuristic + treewidth, _ = treewidth_min_fill_in(G) + assert treewidth == 2 + + def test_heuristic_abort(self): + """Test if min_fill_in returns None for fully connected graph""" + graph = {} + for u in self.complete: + graph[u] = set() + for v in self.complete[u]: + if u != v: # ignore self-loop + graph[u].add(v) + next_node = min_fill_in_heuristic(graph) + if next_node is None: + pass + else: + assert False + + def test_empty_graph(self): + """Test empty graph""" + G = nx.Graph() + _, _ = treewidth_min_fill_in(G) + + def test_two_component_graph(self): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + treewidth, _ = treewidth_min_fill_in(G) + assert treewidth == 0 + + def test_not_sortable_nodes(self): + G = nx.Graph([(0, "a")]) + treewidth_min_fill_in(G) + + def test_heuristic_first_steps(self): + """Test first steps of min_fill_in heuristic""" + graph = { + n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph + } + elim_node = min_fill_in_heuristic(graph) + steps = [] + + while elim_node is not None: + steps.append(elim_node) + nbrs = graph[elim_node] + + for u, v in itertools.permutations(nbrs, 2): + if v not in graph[u]: + graph[u].add(v) + + for u in graph: + if elim_node in graph[u]: + graph[u].remove(elim_node) + + del graph[elim_node] + elim_node = min_fill_in_heuristic(graph) + + # check only the first 2 elements for equality + assert steps[:2] == [6, 5] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py new file mode 100644 index 0000000..5cc5a38 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/tests/test_vertex_cover.py @@ -0,0 +1,68 @@ +import networkx as nx +from networkx.algorithms.approximation import min_weighted_vertex_cover + + +def is_cover(G, node_cover): + return all({u, v} & node_cover for u, v in G.edges()) + + +class TestMWVC: + """Unit tests for the approximate minimum weighted vertex cover + function, + :func:`~networkx.algorithms.approximation.vertex_cover.min_weighted_vertex_cover`. + + """ + + def test_unweighted_directed(self): + # Create a star graph in which half the nodes are directed in + # and half are directed out. 
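+        # Node 0 is incident to every edge, so {0} by itself covers the
+        # graph, which is why the cover below is expected to have size 1.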
+ G = nx.DiGraph() + G.add_edges_from((0, v) for v in range(1, 26)) + G.add_edges_from((v, 0) for v in range(26, 51)) + cover = min_weighted_vertex_cover(G) + assert 1 == len(cover) + assert is_cover(G, cover) + + def test_unweighted_undirected(self): + # create a simple star graph + size = 50 + sg = nx.star_graph(size) + cover = min_weighted_vertex_cover(sg) + assert 1 == len(cover) + assert is_cover(sg, cover) + + def test_weighted(self): + wg = nx.Graph() + wg.add_node(0, weight=10) + wg.add_node(1, weight=1) + wg.add_node(2, weight=1) + wg.add_node(3, weight=1) + wg.add_node(4, weight=1) + + wg.add_edge(0, 1) + wg.add_edge(0, 2) + wg.add_edge(0, 3) + wg.add_edge(0, 4) + + wg.add_edge(1, 2) + wg.add_edge(2, 3) + wg.add_edge(3, 4) + wg.add_edge(4, 1) + + cover = min_weighted_vertex_cover(wg, weight="weight") + csum = sum(wg.nodes[node]["weight"] for node in cover) + assert 4 == csum + assert is_cover(wg, cover) + + def test_unweighted_self_loop(self): + slg = nx.Graph() + slg.add_node(0) + slg.add_node(1) + slg.add_node(2) + + slg.add_edge(0, 1) + slg.add_edge(2, 2) + + cover = min_weighted_vertex_cover(slg) + assert 2 == len(cover) + assert is_cover(slg, cover) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py new file mode 100644 index 0000000..0c6dd52 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/traveling_salesman.py @@ -0,0 +1,1508 @@ +""" +================================= +Travelling Salesman Problem (TSP) +================================= + +Implementation of approximate algorithms +for solving the TSP. + +Categories of algorithms which are implemented: + +- Christofides (provides a 3/2-approximation of TSP) +- Greedy +- Simulated Annealing (SA) +- Threshold Accepting (TA) +- Asadpour Asymmetric Traveling Salesman Algorithm + +Given the weights (distances) between all pairs of points that a +salesman has to visit, the Travelling Salesman Problem tries to +find a route such that: + +- The total distance (cost) which the salesman travels is minimized. +- The salesman returns to the starting point. +- Note that for a complete graph, the salesman visits each point once. + +The function `traveling_salesman_problem` allows for incomplete +graphs by finding all-pairs shortest paths, effectively converting +the problem to a complete graph problem. It calls one of the +approximate methods on that problem and then converts the result +back to the original graph using the previously found shortest paths. + +TSP is an NP-hard problem in combinatorial optimization, +important in operations research and theoretical computer science. + +http://en.wikipedia.org/wiki/Travelling_salesman_problem +""" + +import math + +import networkx as nx +from networkx.algorithms.tree.mst import random_spanning_tree +from networkx.utils import not_implemented_for, pairwise, py_random_state + +__all__ = [ + "traveling_salesman_problem", + "christofides", + "asadpour_atsp", + "greedy_tsp", + "simulated_annealing_tsp", + "threshold_accepting_tsp", +] + + +def swap_two_nodes(soln, seed): + """Swap two nodes in `soln` to give a neighbor solution. + + Parameters + ---------- + soln : list of nodes + Current cycle of nodes + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + list + The solution after move is applied.
(A neighbor solution.) + + Notes + ----- + This function assumes that the incoming list `soln` is a cycle + (that the first and last element are the same) and also that + we don't want any move to change the first node in the list + (and thus not the last node either). + + The input list is changed as well as returned. Make a copy if needed. + + See Also + -------- + move_one_node + """ + a, b = seed.sample(range(1, len(soln) - 1), k=2) + soln[a], soln[b] = soln[b], soln[a] + return soln + + +def move_one_node(soln, seed): + """Move one node to another position to give a neighbor solution. + + The node to move and the position to move to are chosen randomly. + The first and last nodes are left untouched as soln must be a cycle + starting at that node. + + Parameters + ---------- + soln : list of nodes + Current cycle of nodes + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + list + The solution after move is applied. (A neighbor solution.) + + Notes + ----- + This function assumes that the incoming list `soln` is a cycle + (that the first and last element are the same) and also that + we don't want any move to change the first node in the list + (and thus not the last node either). + + The input list is changed as well as returned. Make a copy if needed. + + See Also + -------- + swap_two_nodes + """ + a, b = seed.sample(range(1, len(soln) - 1), k=2) + soln.insert(b, soln.pop(a)) + return soln + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def christofides(G, weight="weight", tree=None): + """Approximate a solution of the traveling salesman problem + + Compute a 3/2-approximation of the traveling salesman problem + in a complete undirected graph using Christofides [1]_ algorithm. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted undirected graph. + The distance between all pairs of nodes should be included. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + tree : NetworkX graph or None (default: None) + A minimum spanning tree of G. Or, if None, the minimum spanning + tree is computed using :func:`networkx.minimum_spanning_tree` + + Returns + ------- + list + List of nodes in `G` along a cycle with a 3/2-approximation of + the minimal Hamiltonian cycle. + + References + ---------- + .. [1] Christofides, Nicos. "Worst-case analysis of a new heuristic for + the travelling salesman problem." No. RR-388. Carnegie-Mellon Univ + Pittsburgh Pa Management Sciences Research Group, 1976. + """ + # Remove selfloops if necessary + loop_nodes = nx.nodes_with_selfloops(G) + try: + node = next(loop_nodes) + except StopIteration: + pass + else: + G = G.copy() + G.remove_edge(node, node) + G.remove_edges_from((n, n) for n in loop_nodes) + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. 
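+    # (Editorial note: selfloops were removed just above, so the degree
+    # comparison below is safe; every node of a complete graph must have
+    # exactly N = len(G) - 1 neighbors.)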
+ if any(len(nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if tree is None: + tree = nx.minimum_spanning_tree(G, weight=weight) + L = G.copy() + L.remove_nodes_from([v for v, degree in tree.degree if not (degree % 2)]) + MG = nx.MultiGraph() + MG.add_edges_from(tree.edges) + edges = nx.min_weight_matching(L, weight=weight) + MG.add_edges_from(edges) + return _shortcutting(nx.eulerian_circuit(MG)) + + +def _shortcutting(circuit): + """Remove duplicate nodes in the path""" + nodes = [] + for u, v in circuit: + if v in nodes: + continue + if not nodes: + nodes.append(u) + nodes.append(v) + nodes.append(nodes[0]) + return nodes + + +@nx._dispatchable(edge_attrs="weight") +def traveling_salesman_problem( + G, weight="weight", nodes=None, cycle=True, method=None, **kwargs +): + """Find the shortest path in `G` connecting specified nodes + + This function allows an approximate solution to the traveling salesman + problem on networks that are not complete graphs and/or where the + salesman does not need to visit all nodes. + + This function proceeds in two steps. First, it creates a complete + graph using the all-pairs shortest_paths between nodes in `nodes`. + Edge weights in the new graph are the lengths of the paths + between each pair of nodes in the original graph. + Second, an algorithm (default: `christofides` for undirected and + `asadpour_atsp` for directed) is used to approximate the minimal Hamiltonian + cycle on this new graph. The available algorithms are: + + - christofides + - greedy_tsp + - simulated_annealing_tsp + - threshold_accepting_tsp + - asadpour_atsp + + Once the Hamiltonian Cycle is found, this function post-processes to + accommodate the structure of the original graph. If `cycle` is ``False``, + the biggest weight edge is removed to make a Hamiltonian path. + Then each edge on the new complete graph used for that analysis is + replaced by the shortest_path between those nodes on the original graph. + If the input graph `G` includes edges with weights that do not adhere to + the triangle inequality, such as when `G` is not a complete graph (i.e., + the length of non-existent edges is infinity), then the returned path may + contain some repeating nodes (other than the starting node). + + Parameters + ---------- + G : NetworkX graph + A possibly weighted graph + + nodes : collection of nodes (default=G.nodes) + collection (list, set, etc.) of nodes to visit + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + cycle : bool (default: True) + Indicates whether a cycle should be returned, or a path. + Note: the cycle is the approximate minimal cycle. + The path simply removes the biggest edge in that cycle. + + method : function (default: None) + A function that returns a cycle on all nodes and approximates + the solution to the traveling salesman problem on a complete + graph. The returned cycle is then used to find a corresponding + solution on `G`. `method` should be callable; take inputs + `G`, and `weight`; and return a list of nodes along the cycle. + + Provided options include :func:`christofides`, :func:`greedy_tsp`, + :func:`simulated_annealing_tsp` and :func:`threshold_accepting_tsp`. + + If `method is None`: use :func:`christofides` for undirected `G` and + :func:`asadpour_atsp` for directed `G`. + + **kwargs : dict + Other keyword arguments to be passed to the `method` function passed in.
+ + Returns + ------- + list + List of nodes in `G` along a path with an approximation of the minimal + path through `nodes`. + + Raises + ------ + NetworkXError + If `G` is a directed graph it has to be strongly connected or the + complete version cannot be generated. + + Examples + -------- + >>> tsp = nx.approximation.traveling_salesman_problem + >>> G = nx.cycle_graph(9) + >>> G[4][5]["weight"] = 5 # all other weights are 1 + >>> tsp(G, nodes=[3, 6]) + [3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3] + >>> path = tsp(G, cycle=False) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + + While no longer required, you can still build (curry) your own function + to provide parameter values to the methods. + + >>> SA_tsp = nx.approximation.simulated_annealing_tsp + >>> method = lambda G, weight: SA_tsp(G, "greedy", weight=weight, temp=500) + >>> path = tsp(G, cycle=False, method=method) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + + Otherwise, pass other keyword arguments directly into the tsp function. + + >>> path = tsp( + ... G, + ... cycle=False, + ... method=nx.approximation.simulated_annealing_tsp, + ... init_cycle="greedy", + ... temp=500, + ... ) + >>> path in ([4, 3, 2, 1, 0, 8, 7, 6, 5], [5, 6, 7, 8, 0, 1, 2, 3, 4]) + True + """ + if method is None: + if G.is_directed(): + method = asadpour_atsp + else: + method = christofides + if nodes is None: + nodes = list(G.nodes) + + dist = {} + path = {} + for n, (d, p) in nx.all_pairs_dijkstra(G, weight=weight): + dist[n] = d + path[n] = p + + if G.is_directed(): + # If the graph is not strongly connected, raise an exception + if not nx.is_strongly_connected(G): + raise nx.NetworkXError("G is not strongly connected") + GG = nx.DiGraph() + else: + GG = nx.Graph() + for u in nodes: + for v in nodes: + if u == v: + continue + # Ensure that the weight attribute on GG has the + # same name as the input graph + GG.add_edge(u, v, **{weight: dist[u][v]}) + + best_GG = method(GG, weight=weight, **kwargs) + + if not cycle: + # find and remove the biggest edge + (u, v) = max(pairwise(best_GG), key=lambda x: dist[x[0]][x[1]]) + pos = best_GG.index(u) + 1 + while best_GG[pos] != v: + pos = best_GG[pos:].index(u) + 1 + best_GG = best_GG[pos:-1] + best_GG[:pos] + + best_path = [] + for u, v in pairwise(best_GG): + best_path.extend(path[u][v][:-1]) + best_path.append(v) + return best_path + + +@not_implemented_for("undirected") +@py_random_state(2) +@nx._dispatchable(edge_attrs="weight", mutates_input=True) +def asadpour_atsp(G, weight="weight", seed=None, source=None): + """ + Returns an approximate solution to the traveling salesman problem. + + This approximate solution is one of the best known approximations for the + asymmetric traveling salesman problem developed by Asadpour et al, + [1]_. The algorithm first solves the Held-Karp relaxation to find a lower + bound for the weight of the cycle. Next, it constructs an exponential + distribution of undirected spanning trees where the probability of an + edge being in the tree corresponds to the weight of that edge using a + maximum entropy rounding scheme. Next we sample that distribution + $2 \\lceil \\ln n \\rceil$ times and save the minimum sampled tree once the + direction of the arcs is added back to the edges. Finally, we augment + then short circuit that graph to find the approximate tour for the + salesman. + + Parameters + ---------- + G : nx.DiGraph + The graph should be a complete weighted directed graph. 
The + distance between all paris of nodes should be included and the triangle + inequality should hold. That is, the direct edge between any two nodes + should be the path of least cost. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + source : node label (default=`None`) + If given, return the cycle starting and ending at the given node. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman can follow to minimize + the total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete or has less than two nodes, the algorithm raises + an exception. + + NetworkXError + If `source` is not `None` and is not a node in `G`, the algorithm raises + an exception. + + NetworkXNotImplemented + If `G` is an undirected graph. + + References + ---------- + .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi, + An o(log n/log log n)-approximation algorithm for the asymmetric + traveling salesman problem, Operations research, 65 (2017), + pp. 1043–1061 + + Examples + -------- + >>> import networkx as nx + >>> import networkx.algorithms.approximation as approx + >>> G = nx.complete_graph(3, create_using=nx.DiGraph) + >>> nx.set_edge_attributes( + ... G, + ... {(0, 1): 2, (1, 2): 2, (2, 0): 2, (0, 2): 1, (2, 1): 1, (1, 0): 1}, + ... "weight", + ... ) + >>> tour = approx.asadpour_atsp(G, source=0) + >>> tour + [0, 2, 1, 0] + """ + from math import ceil, exp + from math import log as ln + + # Check that G is a complete graph + N = len(G) - 1 + if N < 1: + raise nx.NetworkXError("G must have at least two nodes") + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G is not a complete DiGraph") + # Check that the source vertex, if given, is in the graph + if source is not None and source not in G.nodes: + raise nx.NetworkXError("Given source node not in G.") + # handle 2 node case + if N == 1: + if source is None: + return list(G) + return [source, next(n for n in G if n != source)] + + opt_hk, z_star = held_karp_ascent(G, weight) + + # Test to see if the ascent method found an integer solution or a fractional + # solution. 
If it is integral then z_star is a nx.Graph, otherwise it is + # a dict + if not isinstance(z_star, dict): + # Here we are using the shortcutting method to go from the list of edges + # returned from eulerian_circuit to a list of nodes + return _shortcutting(nx.eulerian_circuit(z_star, source=source)) + + # Create the undirected support of z_star + z_support = nx.MultiGraph() + for u, v in z_star: + if (u, v) not in z_support.edges: + edge_weight = min(G[u][v][weight], G[v][u][weight]) + z_support.add_edge(u, v, **{weight: edge_weight}) + + # Create the exponential distribution of spanning trees + gamma = spanning_tree_distribution(z_support, z_star) + + # Write the lambda values to the edges of z_support + z_support = nx.Graph(z_support) + lambda_dict = {(u, v): exp(gamma[(u, v)]) for u, v in z_support.edges()} + nx.set_edge_attributes(z_support, lambda_dict, "weight") + del gamma, lambda_dict + + # Sample 2 * ceil( ln(n) ) spanning trees and record the minimum one + minimum_sampled_tree = None + minimum_sampled_tree_weight = math.inf + for _ in range(2 * ceil(ln(G.number_of_nodes()))): + sampled_tree = random_spanning_tree(z_support, "weight", seed=seed) + sampled_tree_weight = sampled_tree.size(weight) + if sampled_tree_weight < minimum_sampled_tree_weight: + minimum_sampled_tree = sampled_tree.copy() + minimum_sampled_tree_weight = sampled_tree_weight + + # Orient the edges in that tree to keep the cost of the tree the same. + t_star = nx.MultiDiGraph() + for u, v, d in minimum_sampled_tree.edges(data=weight): + if d == G[u][v][weight]: + t_star.add_edge(u, v, **{weight: d}) + else: + t_star.add_edge(v, u, **{weight: d}) + + # Find the node demands needed to neutralize the flow of t_star in G + node_demands = {n: t_star.out_degree(n) - t_star.in_degree(n) for n in t_star} + nx.set_node_attributes(G, node_demands, "demand") + + # Find the min_cost_flow + flow_dict = nx.min_cost_flow(G, "demand") + + # Build the flow into t_star + for source, values in flow_dict.items(): + for target in values: + if (source, target) not in t_star.edges and values[target] > 0: + # IF values[target] > 0 we have to add that many edges + for _ in range(values[target]): + t_star.add_edge(source, target) + + # Return the shortcut eulerian circuit + circuit = nx.eulerian_circuit(t_star, source=source) + return _shortcutting(circuit) + + +@nx._dispatchable(edge_attrs="weight", mutates_input=True, returns_graph=True) +def held_karp_ascent(G, weight="weight"): + """ + Minimizes the Held-Karp relaxation of the TSP for `G` + + Solves the Held-Karp relaxation of the input complete digraph and scales + the output solution for use in the Asadpour [1]_ ASTP algorithm. + + The Held-Karp relaxation defines the lower bound for solutions to the + ATSP, although it does return a fractional solution. This is used in the + Asadpour algorithm as an initial solution which is later rounded to a + integral tree within the spanning tree polytopes. This function solves + the relaxation with the branch and bound method in [2]_. + + Parameters + ---------- + G : nx.DiGraph + The graph should be a complete weighted directed graph. + The distance between all paris of nodes should be included. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. 
+
+    Returns
+    -------
+    OPT : float
+        The cost for the optimal solution to the Held-Karp relaxation
+    z : dict or nx.Graph
+        A symmetrized and scaled version of the optimal solution to the
+        Held-Karp relaxation for use in the Asadpour algorithm.
+
+        If an integral solution is found, then that is an optimal solution for
+        the ATSP problem and that is returned instead.
+
+    References
+    ----------
+    .. [1] A. Asadpour, M. X. Goemans, A. Madry, S. O. Gharan, and A. Saberi,
+       An o(log n/log log n)-approximation algorithm for the asymmetric
+       traveling salesman problem, Operations research, 65 (2017),
+       pp. 1043–1061
+
+    .. [2] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+       spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+       pp. 1138-1162
+    """
+    import numpy as np
+    import scipy as sp
+
+    def k_pi():
+        """
+        Find the set of minimum 1-Arborescences for G at point pi.
+
+        Returns
+        -------
+        Set
+            The set of minimum 1-Arborescences
+        """
+        # Create a copy of G without vertex 1.
+        G_1 = G.copy()
+        minimum_1_arborescences = set()
+        minimum_1_arborescence_weight = math.inf
+
+        # node is node '1' in the Held and Karp paper
+        n = next(G.__iter__())
+        G_1.remove_node(n)
+
+        # Iterate over the spanning arborescences of the graph until we know
+        # that we have found the minimum 1-arborescences. My proposed strategy
+        # is to find the most expensive root to connect to from 'node 1' and
+        # the least expensive one. We then iterate over arborescences until
+        # the cost of the basic arborescence is the cost of the minimum one
+        # plus the difference between the most and least expensive roots;
+        # that way the cost of connecting 'node 1' will by definition not be
+        # minimal.
+        min_root = {"node": None, weight: math.inf}
+        max_root = {"node": None, weight: -math.inf}
+        for u, v, d in G.edges(n, data=True):
+            if d[weight] < min_root[weight]:
+                min_root = {"node": v, weight: d[weight]}
+            if d[weight] > max_root[weight]:
+                max_root = {"node": v, weight: d[weight]}
+
+        min_in_edge = min(G.in_edges(n, data=True), key=lambda x: x[2][weight])
+        min_root[weight] = min_root[weight] + min_in_edge[2][weight]
+        max_root[weight] = max_root[weight] + min_in_edge[2][weight]
+
+        min_arb_weight = math.inf
+        for arb in nx.ArborescenceIterator(G_1):
+            arb_weight = arb.size(weight)
+            if min_arb_weight == math.inf:
+                min_arb_weight = arb_weight
+            elif arb_weight > min_arb_weight + max_root[weight] - min_root[weight]:
+                break
+            # We have to pick the root node of the arborescence for the out
+            # edge of the first vertex as that is the only node without an
+            # edge directed into it.
+            for N, deg in arb.in_degree:
+                if deg == 0:
+                    # root found
+                    arb.add_edge(n, N, **{weight: G[n][N][weight]})
+                    arb_weight += G[n][N][weight]
+                    break
+
+            # We can pick the minimum weight in-edge for the vertex with
+            # a cycle. If there are multiple edges with the same minimum
+            # weight, we need to add all of them.
+            #
+            # Delete the edge (N, n) so that we cannot pick it.
+            edge_data = G[N][n]
+            G.remove_edge(N, n)
+            min_weight = min(G.in_edges(n, data=weight), key=lambda x: x[2])[2]
+            min_edges = [
+                (u, v, d) for u, v, d in G.in_edges(n, data=weight) if d == min_weight
+            ]
+            for u, v, d in min_edges:
+                new_arb = arb.copy()
+                new_arb.add_edge(u, v, **{weight: d})
+                new_arb_weight = arb_weight + d
+                # Check the weight of the new arborescence: if it is a
+                # new minimum, clear all of the old potential minimum
+                # 1-arborescences and add this as the only one. If its
+                # weight is above the known minimum, do not add it.
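+                # For example, if the current minimum weight is 10, a
+                # new_arb_weight of 8 clears the set and makes this
+                # arborescence the sole candidate; a weight of exactly 10 is
+                # added alongside the existing minima; and a weight of 12 is
+                # simply skipped by the two checks below.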
+                if new_arb_weight < minimum_1_arborescence_weight:
+                    minimum_1_arborescences.clear()
+                    minimum_1_arborescence_weight = new_arb_weight
+                # We have a 1-arborescence, add it to the set
+                if new_arb_weight == minimum_1_arborescence_weight:
+                    minimum_1_arborescences.add(new_arb)
+            G.add_edge(N, n, **edge_data)
+
+        return minimum_1_arborescences
+
+    def direction_of_ascent():
+        """
+        Find the direction of ascent at point pi.
+
+        See [1]_ for more information.
+
+        Returns
+        -------
+        dict
+            A mapping from the nodes of the graph which represents the direction
+            of ascent.
+
+        References
+        ----------
+        .. [1] M. Held, R. M. Karp, The traveling-salesman problem and minimum
+           spanning trees, Operations Research, 1970-11-01, Vol. 18 (6),
+           pp. 1138-1162
+        """
+        # 1. Set d equal to the zero n-vector.
+        d = {}
+        for n in G:
+            d[n] = 0
+        del n
+        # 2. Find a 1-Arborescence T^k such that k is in K(pi, d).
+        minimum_1_arborescences = k_pi()
+        while True:
+            # Reduce K(pi) to K(pi, d).
+            # Find the arborescence in K(pi) which increases the least in
+            # direction d.
+            min_k_d_weight = math.inf
+            min_k_d = None
+            for arborescence in minimum_1_arborescences:
+                weighted_cost = 0
+                for n, deg in arborescence.degree:
+                    weighted_cost += d[n] * (deg - 2)
+                if weighted_cost < min_k_d_weight:
+                    min_k_d_weight = weighted_cost
+                    min_k_d = arborescence
+
+            # 3. If sum of d_i * v_{i, k} is greater than zero, terminate
+            if min_k_d_weight > 0:
+                return d, min_k_d
+            # 4. d_i = d_i + v_{i, k}
+            for n, deg in min_k_d.degree:
+                d[n] += deg - 2
+            # Check that we do not need to terminate because the direction
+            # of ascent does not exist. This is done with linear
+            # programming.
+            c = np.full(len(minimum_1_arborescences), -1, dtype=int)
+            a_eq = np.empty((len(G) + 1, len(minimum_1_arborescences)), dtype=int)
+            b_eq = np.zeros(len(G) + 1, dtype=int)
+            b_eq[len(G)] = 1
+            for arb_count, arborescence in enumerate(minimum_1_arborescences):
+                n_count = len(G) - 1
+                for n, deg in arborescence.degree:
+                    a_eq[n_count][arb_count] = deg - 2
+                    n_count -= 1
+                a_eq[len(G)][arb_count] = 1
+            program_result = sp.optimize.linprog(
+                c, A_eq=a_eq, b_eq=b_eq, method="highs-ipm"
+            )
+            # If a feasible solution exists, then a direction of ascent
+            # does not.
+            if program_result.success:
+                # There is no direction of ascent
+                return None, minimum_1_arborescences
+
+        # 5. GO TO 2
+
+    def find_epsilon(k, d):
+        """
+        Given the direction of ascent at pi, find the maximum distance we can go
+        in that direction.
+
+        Parameters
+        ----------
+        k : nx.DiGraph
+            The 1-arborescence with the minimum rate of increase in the
+            direction of ascent
+
+        d : dict
+            The direction of ascent
+
+        Returns
+        -------
+        float
+            The distance we can travel in direction `d`
+        """
+        min_epsilon = math.inf
+        for e_u, e_v, e_w in G.edges(data=weight):
+            if (e_u, e_v) in k.edges:
+                continue
+            # There is a condition which MUST be true for an edge to be a
+            # valid substitute: the substitute edge within k is the one with
+            # the same head vertex. This can be checked rather simply.
+            #
+            # Find the edge within k which is the substitute. Because k is a
+            # 1-arborescence, we know that there is only one such edge
+            # leading into every vertex.
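+            #
+            # The crossover value computed below follows from the ascent
+            # step: after moving epsilon in direction d, each edge weight
+            # changes as w(u, v) -> w(u, v) + epsilon * d[u]. Setting
+            #     sub_w + epsilon * d[sub_u] == e_w + epsilon * d[e_u]
+            # and solving for epsilon gives (sub_w - e_w) / (d[e_u] - d[sub_u]),
+            # the distance at which the candidate edge ties the substitute.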
+            if len(k.in_edges(e_v, data=weight)) > 1:
+                raise Exception
+            sub_u, sub_v, sub_w = next(k.in_edges(e_v, data=weight).__iter__())
+            k.add_edge(e_u, e_v, **{weight: e_w})
+            k.remove_edge(sub_u, sub_v)
+            if (
+                max(d for n, d in k.in_degree()) <= 1
+                and len(G) == k.number_of_edges()
+                and nx.is_weakly_connected(k)
+            ):
+                # Ascent method calculation
+                if d[sub_u] == d[e_u] or sub_w == e_w:
+                    # Revert to the original graph
+                    k.remove_edge(e_u, e_v)
+                    k.add_edge(sub_u, sub_v, **{weight: sub_w})
+                    continue
+                epsilon = (sub_w - e_w) / (d[e_u] - d[sub_u])
+                if 0 < epsilon < min_epsilon:
+                    min_epsilon = epsilon
+            # Revert to the original graph
+            k.remove_edge(e_u, e_v)
+            k.add_edge(sub_u, sub_v, **{weight: sub_w})
+
+        return min_epsilon
+
+    # I have to know that the elements in pi correspond to the correct elements
+    # in the direction of ascent, even if the node labels are not integers.
+    # Thus, I will use dictionaries to make that mapping.
+    pi_dict = {}
+    for n in G:
+        pi_dict[n] = 0
+    del n
+    original_edge_weights = {}
+    for u, v, d in G.edges(data=True):
+        original_edge_weights[(u, v)] = d[weight]
+    dir_ascent, k_d = direction_of_ascent()
+    while dir_ascent is not None:
+        max_distance = find_epsilon(k_d, dir_ascent)
+        for n, v in dir_ascent.items():
+            pi_dict[n] += max_distance * v
+        for u, v, d in G.edges(data=True):
+            d[weight] = original_edge_weights[(u, v)] + pi_dict[u]
+        dir_ascent, k_d = direction_of_ascent()
+        nx._clear_cache(G)
+    # k_d is no longer an individual 1-arborescence but rather a set of
+    # minimal 1-arborescences at the maximum point of the polytope and should
+    # be reflected as such
+    k_max = k_d
+
+    # Search for a cycle within k_max. If a cycle exists, return it as the
+    # solution
+    for k in k_max:
+        if len([n for n in k if k.degree(n) == 2]) == G.order():
+            # Tour found
+            # TODO: this branch does not restore original_edge_weights of G!
+            return k.size(weight), k
+
+    # Write the original edge weights back to G and every member of k_max at
+    # the maximum point. Also average the number of times that edge appears in
+    # the set of minimal 1-arborescences.
+    x_star = {}
+    size_k_max = len(k_max)
+    for u, v, d in G.edges(data=True):
+        edge_count = 0
+        d[weight] = original_edge_weights[(u, v)]
+        for k in k_max:
+            if (u, v) in k.edges():
+                edge_count += 1
+                k[u][v][weight] = original_edge_weights[(u, v)]
+        x_star[(u, v)] = edge_count / size_k_max
+    # Now symmetrize the edges in x_star and scale them according to (5) in
+    # reference [1]
+    z_star = {}
+    scale_factor = (G.order() - 1) / G.order()
+    for u, v in x_star:
+        frequency = x_star[(u, v)] + x_star[(v, u)]
+        if frequency > 0:
+            z_star[(u, v)] = scale_factor * frequency
+    del x_star
+    # Return the optimal weight and the z dict
+    return next(k_max.__iter__()).size(weight), z_star
+
+
+@nx._dispatchable
+def spanning_tree_distribution(G, z):
+    """
+    Find the Asadpour exponential distribution of spanning trees.
+
+    Solves the Maximum Entropy Convex Program in the Asadpour algorithm [1]_
+    using the approach in section 7 to build an exponential distribution of
+    undirected spanning trees.
+
+    This algorithm ensures that the probability of any edge in a spanning
+    tree is proportional to the sum of the probabilities of the trees
+    containing that edge over the sum of the probabilities of all spanning
+    trees of the graph.
+
+    Parameters
+    ----------
+    G : nx.MultiGraph
+        The undirected support graph for the Held-Karp relaxation
+
+    z : dict
+        The output of `held_karp_ascent()`, a scaled version of the Held-Karp
+        solution.
+
+    Returns
+    -------
+    gamma : dict
+        The probability distribution which approximately preserves the marginal
+        probabilities of `z`.
+    """
+    from math import exp
+    from math import log as ln
+
+    def q(e):
+        """
+        The value of q(e), as described in the Asadpour paper, is "the
+        probability that edge e will be included in a spanning tree T that is
+        chosen with probability proportional to exp(gamma(T))", which
+        basically means that it is the total probability of the edge appearing
+        across the whole distribution.
+
+        Parameters
+        ----------
+        e : tuple
+            The `(u, v)` tuple describing the edge we are interested in
+
+        Returns
+        -------
+        float
+            The probability that a spanning tree chosen according to the
+            current values of gamma will include edge `e`.
+        """
+        # Create the laplacian matrices
+        for u, v, d in G.edges(data=True):
+            d[lambda_key] = exp(gamma[(u, v)])
+        G_Kirchhoff = nx.number_of_spanning_trees(G, weight=lambda_key)
+        G_e = nx.contracted_edge(G, e, self_loops=False)
+        G_e_Kirchhoff = nx.number_of_spanning_trees(G_e, weight=lambda_key)
+
+        # Multiply by the weight of the contracted edge since it is not included
+        # in the total weight of the contracted graph.
+        return exp(gamma[(e[0], e[1])]) * G_e_Kirchhoff / G_Kirchhoff
+
+    # initialize gamma to the zero dict
+    gamma = {}
+    for u, v, _ in G.edges:
+        gamma[(u, v)] = 0
+
+    # set epsilon
+    EPSILON = 0.2
+
+    # pick an edge attribute name that is unlikely to be in the graph
+    lambda_key = "spanning_tree_distribution's secret attribute name for lambda"
+
+    while True:
+        # We need to know that no values of q_e are greater than
+        # (1 + epsilon) * z_e; however, changing one gamma value can increase
+        # the value of a different q_e, so we have to complete the for loop
+        # without changing anything for the condition to be met.
+        in_range_count = 0
+        # Search for an edge with q_e > (1 + epsilon) * z_e
+        for u, v in gamma:
+            e = (u, v)
+            q_e = q(e)
+            z_e = z[e]
+            if q_e > (1 + EPSILON) * z_e:
+                delta = ln(
+                    (q_e * (1 - (1 + EPSILON / 2) * z_e))
+                    / ((1 - q_e) * (1 + EPSILON / 2) * z_e)
+                )
+                gamma[e] -= delta
+                # Check that delta had the desired effect
+                new_q_e = q(e)
+                desired_q_e = (1 + EPSILON / 2) * z_e
+                if round(new_q_e, 8) != round(desired_q_e, 8):
+                    raise nx.NetworkXError(
+                        f"Unable to modify probability for edge ({u}, {v})"
+                    )
+            else:
+                in_range_count += 1
+        # Check if the for loop terminated without changing any gamma
+        if in_range_count == len(gamma):
+            break
+
+    # Remove the new edge attributes
+    for _, _, d in G.edges(data=True):
+        if lambda_key in d:
+            del d[lambda_key]
+
+    return gamma
+
+
+@nx._dispatchable(edge_attrs="weight")
+def greedy_tsp(G, weight="weight", source=None):
+    """Return a low cost cycle starting at `source`.
+
+    This approximates a solution to the traveling salesman problem.
+    It finds a cycle through all the nodes that a salesman can follow
+    while keeping the total distance low.
+    It uses a simple greedy algorithm.
+    In essence, given a source point, this function returns a large cycle
+    for which the total cost of the cycle is minimized.
+
+    Parameters
+    ----------
+    G : Graph
+        The Graph should be a complete weighted undirected graph.
+        The distance between all pairs of nodes should be included.
+
+    weight : string, optional (default="weight")
+        Edge data key corresponding to the edge weight.
+        If any edge does not have this attribute the weight is set to 1.
+
+    source : node, optional (default: first node in list(G))
+        Starting node.
If None, defaults to ``next(iter(G))`` + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete, the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) + >>> cycle = approx.greedy_tsp(G, source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + + Notes + ----- + This implementation of a greedy algorithm is based on the following: + + - The algorithm adds a node to the solution at every iteration. + - The algorithm selects a node not already in the cycle whose connection + to the previous node adds the least cost to the cycle. + + A greedy algorithm does not always give the best solution. + However, it can construct a first feasible solution which can + be passed as a parameter to an iterative improvement algorithm such + as Simulated Annealing, or Threshold Accepting. + + Time complexity: It has a running time $O(|V|^2)$ + """ + # Check that G is a complete graph + N = len(G) - 1 + # This check ignores selfloops which is what we want here. + if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()): + raise nx.NetworkXError("G must be a complete graph.") + + if source is None: + source = nx.utils.arbitrary_element(G) + + if G.number_of_nodes() == 2: + neighbor = next(G.neighbors(source)) + return [source, neighbor, source] + + nodeset = set(G) + nodeset.remove(source) + cycle = [source] + next_node = source + while nodeset: + nbrdict = G[next_node] + next_node = min(nodeset, key=lambda n: nbrdict[n].get(weight, 1)) + cycle.append(next_node) + nodeset.remove(next_node) + cycle.append(cycle[0]) + return cycle + + +@py_random_state(9) +@nx._dispatchable(edge_attrs="weight") +def simulated_annealing_tsp( + G, + init_cycle, + weight="weight", + source=None, + temp=100, + move="1-1", + max_iterations=10, + N_inner=100, + alpha=0.01, + seed=None, +): + """Returns an approximate solution to the traveling salesman problem. + + This function uses simulated annealing to approximate the minimal cost + cycle through the nodes. Starting from a suboptimal solution, simulated + annealing perturbs that solution, occasionally accepting changes that make + the solution worse to escape from a locally optimal solution. The chance + of accepting such changes decreases over the iterations to encourage + an optimal result. In summary, the function returns a cycle starting + at `source` for which the total cost is minimized. It also returns the cost. + + The chance of accepting a proposed change is related to a parameter called + the temperature (annealing has a physical analogue of steel hardening + as it cools). As the temperature is reduced, the chance of moves that + increase cost goes down. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted graph. + The distance between all pairs of nodes should be included. + + init_cycle : list of all nodes or "greedy" + The initial solution (a cycle through all nodes returning to the start). 
+ This argument has no default to make you think about it. + If "greedy", use `greedy_tsp(G, weight)`. + Other common starting cycles are `list(G) + [next(iter(G))]` or the final + result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + source : node, optional (default: first node in list(G)) + Starting node. If None, defaults to ``next(iter(G))`` + + temp : int, optional (default=100) + The algorithm's temperature parameter. It represents the initial + value of temperature + + move : "1-1" or "1-0" or function, optional (default="1-1") + Indicator of what move to use when finding new trial solutions. + Strings indicate two special built-in moves: + + - "1-1": 1-1 exchange which transposes the position + of two elements of the current solution. + The function called is :func:`swap_two_nodes`. + For example if we apply 1-1 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can get the following by the transposition of 1 and 4 elements: + ``A' = [3, 2, 4, 1, 3]`` + - "1-0": 1-0 exchange which moves an node in the solution + to a new position. + The function called is :func:`move_one_node`. + For example if we apply 1-0 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can transfer the fourth element to the second position: + ``A' = [3, 4, 2, 1, 3]`` + + You may provide your own functions to enact a move from + one solution to a neighbor solution. The function must take + the solution as input along with a `seed` input to control + random number generation (see the `seed` input here). + Your function should maintain the solution as a cycle with + equal first and last node and all others appearing once. + Your function should return the new solution. + + max_iterations : int, optional (default=10) + Declared done when this number of consecutive iterations of + the outer loop occurs without any change in the best cost solution. + + N_inner : int, optional (default=100) + The number of iterations of the inner loop. + + alpha : float between (0, 1), optional (default=0.01) + Percentage of temperature decrease in each iteration + of outer loop + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... 
+    ...     )
+    >>> cycle = approx.simulated_annealing_tsp(G, "greedy", source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+    >>> incycle = ["D", "B", "A", "C", "D"]
+    >>> cycle = approx.simulated_annealing_tsp(G, incycle, source="D")
+    >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle))
+    >>> cycle
+    ['D', 'C', 'B', 'A', 'D']
+    >>> cost
+    31
+
+    Notes
+    -----
+    Simulated Annealing is a metaheuristic local search algorithm.
+    The main characteristic of this algorithm is that it accepts
+    even solutions which lead to the increase of the cost in order
+    to escape from low quality local optimal solutions.
+
+    This algorithm needs an initial solution. If not provided, it is
+    constructed by a simple greedy algorithm. At every iteration, the
+    algorithm thoughtfully selects a neighbor solution.
+    Consider $c(x)$ the cost of the current solution and $c(x')$ the cost
+    of a neighbor solution.
+    If $c(x') - c(x) <= 0$ then the neighbor solution becomes the current
+    solution for the next iteration. Otherwise, the algorithm accepts
+    the neighbor solution with probability $p = \exp(-(c(x') - c(x)) / temp)$;
+    if the move is rejected, the current solution is retained.
+
+    `temp` is a parameter of the algorithm and represents temperature.
+
+    Time complexity:
+    For $N_i$ iterations of the inner loop and $N_o$ iterations of the
+    outer loop, this algorithm has running time $O(N_i * N_o * |V|)$.
+
+    For more information and how the algorithm is inspired see:
+    http://en.wikipedia.org/wiki/Simulated_annealing
+    """
+    if move == "1-1":
+        move = swap_two_nodes
+    elif move == "1-0":
+        move = move_one_node
+    if init_cycle == "greedy":
+        # Construct an initial solution using a greedy algorithm.
+        cycle = greedy_tsp(G, weight=weight, source=source)
+        if G.number_of_nodes() == 2:
+            return cycle
+
+    else:
+        cycle = list(init_cycle)
+        if source is None:
+            source = cycle[0]
+        elif source != cycle[0]:
+            raise nx.NetworkXError("source must be first node in init_cycle")
+        if cycle[0] != cycle[-1]:
+            raise nx.NetworkXError("init_cycle must be a cycle. (return to start)")
+
+        if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
+            raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
+
+    # Check that G is a complete graph
+    N = len(G) - 1
+    # This check ignores selfloops which is what we want here.
+    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+        raise nx.NetworkXError("G must be a complete graph.")
+
+    if G.number_of_nodes() == 2:
+        neighbor = next(G.neighbors(source))
+        return [source, neighbor, source]
+
+    # Find the cost of initial solution
+    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
+
+    count = 0
+    best_cycle = cycle.copy()
+    best_cost = cost
+    while count <= max_iterations and temp > 0:
+        count += 1
+        for i in range(N_inner):
+            adj_sol = move(cycle, seed)
+            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
+            delta = adj_cost - cost
+            if delta <= 0:
+                # Set current solution the adjacent solution.
+                cycle = adj_sol
+                cost = adj_cost
+
+                if cost < best_cost:
+                    count = 0
+                    best_cycle = cycle.copy()
+                    best_cost = cost
+            else:
+                # Accept even a worse solution with probability p.
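+                # For example, with delta = 2 the acceptance probability is
+                # exp(-2 / temp): about 0.98 at temp = 100 but only about
+                # 0.14 at temp = 1, so uphill moves become rare as the
+                # temperature decays.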
+ p = math.exp(-delta / temp) + if p >= seed.random(): + cycle = adj_sol + cost = adj_cost + temp -= temp * alpha + + return best_cycle + + +@py_random_state(9) +@nx._dispatchable(edge_attrs="weight") +def threshold_accepting_tsp( + G, + init_cycle, + weight="weight", + source=None, + threshold=1, + move="1-1", + max_iterations=10, + N_inner=100, + alpha=0.1, + seed=None, +): + """Returns an approximate solution to the traveling salesman problem. + + This function uses threshold accepting methods to approximate the minimal cost + cycle through the nodes. Starting from a suboptimal solution, threshold + accepting methods perturb that solution, accepting any changes that make + the solution no worse than increasing by a threshold amount. Improvements + in cost are accepted, but so are changes leading to small increases in cost. + This allows the solution to leave suboptimal local minima in solution space. + The threshold is decreased slowly as iterations proceed helping to ensure + an optimum. In summary, the function returns a cycle starting at `source` + for which the total cost is minimized. + + Parameters + ---------- + G : Graph + `G` should be a complete weighted graph. + The distance between all pairs of nodes should be included. + + init_cycle : list or "greedy" + The initial solution (a cycle through all nodes returning to the start). + This argument has no default to make you think about it. + If "greedy", use `greedy_tsp(G, weight)`. + Other common starting cycles are `list(G) + [next(iter(G))]` or the final + result of `simulated_annealing_tsp` when doing `threshold_accepting_tsp`. + + weight : string, optional (default="weight") + Edge data key corresponding to the edge weight. + If any edge does not have this attribute the weight is set to 1. + + source : node, optional (default: first node in list(G)) + Starting node. If None, defaults to ``next(iter(G))`` + + threshold : int, optional (default=1) + The algorithm's threshold parameter. It represents the initial + threshold's value + + move : "1-1" or "1-0" or function, optional (default="1-1") + Indicator of what move to use when finding new trial solutions. + Strings indicate two special built-in moves: + + - "1-1": 1-1 exchange which transposes the position + of two elements of the current solution. + The function called is :func:`swap_two_nodes`. + For example if we apply 1-1 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can get the following by the transposition of 1 and 4 elements: + ``A' = [3, 2, 4, 1, 3]`` + - "1-0": 1-0 exchange which moves an node in the solution + to a new position. + The function called is :func:`move_one_node`. + For example if we apply 1-0 exchange in the solution + ``A = [3, 2, 1, 4, 3]`` + we can transfer the fourth element to the second position: + ``A' = [3, 4, 2, 1, 3]`` + + You may provide your own functions to enact a move from + one solution to a neighbor solution. The function must take + the solution as input along with a `seed` input to control + random number generation (see the `seed` input here). + Your function should maintain the solution as a cycle with + equal first and last node and all others appearing once. + Your function should return the new solution. + + max_iterations : int, optional (default=10) + Declared done when this number of consecutive iterations of + the outer loop occurs without any change in the best cost solution. + + N_inner : int, optional (default=100) + The number of iterations of the inner loop. 
+ + alpha : float between (0, 1), optional (default=0.1) + Percentage of threshold decrease when there is at + least one acceptance of a neighbor solution. + If no inner loop moves are accepted the threshold remains unchanged. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + cycle : list of nodes + Returns the cycle (list of nodes) that a salesman + can follow to minimize total weight of the trip. + + Raises + ------ + NetworkXError + If `G` is not complete the algorithm raises an exception. + + Examples + -------- + >>> from networkx.algorithms import approximation as approx + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... { + ... ("A", "B", 3), + ... ("A", "C", 17), + ... ("A", "D", 14), + ... ("B", "A", 3), + ... ("B", "C", 12), + ... ("B", "D", 16), + ... ("C", "A", 13), + ... ("C", "B", 12), + ... ("C", "D", 4), + ... ("D", "A", 14), + ... ("D", "B", 15), + ... ("D", "C", 2), + ... } + ... ) + >>> cycle = approx.threshold_accepting_tsp(G, "greedy", source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + >>> incycle = ["D", "B", "A", "C", "D"] + >>> cycle = approx.threshold_accepting_tsp(G, incycle, source="D") + >>> cost = sum(G[n][nbr]["weight"] for n, nbr in nx.utils.pairwise(cycle)) + >>> cycle + ['D', 'C', 'B', 'A', 'D'] + >>> cost + 31 + + Notes + ----- + Threshold Accepting is a metaheuristic local search algorithm. + The main characteristic of this algorithm is that it accepts + even solutions which lead to the increase of the cost in order + to escape from low quality local optimal solutions. + + This algorithm needs an initial solution. This solution can be + constructed by a simple greedy algorithm. At every iteration, it + selects thoughtfully a neighbor solution. + Consider $c(x)$ cost of current solution and $c(x')$ cost of + neighbor solution. + If $c(x') - c(x) <= threshold$ then the neighbor solution becomes the current + solution for the next iteration, where the threshold is named threshold. + + In comparison to the Simulated Annealing algorithm, the Threshold + Accepting algorithm does not accept very low quality solutions + (due to the presence of the threshold value). In the case of + Simulated Annealing, even a very low quality solution can + be accepted with probability $p$. + + Time complexity: + It has a running time $O(m * n * |V|)$ where $m$ and $n$ are the number + of times the outer and inner loop run respectively. + + For more information and how algorithm is inspired see: + https://doi.org/10.1016/0021-9991(90)90201-B + + See Also + -------- + simulated_annealing_tsp + + """ + if move == "1-1": + move = swap_two_nodes + elif move == "1-0": + move = move_one_node + if init_cycle == "greedy": + # Construct an initial solution using a greedy algorithm. + cycle = greedy_tsp(G, weight=weight, source=source) + if G.number_of_nodes() == 2: + return cycle + + else: + cycle = list(init_cycle) + if source is None: + source = cycle[0] + elif source != cycle[0]: + raise nx.NetworkXError("source must be first node in init_cycle") + if cycle[0] != cycle[-1]: + raise nx.NetworkXError("init_cycle must be a cycle. 
(return to start)")
+
+    if len(cycle) - 1 != len(G) or len(set(G.nbunch_iter(cycle))) != len(G):
+        raise nx.NetworkXError("init_cycle should be a cycle over all nodes in G.")
+
+    # Check that G is a complete graph
+    N = len(G) - 1
+    # This check ignores selfloops which is what we want here.
+    if any(len(nbrdict) - (n in nbrdict) != N for n, nbrdict in G.adj.items()):
+        raise nx.NetworkXError("G must be a complete graph.")
+
+    if G.number_of_nodes() == 2:
+        neighbor = list(G.neighbors(source))[0]
+        return [source, neighbor, source]
+
+    # Find the cost of initial solution
+    cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(cycle))
+
+    count = 0
+    best_cycle = cycle.copy()
+    best_cost = cost
+    while count <= max_iterations:
+        count += 1
+        accepted = False
+        for i in range(N_inner):
+            adj_sol = move(cycle, seed)
+            adj_cost = sum(G[u][v].get(weight, 1) for u, v in pairwise(adj_sol))
+            delta = adj_cost - cost
+            if delta <= threshold:
+                accepted = True
+
+                # Set current solution the adjacent solution.
+                cycle = adj_sol
+                cost = adj_cost
+
+                if cost < best_cost:
+                    count = 0
+                    best_cycle = cycle.copy()
+                    best_cost = cost
+        if accepted:
+            threshold -= threshold * alpha
+
+    return best_cycle
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py
new file mode 100644
index 0000000..ef1b2f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/treewidth.py
@@ -0,0 +1,255 @@
+"""Functions for computing treewidth decomposition.
+
+Treewidth of an undirected graph is a number associated with the graph.
+It can be defined as the size of the largest vertex set (bag) in a tree
+decomposition of the graph minus one.
+
+`Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
+
+The notions of treewidth and tree decomposition have gained their
+attractiveness partly because many graph and network problems that are
+intractable (e.g., NP-hard) on arbitrary graphs become efficiently
+solvable (e.g., with a linear time algorithm) when the treewidth of the
+input graphs is bounded by a constant [1]_ [2]_.
+
+There are two different functions for computing a tree decomposition:
+:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
+
+.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
+   computations I. Upper bounds". Inf. Comput. 208, 3 (March 2010), 259-275.
+   http://dx.doi.org/10.1016/j.ic.2009.03.008
+
+.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
+   and Computing Sciences, Utrecht University.
+   Technical Report UU-CS-2005-018.
+   http://www.cs.uu.nl
+
+.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
+   https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
+
+"""
+
+import itertools
+import sys
+from heapq import heapify, heappop, heappush
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(returns_graph=True)
+def treewidth_min_degree(G):
+    """Returns a treewidth decomposition using the Minimum Degree heuristic.
+
+    The heuristic chooses the nodes according to their degree, i.e., first
+    the node with the lowest degree is chosen, then the graph is updated
+    and the corresponding node is removed. Next, a new node with the lowest
+    degree is chosen, and so on.
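+
+    A minimal illustrative usage (the example graph is an arbitrary choice):
+
+    >>> width, decomp_tree = nx.approximation.treewidth_min_degree(nx.petersen_graph())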
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    Treewidth decomposition : (int, Graph) tuple
+        2-tuple with treewidth and the corresponding decomposed tree.
+    """
+    deg_heuristic = MinDegreeHeuristic(G)
+    return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(returns_graph=True)
+def treewidth_min_fill_in(G):
+    """Returns a treewidth decomposition using the Minimum Fill-in heuristic.
+
+    The heuristic chooses a node from the graph, where the number of edges
+    added when turning the neighborhood of the chosen node into a clique is
+    as small as possible.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    Treewidth decomposition : (int, Graph) tuple
+        2-tuple with treewidth and the corresponding decomposed tree.
+    """
+    return treewidth_decomp(G, min_fill_in_heuristic)
+
+
+class MinDegreeHeuristic:
+    """Implements the Minimum Degree heuristic.
+
+    The heuristic chooses the nodes according to their degree
+    (number of neighbors), i.e., first the node with the lowest degree is
+    chosen, then the graph is updated and the corresponding node is
+    removed. Next, a new node with the lowest degree is chosen, and so on.
+    """
+
+    def __init__(self, graph):
+        self._graph = graph
+
+        # nodes that have to be updated in the heap before each iteration
+        self._update_nodes = []
+
+        self._degreeq = []  # a heapq with 3-tuples (degree, unique_id, node)
+        self.count = itertools.count()
+
+        # build heap with initial degrees
+        for n in graph:
+            self._degreeq.append((len(graph[n]), next(self.count), n))
+        heapify(self._degreeq)
+
+    def best_node(self, graph):
+        # update nodes in self._update_nodes
+        for n in self._update_nodes:
+            # insert changed degrees into degreeq
+            heappush(self._degreeq, (len(graph[n]), next(self.count), n))
+
+        # get the next valid (minimum degree) node
+        while self._degreeq:
+            (min_degree, _, elim_node) = heappop(self._degreeq)
+            if elim_node not in graph or len(graph[elim_node]) != min_degree:
+                # outdated entry in degreeq
+                continue
+            elif min_degree == len(graph) - 1:
+                # fully connected: abort condition
+                return None
+
+            # remember to update nodes in the heap before getting the next node
+            self._update_nodes = graph[elim_node]
+            return elim_node
+
+        # the heap is empty: abort
+        return None
+
+
+def min_fill_in_heuristic(graph_dict):
+    """Implements the Minimum Fill-in heuristic.
+
+    graph_dict: dict keyed by node to sets of neighbors (no self-loops)
+
+    Returns the node from the graph, where the number of edges added when
+    turning the neighborhood of the chosen node into a clique is as small as
+    possible. This algorithm chooses the nodes using the Minimum Fill-in
+    heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
+    additional constant memory.
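+
+    For example, in a 4-cycle every node has two neighbors that are not
+    adjacent to each other, so eliminating any node requires exactly one
+    fill-in edge; the heuristic may then return any of the four nodes.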
+ """ + + if len(graph_dict) == 0: + return None + + min_fill_in_node = None + + min_fill_in = sys.maxsize + + # sort nodes by degree + nodes_by_degree = sorted(graph_dict, key=lambda x: len(graph_dict[x])) + min_degree = len(graph_dict[nodes_by_degree[0]]) + + # abort condition (handle complete graph) + if min_degree == len(graph_dict) - 1: + return None + + for node in nodes_by_degree: + num_fill_in = 0 + nbrs = graph_dict[node] + for nbr in nbrs: + # count how many nodes in nbrs current nbr is not connected to + # subtract 1 for the node itself + num_fill_in += len(nbrs - graph_dict[nbr]) - 1 + if num_fill_in >= 2 * min_fill_in: + break + + num_fill_in /= 2 # divide by 2 because of double counting + + if num_fill_in < min_fill_in: # update min-fill-in node + if num_fill_in == 0: + return node + min_fill_in = num_fill_in + min_fill_in_node = node + + return min_fill_in_node + + +@nx._dispatchable(returns_graph=True) +def treewidth_decomp(G, heuristic=min_fill_in_heuristic): + """Returns a treewidth decomposition using the passed heuristic. + + Parameters + ---------- + G : NetworkX graph + heuristic : heuristic function + + Returns + ------- + Treewidth decomposition : (int, Graph) tuple + 2-tuple with treewidth and the corresponding decomposed tree. + """ + + # make dict-of-sets structure + graph_dict = {n: set(G[n]) - {n} for n in G} + + # stack containing nodes and neighbors in the order from the heuristic + node_stack = [] + + # get first node from heuristic + elim_node = heuristic(graph_dict) + while elim_node is not None: + # connect all neighbors with each other + nbrs = graph_dict[elim_node] + for u, v in itertools.permutations(nbrs, 2): + if v not in graph_dict[u]: + graph_dict[u].add(v) + + # push node and its current neighbors on stack + node_stack.append((elim_node, nbrs)) + + # remove node from graph_dict + for u in graph_dict[elim_node]: + graph_dict[u].remove(elim_node) + + del graph_dict[elim_node] + elim_node = heuristic(graph_dict) + + # the abort condition is met; put all remaining nodes into one bag + decomp = nx.Graph() + first_bag = frozenset(graph_dict.keys()) + decomp.add_node(first_bag) + + treewidth = len(first_bag) - 1 + + while node_stack: + # get node and its neighbors from the stack + (curr_node, nbrs) = node_stack.pop() + + # find a bag all neighbors are in + old_bag = None + for bag in decomp.nodes: + if nbrs <= bag: + old_bag = bag + break + + if old_bag is None: + # no old_bag was found: just connect to the first_bag + old_bag = first_bag + + # create new node for decomposition + nbrs.add(curr_node) + new_bag = frozenset(nbrs) + + # update treewidth + treewidth = max(treewidth, len(new_bag) - 1) + + # add edge to decomposition (implicitly also adds the new node) + decomp.add_edge(old_bag, new_bag) + + return treewidth, decomp diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py new file mode 100644 index 0000000..13d7167 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/approximation/vertex_cover.py @@ -0,0 +1,83 @@ +"""Functions for computing an approximate minimum weight vertex cover. + +A |vertex cover|_ is a subset of nodes such that each edge in the graph +is incident to at least one node in the subset. + +.. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover +.. 
|vertex cover| replace:: *vertex cover*
+
+"""
+
+import networkx as nx
+
+__all__ = ["min_weighted_vertex_cover"]
+
+
+@nx._dispatchable(node_attrs="weight")
+def min_weighted_vertex_cover(G, weight=None):
+    r"""Returns an approximate minimum weighted vertex cover.
+
+    The set of nodes returned by this function is guaranteed to be a
+    vertex cover, and the total weight of the set is guaranteed to be at
+    most twice the total weight of the minimum weight vertex cover. In
+    other words,
+
+    .. math::
+
+       w(S) \leq 2 * w(S^*),
+
+    where $S$ is the vertex cover returned by this function,
+    $S^*$ is the vertex cover of minimum weight out of all vertex
+    covers of the graph, and $w$ is the function that computes the
+    sum of the weights of each node in that given set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string, optional (default = None)
+        If None, every node has weight 1. If a string, use this node
+        attribute as the node weight. A node without this attribute is
+        assumed to have weight 1.
+
+    Returns
+    -------
+    min_weighted_cover : set
+        Returns a set of nodes whose weight sum is no more than twice
+        the weight sum of the minimum weight vertex cover.
+
+    Notes
+    -----
+    For a directed graph, a vertex cover has the same definition: a set
+    of nodes such that each edge in the graph is incident to at least
+    one node in the set. Whether the node is the head or tail of the
+    directed edge is ignored.
+
+    This is the local-ratio algorithm for computing an approximate
+    vertex cover. The algorithm greedily reduces the costs over edges,
+    iteratively building a cover. The worst-case runtime of this
+    implementation is $O(m \log n)$, where $n$ is the number
+    of nodes and $m$ the number of edges in the graph.
+
+    References
+    ----------
+    .. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
+       approximating the weighted vertex cover problem."
+       *Annals of Discrete Mathematics*, 25, 27–46
+
+
+    """
+    cost = dict(G.nodes(data=weight, default=1))
+    # While there are uncovered edges, choose an uncovered edge, add its
+    # cheaper endpoint to the cover, and reduce the residual cost of the
+    # other endpoint accordingly.
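+    # Worked through on one edge: for an uncovered edge (u, v) with
+    # cost[u] = 2 and cost[v] = 5, u enters the cover and the residual cost
+    # of v drops to 3. Every unit of weight placed in the cover is matched
+    # by a unit removed from the other endpoint, which is what yields the
+    # factor-2 guarantee.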
+ cover = set() + for u, v in G.edges(): + if u in cover or v in cover: + continue + if cost[u] <= cost[v]: + cover.add(u) + cost[v] -= cost[u] + else: + cover.add(v) + cost[u] -= cost[v] + return cover diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py new file mode 100644 index 0000000..4d98886 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__init__.py @@ -0,0 +1,5 @@ +from networkx.algorithms.assortativity.connectivity import * +from networkx.algorithms.assortativity.correlation import * +from networkx.algorithms.assortativity.mixing import * +from networkx.algorithms.assortativity.neighbor_degree import * +from networkx.algorithms.assortativity.pairs import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2dfc570 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-312.pyc new file mode 100644 index 0000000..b4324e3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-312.pyc new file mode 100644 index 0000000..9c71a36 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/correlation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-312.pyc new file mode 100644 index 0000000..ed22d16 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/mixing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-312.pyc new file mode 100644 index 0000000..57534f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/neighbor_degree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-312.pyc new file mode 100644 index 0000000..b00d7ab Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/__pycache__/pairs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py new file mode 100644 
index 0000000..c3fde0d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/connectivity.py
@@ -0,0 +1,122 @@
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = ["average_degree_connectivity"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def average_degree_connectivity(
+    G, source="in+out", target="in+out", nodes=None, weight=None
+):
+    r"""Compute the average degree connectivity of graph.
+
+    The average degree connectivity is the average nearest neighbor degree of
+    nodes with degree k. For weighted graphs, an analogous measure can
+    be computed using the weighted average neighbors degree defined in
+    [1]_, for a node `i`, as
+
+    .. math::
+
+        k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
+
+    where `s_i` is the weighted degree of node `i`,
+    `w_{ij}` is the weight of the edge that links `i` and `j`,
+    and `N(i)` are the neighbors of node `i`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : "in"|"out"|"in+out" (default:"in+out")
+        Directed graphs only. Use "in"- or "out"-degree for source node.
+
+    target : "in"|"out"|"in+out" (default:"in+out")
+        Directed graphs only. Use "in"- or "out"-degree for target node.
+
+    nodes : list or iterable (optional)
+        Compute neighbor connectivity for these nodes. The default is all
+        nodes.
+
+    weight : string or None, optional (default=None)
+        The edge attribute that holds the numerical value used as a weight.
+        If None, then each edge has weight 1.
+
+    Returns
+    -------
+    d : dict
+        A dictionary keyed by degree k with the value of average connectivity.
+
+    Raises
+    ------
+    NetworkXError
+        If either `source` or `target` are not one of 'in',
+        'out', or 'in+out'.
+        If either `source` or `target` is passed for an undirected graph.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> G.edges[1, 2]["weight"] = 3
+    >>> nx.average_degree_connectivity(G)
+    {1: 2.0, 2: 1.5}
+    >>> nx.average_degree_connectivity(G, weight="weight")
+    {1: 2.0, 2: 1.75}
+
+    See Also
+    --------
+    average_neighbor_degree
+
+    References
+    ----------
+    .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
+       "The architecture of complex weighted networks".
+       PNAS 101 (11): 3747–3752 (2004).
+    """
+    # First, determine the type of neighbors and the type of degree to use.
+    if G.is_directed():
+        if source not in ("in", "out", "in+out"):
+            raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
+        if target not in ("in", "out", "in+out"):
+            raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
+        direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
+        neighbor_funcs = {
+            "out": G.successors,
+            "in": G.predecessors,
+            "in+out": G.neighbors,
+        }
+        source_degree = direction[source]
+        target_degree = direction[target]
+        neighbors = neighbor_funcs[source]
+        # `reverse` indicates whether to look at the in-edge when
+        # computing the weight of an edge.
+        reverse = source == "in"
+    else:
+        if source != "in+out" or target != "in+out":
+            raise nx.NetworkXError(
+                "source and target arguments are only supported for directed graphs"
+            )
+        source_degree = G.degree
+        target_degree = G.degree
+        neighbors = G.neighbors
+        reverse = False
+    dsum = defaultdict(int)
+    dnorm = defaultdict(int)
+    # Check if `nodes` is actually a single node in the graph.
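+    # (If it is, the degree lookup below returns a plain int rather than a
+    # view of (node, degree) pairs, so it is wrapped in a one-element list.)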
+ source_nodes = source_degree(nodes) + if nodes in G: + source_nodes = [(nodes, source_degree(nodes))] + for n, k in source_nodes: + nbrdeg = target_degree(neighbors(n)) + if weight is None: + s = sum(d for n, d in nbrdeg) + else: # weight nbr degree by weight of (n,nbr) edge + if reverse: + s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg) + else: + s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg) + dnorm[k] += source_degree(n, weight=weight) + dsum[k] += s + + # normalize + return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py new file mode 100644 index 0000000..52ae7a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/correlation.py @@ -0,0 +1,302 @@ +"""Node assortativity coefficients and correlation measures.""" + +import networkx as nx +from networkx.algorithms.assortativity.mixing import ( + attribute_mixing_matrix, + degree_mixing_matrix, +) +from networkx.algorithms.assortativity.pairs import node_degree_xy + +__all__ = [ + "degree_pearson_correlation_coefficient", + "degree_assortativity_coefficient", + "attribute_assortativity_coefficient", + "numeric_assortativity_coefficient", +] + + +@nx._dispatchable(edge_attrs="weight") +def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None): + """Compute degree assortativity of graph. + + Assortativity measures the similarity of connections + in the graph with respect to the node degree. + + Parameters + ---------- + G : NetworkX graph + + x: string ('in','out') + The degree type for source node (directed graphs only). + + y: string ('in','out') + The degree type for target node (directed graphs only). + + weight: string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + nodes: list or iterable (optional) + Compute degree assortativity only for nodes in container. + The default is all nodes. + + Returns + ------- + r : float + Assortativity of graph by degree. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> r = nx.degree_assortativity_coefficient(G) + >>> print(f"{r:3.1f}") + -0.5 + + See Also + -------- + attribute_assortativity_coefficient + numeric_assortativity_coefficient + degree_mixing_dict + degree_mixing_matrix + + Notes + ----- + This computes Eq. (21) in Ref. [1]_ , where e is the joint + probability distribution (mixing matrix) of the degrees. If G is + directed than the matrix e is the joint probability of the + user-specified degree type for the source and target. + + References + ---------- + .. [1] M. E. J. Newman, Mixing patterns in networks, + Physical Review E, 67 026126, 2003 + .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. + Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). 
+ """ + if nodes is None: + nodes = G.nodes + + degrees = None + + if G.is_directed(): + indeg = ( + {d for _, d in G.in_degree(nodes, weight=weight)} + if "in" in (x, y) + else set() + ) + outdeg = ( + {d for _, d in G.out_degree(nodes, weight=weight)} + if "out" in (x, y) + else set() + ) + degrees = set.union(indeg, outdeg) + else: + degrees = {d for _, d in G.degree(nodes, weight=weight)} + + mapping = {d: i for i, d in enumerate(degrees)} + M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping) + + return _numeric_ac(M, mapping=mapping) + + +@nx._dispatchable(edge_attrs="weight") +def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None): + """Compute degree assortativity of graph. + + Assortativity measures the similarity of connections + in the graph with respect to the node degree. + + This is the same as degree_assortativity_coefficient but uses the + potentially faster scipy.stats.pearsonr function. + + Parameters + ---------- + G : NetworkX graph + + x: string ('in','out') + The degree type for source node (directed graphs only). + + y: string ('in','out') + The degree type for target node (directed graphs only). + + weight: string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + nodes: list or iterable (optional) + Compute pearson correlation of degrees only for specified nodes. + The default is all nodes. + + Returns + ------- + r : float + Assortativity of graph by degree. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> r = nx.degree_pearson_correlation_coefficient(G) + >>> print(f"{r:3.1f}") + -0.5 + + Notes + ----- + This calls scipy.stats.pearsonr. + + References + ---------- + .. [1] M. E. J. Newman, Mixing patterns in networks + Physical Review E, 67 026126, 2003 + .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. + Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). + """ + import scipy as sp + + xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) + x, y = zip(*xy) + return float(sp.stats.pearsonr(x, y)[0]) + + +@nx._dispatchable(node_attrs="attribute") +def attribute_assortativity_coefficient(G, attribute, nodes=None): + """Compute assortativity for node attributes. + + Assortativity measures the similarity of connections + in the graph with respect to the given attribute. + + Parameters + ---------- + G : NetworkX graph + + attribute : string + Node attribute key + + nodes: list or iterable (optional) + Compute attribute assortativity for nodes in container. + The default is all nodes. + + Returns + ------- + r: float + Assortativity of graph for given attribute + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_nodes_from([0, 1], color="red") + >>> G.add_nodes_from([2, 3], color="blue") + >>> G.add_edges_from([(0, 1), (2, 3)]) + >>> print(nx.attribute_assortativity_coefficient(G, "color")) + 1.0 + + Notes + ----- + This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)), + where M is the joint probability distribution (mixing matrix) + of the specified attribute. + + References + ---------- + .. [1] M. E. J. 
Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    """
+    M = attribute_mixing_matrix(G, attribute, nodes)
+    return attribute_ac(M)
+
+
+@nx._dispatchable(node_attrs="attribute")
+def numeric_assortativity_coefficient(G, attribute, nodes=None):
+    """Compute assortativity for numerical node attributes.
+
+    Assortativity measures the similarity of connections
+    in the graph with respect to the given numeric attribute.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    attribute : string
+        Node attribute key.
+
+    nodes: list or iterable (optional)
+        Compute numeric assortativity only for attributes of nodes in
+        container. The default is all nodes.
+
+    Returns
+    -------
+    r: float
+       Assortativity of graph for given attribute
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], size=2)
+    >>> G.add_nodes_from([2, 3], size=3)
+    >>> G.add_edges_from([(0, 1), (2, 3)])
+    >>> print(nx.numeric_assortativity_coefficient(G, "size"))
+    1.0
+
+    Notes
+    -----
+    This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
+    coefficient of the specified (scalar valued) attribute across edges.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    """
+    if nodes is None:
+        nodes = G.nodes
+    vals = {G.nodes[n][attribute] for n in nodes}
+    mapping = {d: i for i, d in enumerate(vals)}
+    M = attribute_mixing_matrix(G, attribute, nodes, mapping)
+    return _numeric_ac(M, mapping)
+
+
+def attribute_ac(M):
+    """Compute assortativity for attribute matrix M.
+
+    Parameters
+    ----------
+    M : numpy.ndarray
+        2D ndarray representing the attribute mixing matrix.
+
+    Notes
+    -----
+    This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
+    where e is the joint probability distribution (mixing matrix)
+    of the specified attribute.
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, Mixing patterns in networks,
+       Physical Review E, 67 026126, 2003
+    """
+    if M.sum() != 1.0:
+        M = M / M.sum()
+    s = (M @ M).sum()
+    t = M.trace()
+    r = (t - s) / (1 - s)
+    return float(r)
+
+
+def _numeric_ac(M, mapping):
+    # M is a 2D numpy array
+    # numeric assortativity coefficient, pearsonr
+    import numpy as np
+
+    if M.sum() != 1.0:
+        M = M / M.sum()
+    x = np.array(list(mapping.keys()))
+    y = x  # x and y have the same support
+    idx = list(mapping.values())
+    a = M.sum(axis=0)
+    b = M.sum(axis=1)
+    vara = (a[idx] * x**2).sum() - ((a[idx] * x).sum()) ** 2
+    varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2
+    xy = np.outer(x, y)
+    ab = np.outer(a[idx], b[idx])
+    return float((xy * (M - ab)).sum() / np.sqrt(vara * varb))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py
new file mode 100644
index 0000000..1762d4e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/mixing.py
@@ -0,0 +1,255 @@
+"""
+Mixing matrices for node attributes and degree.
+"""
+
+import networkx as nx
+from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
+from networkx.utils import dict_to_numpy_array
+
+__all__ = [
+    "attribute_mixing_matrix",
+    "attribute_mixing_dict",
+    "degree_mixing_matrix",
+    "degree_mixing_dict",
+    "mixing_dict",
+]
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+        Use nodes in container to build the dict. The default is all nodes.
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from([0, 1], color="red")
+    >>> G.add_nodes_from([2, 3], color="blue")
+    >>> G.add_edge(1, 3)
+    >>> d = nx.attribute_mixing_dict(G, "color")
+    >>> print(d["red"]["blue"])
+    1
+    >>> print(d["blue"]["red"])  # d symmetric for undirected graphs
+    1
+
+    Returns
+    -------
+    d : dictionary
+       Counts or joint probability of occurrence of attribute pairs.
+    """
+    xy_iter = node_attribute_xy(G, attribute, nodes)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(node_attrs="attribute")
+def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
+    """Returns mixing matrix for attribute.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    attribute : string
+       Node attribute key.
+
+    nodes: list or iterable (optional)
+        Use only nodes in container to build the matrix. The default is
+        all nodes.
+
+    mapping : dictionary, optional
+       Mapping from node attribute to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    m: numpy array
+       Counts or joint probability of occurrence of attribute pairs.
+
+    Notes
+    -----
+    If each node has a unique attribute value, the unnormalized mixing matrix
+    will be equal to the adjacency matrix. To get a denser mixing matrix,
+    the values can be rounded to form groups of nodes with equal values.
+    For example, the exact height of persons in cm (180.79155222, 163.9080892,
+    163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
+    163, 168, 168, ...).
+
+    Definitions of attribute mixing matrix vary on whether the matrix
+    should include rows for attribute values that don't arise. Here we
+    do not include such empty rows. But you can force them to appear
+    by inputting a `mapping` that includes those values.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> gender = {0: "male", 1: "female", 2: "female"}
+    >>> nx.set_node_attributes(G, gender, "gender")
+    >>> mapping = {"male": 0, "female": 1}
+    >>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
+    >>> mix_mat
+    array([[0.  , 0.25],
+           [0.25, 0.5 ]])
+    """
+    d = attribute_mixing_dict(G, attribute, nodes)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
+    """Returns dictionary representation of mixing matrix for degree.
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=False)
+        Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or joint probability of occurrence of degree pairs.
+    """
+    xy_iter = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
+    return mixing_dict(xy_iter, normalized=normalized)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def degree_mixing_matrix(
+    G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
+):
+    """Returns mixing matrix for degree.
+
+    Parameters
+    ----------
+    G : graph
+       NetworkX graph object.
+
+    x: string ('in','out')
+       The degree type for source node (directed graphs only).
+
+    y: string ('in','out')
+       The degree type for target node (directed graphs only).
+
+    nodes: list or iterable (optional)
+        Build the matrix using only nodes in container.
+        The default is all nodes.
+
+    weight: string or None, optional (default=None)
+       The edge attribute that holds the numerical value used
+       as a weight. If None, then each edge has weight 1.
+       The degree is the sum of the edge weights adjacent to the node.
+
+    normalized : bool (default=True)
+       Return counts if False or probabilities if True.
+
+    mapping : dictionary, optional
+       Mapping from node degree to integer index in matrix.
+       If not specified, an arbitrary ordering will be used.
+
+    Returns
+    -------
+    m: numpy array
+       Counts, or joint probability, of occurrence of node degree.
+
+    Notes
+    -----
+    Definitions of degree mixing matrix vary on whether the matrix
+    should include rows for degree values that don't arise. Here we
+    do not include such empty rows. But you can force them to appear
+    by inputting a `mapping` that includes those values. See examples.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(3)
+    >>> mix_mat = nx.degree_mixing_matrix(G)
+    >>> mix_mat
+    array([[0. , 0.5],
+           [0.5, 0. ]])
+
+    If you want every possible degree to appear as a row, even if no nodes
+    have that degree, use `mapping` as follows,
+
+    >>> max_degree = max(deg for n, deg in G.degree)
+    >>> mapping = {x: x for x in range(max_degree + 1)}  # identity mapping
+    >>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
+    >>> mix_mat
+    array([[0. , 0. , 0. , 0. ],
+           [0. , 0. , 0. , 0.5],
+           [0. , 0. , 0. , 0. ],
+           [0. , 0.5, 0. , 0. ]])
+    """
+    d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
+    a = dict_to_numpy_array(d, mapping=mapping)
+    if normalized:
+        a = a / a.sum()
+    return a
+
+
+def mixing_dict(xy, normalized=False):
+    """Returns a dictionary representation of mixing matrix.
+
+    Parameters
+    ----------
+    xy : list or container of two-tuples
+       Pairs of (x,y) items.
+
+    normalized : bool (default=False)
+       Return counts if False or probabilities if True.
+
+    Returns
+    -------
+    d: dictionary
+       Counts or joint probability of occurrence of values in xy.
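+
+    Examples
+    --------
+    >>> nx.mixing_dict([("a", "b"), ("a", "b"), ("b", "a")])
+    {'a': {'b': 2}, 'b': {'a': 1}}
+    >>> nx.mixing_dict([("a", "b"), ("a", "b"), ("b", "a")], normalized=True)
+    {'a': {'b': 0.6666666666666666}, 'b': {'a': 0.3333333333333333}}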
+ """ + d = {} + psum = 0.0 + for x, y in xy: + if x not in d: + d[x] = {} + if y not in d: + d[y] = {} + v = d[x].get(y, 0) + d[x][y] = v + 1 + psum += 1 + + if normalized: + for _, jdict in d.items(): + for j in jdict: + jdict[j] /= psum + return d diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py new file mode 100644 index 0000000..6488d04 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/neighbor_degree.py @@ -0,0 +1,160 @@ +import networkx as nx + +__all__ = ["average_neighbor_degree"] + + +@nx._dispatchable(edge_attrs="weight") +def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None): + r"""Returns the average degree of the neighborhood of each node. + + In an undirected graph, the neighborhood `N(i)` of node `i` contains the + nodes that are connected to `i` by an edge. + + For directed graphs, `N(i)` is defined according to the parameter `source`: + + - if source is 'in', then `N(i)` consists of predecessors of node `i`. + - if source is 'out', then `N(i)` consists of successors of node `i`. + - if source is 'in+out', then `N(i)` is both predecessors and successors. + + The average neighborhood degree of a node `i` is + + .. math:: + + k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j + + where `N(i)` are the neighbors of node `i` and `k_j` is + the degree of node `j` which belongs to `N(i)`. For weighted + graphs, an analogous measure can be defined [1]_, + + .. math:: + + k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j + + where `s_i` is the weighted degree of node `i`, `w_{ij}` + is the weight of the edge that links `i` and `j` and + `N(i)` are the neighbors of node `i`. + + + Parameters + ---------- + G : NetworkX graph + + source : string ("in"|"out"|"in+out"), optional (default="out") + Directed graphs only. + Use "in"- or "out"-neighbors of source node. + + target : string ("in"|"out"|"in+out"), optional (default="out") + Directed graphs only. + Use "in"- or "out"-degree for target node. + + nodes : list or iterable, optional (default=G.nodes) + Compute neighbor degree only for specified nodes. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used as a weight. + If None, then each edge has weight 1. + + Returns + ------- + d: dict + A dictionary keyed by node to the average degree of its neighbors. + + Raises + ------ + NetworkXError + If either `source` or `target` are not one of 'in', 'out', or 'in+out'. + If either `source` or `target` is passed for an undirected graph. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> G.edges[0, 1]["weight"] = 5 + >>> G.edges[2, 3]["weight"] = 3 + + >>> nx.average_neighbor_degree(G) + {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0} + >>> nx.average_neighbor_degree(G, weight="weight") + {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0} + + >>> G = nx.DiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> nx.average_neighbor_degree(G, source="in", target="in") + {0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0} + + >>> nx.average_neighbor_degree(G, source="out", target="out") + {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0} + + See Also + -------- + average_degree_connectivity + + References + ---------- + .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani, + "The architecture of complex weighted networks". + PNAS 101 (11): 3747–3752 (2004). 
+ """ + if G.is_directed(): + if source == "in": + source_degree = G.in_degree + elif source == "out": + source_degree = G.out_degree + elif source == "in+out": + source_degree = G.degree + else: + raise nx.NetworkXError( + f"source argument {source} must be 'in', 'out' or 'in+out'" + ) + + if target == "in": + target_degree = G.in_degree + elif target == "out": + target_degree = G.out_degree + elif target == "in+out": + target_degree = G.degree + else: + raise nx.NetworkXError( + f"target argument {target} must be 'in', 'out' or 'in+out'" + ) + else: + if source != "out" or target != "out": + raise nx.NetworkXError( + f"source and target arguments are only supported for directed graphs" + ) + source_degree = target_degree = G.degree + + # precompute target degrees -- should *not* be weighted degree + t_deg = dict(target_degree()) + + # Set up both predecessor and successor neighbor dicts leaving empty if not needed + G_P = G_S = {n: {} for n in G} + if G.is_directed(): + # "in" or "in+out" cases: G_P contains predecessors + if "in" in source: + G_P = G.pred + # "out" or "in+out" cases: G_S contains successors + if "out" in source: + G_S = G.succ + else: + # undirected leave G_P empty but G_S is the adjacency + G_S = G.adj + + # Main loop: Compute average degree of neighbors + avg = {} + for n, deg in source_degree(nodes, weight=weight): + # handle degree zero average + if deg == 0: + avg[n] = 0.0 + continue + + # we sum over both G_P and G_S, but one of the two is usually empty. + if weight is None: + avg[n] = ( + sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n]) + ) / deg + else: + avg[n] = ( + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items()) + + sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items()) + ) / deg + return avg diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py new file mode 100644 index 0000000..ea5fd28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/pairs.py @@ -0,0 +1,127 @@ +"""Generators of x-y pairs of node data.""" + +import networkx as nx + +__all__ = ["node_attribute_xy", "node_degree_xy"] + + +@nx._dispatchable(node_attrs="attribute") +def node_attribute_xy(G, attribute, nodes=None): + """Yields 2-tuples of node attribute values for all edges in `G`. + + This generator yields, for each edge in `G` incident to a node in `nodes`, + a 2-tuple of form ``(attribute value, attribute value)`` for the parameter + specified node-attribute. + + Parameters + ---------- + G: NetworkX graph + + attribute: key + The node attribute key. + + nodes: list or iterable (optional) + Use only edges that are incident to specified nodes. + The default is all nodes. + + Yields + ------ + (x, y): 2-tuple + Generates 2-tuple of (attribute, attribute) values. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_node(1, color="red") + >>> G.add_node(2, color="blue") + >>> G.add_node(3, color="green") + >>> G.add_edge(1, 2) + >>> list(nx.node_attribute_xy(G, "color")) + [('red', 'blue')] + + Notes + ----- + For undirected graphs, each edge is produced twice, once for each edge + representation (u, v) and (v, u), with the exception of self-loop edges + which only appear once. 
+ """ + if nodes is None: + nodes = set(G) + else: + nodes = set(nodes) + Gnodes = G.nodes + for u, nbrsdict in G.adjacency(): + if u not in nodes: + continue + uattr = Gnodes[u].get(attribute, None) + if G.is_multigraph(): + for v, keys in nbrsdict.items(): + vattr = Gnodes[v].get(attribute, None) + for _ in keys: + yield (uattr, vattr) + else: + for v in nbrsdict: + vattr = Gnodes[v].get(attribute, None) + yield (uattr, vattr) + + +@nx._dispatchable(edge_attrs="weight") +def node_degree_xy(G, x="out", y="in", weight=None, nodes=None): + """Yields 2-tuples of ``(degree, degree)`` values for edges in `G`. + + This generator yields, for each edge in `G` incident to a node in `nodes`, + a 2-tuple of form ``(degree, degree)``. The node degrees are weighted + when a `weight` attribute is specified. + + Parameters + ---------- + G: NetworkX graph + + x: string ('in','out') + The degree type for source node (directed graphs only). + + y: string ('in','out') + The degree type for target node (directed graphs only). + + weight: string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + nodes: list or iterable (optional) + Use only edges that are adjacency to specified nodes. + The default is all nodes. + + Yields + ------ + (x, y): 2-tuple + Generates 2-tuple of (degree, degree) values. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2) + >>> list(nx.node_degree_xy(G, x="out", y="in")) + [(1, 1)] + >>> list(nx.node_degree_xy(G, x="in", y="out")) + [(0, 0)] + + Notes + ----- + For undirected graphs, each edge is produced twice, once for each edge + representation (u, v) and (v, u), with the exception of self-loop edges + which only appear once. 
+ """ + nodes = set(G) if nodes is None else set(nodes) + if G.is_directed(): + direction = {"out": G.out_degree, "in": G.in_degree} + xdeg = direction[x] + ydeg = direction[y] + else: + xdeg = ydeg = G.degree + + for u, degu in xdeg(nodes, weight=weight): + # use G.edges to treat multigraphs correctly + neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes) + for _, degv in ydeg(neighbors, weight=weight): + yield degu, degv diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f3f3ec9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-312.pyc new file mode 100644 index 0000000..8e79b9b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/base_test.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-312.pyc new file mode 100644 index 0000000..dc8b7ef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-312.pyc new file mode 100644 index 0000000..8686d50 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_correlation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-312.pyc new file mode 100644 index 0000000..9362482 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_mixing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-312.pyc new file mode 100644 index 0000000..416a845 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_neighbor_degree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-312.pyc new file mode 100644 index 0000000..c660a39 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/__pycache__/test_pairs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py new file mode 100644 index 0000000..46d6300 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/base_test.py @@ -0,0 +1,81 @@ +import networkx as nx + + +class BaseTestAttributeMixing: + @classmethod + def setup_class(cls): + G = nx.Graph() + G.add_nodes_from([0, 1], fish="one") + G.add_nodes_from([2, 3], fish="two") + G.add_nodes_from([4], fish="red") + G.add_nodes_from([5], fish="blue") + G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) + cls.G = G + + D = nx.DiGraph() + D.add_nodes_from([0, 1], fish="one") + D.add_nodes_from([2, 3], fish="two") + D.add_nodes_from([4], fish="red") + D.add_nodes_from([5], fish="blue") + D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) + cls.D = D + + M = nx.MultiGraph() + M.add_nodes_from([0, 1], fish="one") + M.add_nodes_from([2, 3], fish="two") + M.add_nodes_from([4], fish="red") + M.add_nodes_from([5], fish="blue") + M.add_edges_from([(0, 1), (0, 1), (2, 3)]) + cls.M = M + + S = nx.Graph() + S.add_nodes_from([0, 1], fish="one") + S.add_nodes_from([2, 3], fish="two") + S.add_nodes_from([4], fish="red") + S.add_nodes_from([5], fish="blue") + S.add_edge(0, 0) + S.add_edge(2, 2) + cls.S = S + + N = nx.Graph() + N.add_nodes_from([0, 1], margin=-2) + N.add_nodes_from([2, 3], margin=-2) + N.add_nodes_from([4], margin=-3) + N.add_nodes_from([5], margin=-4) + N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)]) + cls.N = N + + F = nx.Graph() + F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5) + F.add_edge(0, 2, weight=1) + nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin") + cls.F = F + + K = nx.Graph() + K.add_nodes_from([1, 2], margin=-1) + K.add_nodes_from([3], margin=1) + K.add_nodes_from([4], margin=2) + K.add_edges_from([(3, 4), (1, 2), (1, 3)]) + cls.K = K + + +class BaseTestDegreeMixing: + @classmethod + def setup_class(cls): + cls.P4 = nx.path_graph(4) + cls.D = nx.DiGraph() + cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) + cls.D2 = nx.DiGraph() + cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)]) + cls.M = nx.MultiGraph() + nx.add_path(cls.M, range(4)) + cls.M.add_edge(0, 1) + cls.S = nx.Graph() + cls.S.add_edges_from([(0, 0), (1, 1)]) + cls.W = nx.Graph() + cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5) + cls.W.add_edge(0, 2, weight=1) + S1 = nx.star_graph(4) + S2 = nx.star_graph(4) + cls.DS = nx.disjoint_union(S1, S2) + cls.DS.add_edge(4, 5) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py new file mode 100644 index 0000000..21c6287 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_connectivity.py @@ -0,0 +1,143 @@ +from itertools import permutations + +import pytest + +import networkx as nx + + +class TestNeighborConnectivity: + def test_degree_p4(self): + G = nx.path_graph(4) + answer = {1: 2.0, 2: 1.5} + nd = 
nx.average_degree_connectivity(G) + assert nd == answer + + D = G.to_directed() + answer = {2: 2.0, 4: 1.5} + nd = nx.average_degree_connectivity(D) + assert nd == answer + + answer = {1: 2.0, 2: 1.5} + D = G.to_directed() + nd = nx.average_degree_connectivity(D, source="in", target="in") + assert nd == answer + + D = G.to_directed() + nd = nx.average_degree_connectivity(D, source="in", target="in") + assert nd == answer + + def test_degree_p4_weighted(self): + G = nx.path_graph(4) + G[1][2]["weight"] = 4 + answer = {1: 2.0, 2: 1.8} + nd = nx.average_degree_connectivity(G, weight="weight") + assert nd == answer + answer = {1: 2.0, 2: 1.5} + nd = nx.average_degree_connectivity(G) + assert nd == answer + + D = G.to_directed() + answer = {2: 2.0, 4: 1.8} + nd = nx.average_degree_connectivity(D, weight="weight") + assert nd == answer + + answer = {1: 2.0, 2: 1.8} + D = G.to_directed() + nd = nx.average_degree_connectivity( + D, weight="weight", source="in", target="in" + ) + assert nd == answer + + D = G.to_directed() + nd = nx.average_degree_connectivity( + D, source="in", target="out", weight="weight" + ) + assert nd == answer + + def test_weight_keyword(self): + G = nx.path_graph(4) + G[1][2]["other"] = 4 + answer = {1: 2.0, 2: 1.8} + nd = nx.average_degree_connectivity(G, weight="other") + assert nd == answer + answer = {1: 2.0, 2: 1.5} + nd = nx.average_degree_connectivity(G, weight=None) + assert nd == answer + + D = G.to_directed() + answer = {2: 2.0, 4: 1.8} + nd = nx.average_degree_connectivity(D, weight="other") + assert nd == answer + + answer = {1: 2.0, 2: 1.8} + D = G.to_directed() + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") + assert nd == answer + + D = G.to_directed() + nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in") + assert nd == answer + + def test_degree_barrat(self): + G = nx.star_graph(5) + G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) + G[0][5]["weight"] = 5 + nd = nx.average_degree_connectivity(G)[5] + assert nd == 1.8 + nd = nx.average_degree_connectivity(G, weight="weight")[5] + assert nd == pytest.approx(3.222222, abs=1e-5) + + def test_zero_deg(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(1, 4) + c = nx.average_degree_connectivity(G) + assert c == {1: 0, 3: 1} + c = nx.average_degree_connectivity(G, source="in", target="in") + assert c == {0: 0, 1: 0} + c = nx.average_degree_connectivity(G, source="in", target="out") + assert c == {0: 0, 1: 3} + c = nx.average_degree_connectivity(G, source="in", target="in+out") + assert c == {0: 0, 1: 3} + c = nx.average_degree_connectivity(G, source="out", target="out") + assert c == {0: 0, 3: 0} + c = nx.average_degree_connectivity(G, source="out", target="in") + assert c == {0: 0, 3: 1} + c = nx.average_degree_connectivity(G, source="out", target="in+out") + assert c == {0: 0, 3: 1} + + def test_in_out_weight(self): + G = nx.DiGraph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=1) + G.add_edge(3, 1, weight=1) + for s, t in permutations(["in", "out", "in+out"], 2): + c = nx.average_degree_connectivity(G, source=s, target=t) + cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight") + assert c == cw + + def test_invalid_source(self): + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + nx.average_degree_connectivity(G, source="bogus") + + def test_invalid_target(self): + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + nx.average_degree_connectivity(G, target="bogus") + + def 
test_invalid_undirected_graph(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.average_degree_connectivity(G, target="bogus") + with pytest.raises(nx.NetworkXError): + nx.average_degree_connectivity(G, source="bogus") + + def test_single_node(self): + # TODO Is this really the intended behavior for providing a + # single node as the argument `nodes`? Shouldn't the function + # just return the connectivity value itself? + G = nx.trivial_graph() + conn = nx.average_degree_connectivity(G, nodes=0) + assert conn == {0: 0} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py new file mode 100644 index 0000000..147c837 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_correlation.py @@ -0,0 +1,122 @@ +import pytest + +import networkx as nx +from networkx.algorithms.assortativity.correlation import attribute_ac + +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestDegreeMixingCorrelation(BaseTestDegreeMixing): + def test_degree_assortativity_undirected(self): + r = nx.degree_assortativity_coefficient(self.P4) + np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4) + + def test_degree_assortativity_node_kwargs(self): + G = nx.Graph() + edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)] + G.add_edges_from(edges) + r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4]) + np.testing.assert_almost_equal(r, -1.0, decimal=4) + + def test_degree_assortativity_directed(self): + r = nx.degree_assortativity_coefficient(self.D) + np.testing.assert_almost_equal(r, -0.57735, decimal=4) + + def test_degree_assortativity_directed2(self): + """Test degree assortativity for a directed graph where the set of + in/out degree does not equal the total degree.""" + r = nx.degree_assortativity_coefficient(self.D2) + np.testing.assert_almost_equal(r, 0.14852, decimal=4) + + def test_degree_assortativity_multigraph(self): + r = nx.degree_assortativity_coefficient(self.M) + np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4) + + def test_degree_pearson_assortativity_undirected(self): + r = nx.degree_pearson_correlation_coefficient(self.P4) + np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4) + + def test_degree_pearson_assortativity_directed(self): + r = nx.degree_pearson_correlation_coefficient(self.D) + np.testing.assert_almost_equal(r, -0.57735, decimal=4) + + def test_degree_pearson_assortativity_directed2(self): + """Test degree assortativity with Pearson for a directed graph where + the set of in/out degree does not equal the total degree.""" + r = nx.degree_pearson_correlation_coefficient(self.D2) + np.testing.assert_almost_equal(r, 0.14852, decimal=4) + + def test_degree_pearson_assortativity_multigraph(self): + r = nx.degree_pearson_correlation_coefficient(self.M) + np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4) + + def test_degree_assortativity_weighted(self): + r = nx.degree_assortativity_coefficient(self.W, weight="weight") + np.testing.assert_almost_equal(r, -0.1429, decimal=4) + + def test_degree_assortativity_double_star(self): + r = nx.degree_assortativity_coefficient(self.DS) + np.testing.assert_almost_equal(r, -0.9339, decimal=4) + + +class TestAttributeMixingCorrelation(BaseTestAttributeMixing): + def test_attribute_assortativity_undirected(self): + 
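+        # Value 6/22 follows from the mixing matrix of the "fish"
+        # attribute on the fixture graph self.G defined in base_test.py.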
r = nx.attribute_assortativity_coefficient(self.G, "fish") + assert r == 6.0 / 22.0 + + def test_attribute_assortativity_directed(self): + r = nx.attribute_assortativity_coefficient(self.D, "fish") + assert r == 1.0 / 3.0 + + def test_attribute_assortativity_multigraph(self): + r = nx.attribute_assortativity_coefficient(self.M, "fish") + assert r == 1.0 + + def test_attribute_assortativity_coefficient(self): + # from "Mixing patterns in networks" + # fmt: off + a = np.array([[0.258, 0.016, 0.035, 0.013], + [0.012, 0.157, 0.058, 0.019], + [0.013, 0.023, 0.306, 0.035], + [0.005, 0.007, 0.024, 0.016]]) + # fmt: on + r = attribute_ac(a) + np.testing.assert_almost_equal(r, 0.623, decimal=3) + + def test_attribute_assortativity_coefficient2(self): + # fmt: off + a = np.array([[0.18, 0.02, 0.01, 0.03], + [0.02, 0.20, 0.03, 0.02], + [0.01, 0.03, 0.16, 0.01], + [0.03, 0.02, 0.01, 0.22]]) + # fmt: on + r = attribute_ac(a) + np.testing.assert_almost_equal(r, 0.68, decimal=2) + + def test_attribute_assortativity(self): + a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]]) + r = attribute_ac(a) + np.testing.assert_almost_equal(r, 0.029, decimal=3) + + def test_attribute_assortativity_negative(self): + r = nx.numeric_assortativity_coefficient(self.N, "margin") + np.testing.assert_almost_equal(r, -0.2903, decimal=4) + + def test_assortativity_node_kwargs(self): + G = nx.Graph() + G.add_nodes_from([0, 1], size=2) + G.add_nodes_from([2, 3], size=3) + G.add_edges_from([(0, 1), (2, 3)]) + r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3]) + np.testing.assert_almost_equal(r, 1.0, decimal=4) + + def test_attribute_assortativity_float(self): + r = nx.numeric_assortativity_coefficient(self.F, "margin") + np.testing.assert_almost_equal(r, -0.1429, decimal=4) + + def test_attribute_assortativity_mixed(self): + r = nx.numeric_assortativity_coefficient(self.K, "margin") + np.testing.assert_almost_equal(r, 0.4340, decimal=4) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py new file mode 100644 index 0000000..589c102 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_mixing.py @@ -0,0 +1,174 @@ +import pytest + +import networkx as nx + +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing + +np = pytest.importorskip("numpy") + + +class TestDegreeMixingDict(BaseTestDegreeMixing): + def test_degree_mixing_dict_undirected(self): + d = nx.degree_mixing_dict(self.P4) + d_result = {1: {2: 2}, 2: {1: 2, 2: 2}} + assert d == d_result + + def test_degree_mixing_dict_undirected_normalized(self): + d = nx.degree_mixing_dict(self.P4, normalized=True) + d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}} + assert d == d_result + + def test_degree_mixing_dict_directed(self): + d = nx.degree_mixing_dict(self.D) + d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}} + assert d == d_result + + def test_degree_mixing_dict_multigraph(self): + d = nx.degree_mixing_dict(self.M) + d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}} + assert d == d_result + + def test_degree_mixing_dict_weighted(self): + d = nx.degree_mixing_dict(self.W, weight="weight") + d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}} + assert d == d_result + + +class TestDegreeMixingMatrix(BaseTestDegreeMixing): + def test_degree_mixing_matrix_undirected(self): + # fmt: off + a_result = np.array([[0, 2], + [2, 2]] + ) + # fmt: on + a = 
nx.degree_mixing_matrix(self.P4, normalized=False) + np.testing.assert_equal(a, a_result) + a = nx.degree_mixing_matrix(self.P4) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_degree_mixing_matrix_directed(self): + # fmt: off + a_result = np.array([[0, 0, 2], + [1, 0, 1], + [0, 0, 0]] + ) + # fmt: on + a = nx.degree_mixing_matrix(self.D, normalized=False) + np.testing.assert_equal(a, a_result) + a = nx.degree_mixing_matrix(self.D) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_degree_mixing_matrix_multigraph(self): + # fmt: off + a_result = np.array([[0, 1, 0], + [1, 0, 3], + [0, 3, 0]] + ) + # fmt: on + a = nx.degree_mixing_matrix(self.M, normalized=False) + np.testing.assert_equal(a, a_result) + a = nx.degree_mixing_matrix(self.M) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_degree_mixing_matrix_selfloop(self): + # fmt: off + a_result = np.array([[2]]) + # fmt: on + a = nx.degree_mixing_matrix(self.S, normalized=False) + np.testing.assert_equal(a, a_result) + a = nx.degree_mixing_matrix(self.S) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_degree_mixing_matrix_weighted(self): + a_result = np.array([[0.0, 1.0], [1.0, 6.0]]) + a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False) + np.testing.assert_equal(a, a_result) + a = nx.degree_mixing_matrix(self.W, weight="weight") + np.testing.assert_equal(a, a_result / float(a_result.sum())) + + def test_degree_mixing_matrix_mapping(self): + a_result = np.array([[6.0, 1.0], [1.0, 0.0]]) + mapping = {0.5: 1, 1.5: 0} + a = nx.degree_mixing_matrix( + self.W, weight="weight", normalized=False, mapping=mapping + ) + np.testing.assert_equal(a, a_result) + + +class TestAttributeMixingDict(BaseTestAttributeMixing): + def test_attribute_mixing_dict_undirected(self): + d = nx.attribute_mixing_dict(self.G, "fish") + d_result = { + "one": {"one": 2, "red": 1}, + "two": {"two": 2, "blue": 1}, + "red": {"one": 1}, + "blue": {"two": 1}, + } + assert d == d_result + + def test_attribute_mixing_dict_directed(self): + d = nx.attribute_mixing_dict(self.D, "fish") + d_result = { + "one": {"one": 1, "red": 1}, + "two": {"two": 1, "blue": 1}, + "red": {}, + "blue": {}, + } + assert d == d_result + + def test_attribute_mixing_dict_multigraph(self): + d = nx.attribute_mixing_dict(self.M, "fish") + d_result = {"one": {"one": 4}, "two": {"two": 2}} + assert d == d_result + + +class TestAttributeMixingMatrix(BaseTestAttributeMixing): + def test_attribute_mixing_matrix_undirected(self): + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.G, "fish", mapping=mapping, normalized=False + ) + np.testing.assert_equal(a, a_result) + a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_attribute_mixing_matrix_directed(self): + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.D, "fish", mapping=mapping, normalized=False + ) + np.testing.assert_equal(a, a_result) + a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_attribute_mixing_matrix_multigraph(self): + mapping = {"one": 0, "two": 1, "red": 2, "blue": 3} + a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 
0, 0, 0]]) + a = nx.attribute_mixing_matrix( + self.M, "fish", mapping=mapping, normalized=False + ) + np.testing.assert_equal(a, a_result) + a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping) + np.testing.assert_equal(a, a_result / a_result.sum()) + + def test_attribute_mixing_matrix_negative(self): + mapping = {-2: 0, -3: 1, -4: 2} + a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]]) + a = nx.attribute_mixing_matrix( + self.N, "margin", mapping=mapping, normalized=False + ) + np.testing.assert_equal(a, a_result) + a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping) + np.testing.assert_equal(a, a_result / float(a_result.sum())) + + def test_attribute_mixing_matrix_float(self): + mapping = {0.5: 1, 1.5: 0} + a_result = np.array([[6.0, 1.0], [1.0, 0.0]]) + a = nx.attribute_mixing_matrix( + self.F, "margin", mapping=mapping, normalized=False + ) + np.testing.assert_equal(a, a_result) + a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping) + np.testing.assert_equal(a, a_result / a_result.sum()) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py new file mode 100644 index 0000000..92421ed --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_neighbor_degree.py @@ -0,0 +1,107 @@ +import pytest + +import networkx as nx + + +class TestAverageNeighbor: + def test_degree_p4(self): + G = nx.path_graph(4) + answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2} + nd = nx.average_neighbor_degree(G) + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D) + assert nd == answer + + D = nx.DiGraph(G.edges(data=True)) + nd = nx.average_neighbor_degree(D) + assert nd == {0: 1, 1: 1, 2: 0, 3: 0} + nd = nx.average_neighbor_degree(D, "in", "out") + assert nd == {0: 0, 1: 1, 2: 1, 3: 1} + nd = nx.average_neighbor_degree(D, "out", "in") + assert nd == {0: 1, 1: 1, 2: 1, 3: 0} + nd = nx.average_neighbor_degree(D, "in", "in") + assert nd == {0: 0, 1: 0, 2: 1, 3: 1} + + def test_degree_p4_weighted(self): + G = nx.path_graph(4) + G[1][2]["weight"] = 4 + answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2} + nd = nx.average_neighbor_degree(G, weight="weight") + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D, weight="weight") + assert nd == answer + + D = nx.DiGraph(G.edges(data=True)) + nd = nx.average_neighbor_degree(D, weight="weight") + assert nd == {0: 1, 1: 1, 2: 0, 3: 0} + nd = nx.average_neighbor_degree(D, "out", "out", weight="weight") + assert nd == {0: 1, 1: 1, 2: 0, 3: 0} + nd = nx.average_neighbor_degree(D, "in", "in", weight="weight") + assert nd == {0: 0, 1: 0, 2: 1, 3: 1} + nd = nx.average_neighbor_degree(D, "in", "out", weight="weight") + assert nd == {0: 0, 1: 1, 2: 1, 3: 1} + nd = nx.average_neighbor_degree(D, "out", "in", weight="weight") + assert nd == {0: 1, 1: 1, 2: 1, 3: 0} + nd = nx.average_neighbor_degree(D, source="in+out", weight="weight") + assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0} + nd = nx.average_neighbor_degree(D, target="in+out", weight="weight") + assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0} + + D = G.to_directed() + nd = nx.average_neighbor_degree(D, weight="weight") + assert nd == answer + nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight") + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D, source="in", target="in", 
weight="weight") + assert nd == answer + + def test_degree_k4(self): + G = nx.complete_graph(4) + answer = {0: 3, 1: 3, 2: 3, 3: 3} + nd = nx.average_neighbor_degree(G) + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D) + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D) + assert nd == answer + + D = G.to_directed() + nd = nx.average_neighbor_degree(D, source="in", target="in") + assert nd == answer + + def test_degree_k4_nodes(self): + G = nx.complete_graph(4) + answer = {1: 3.0, 2: 3.0} + nd = nx.average_neighbor_degree(G, nodes=[1, 2]) + assert nd == answer + + def test_degree_barrat(self): + G = nx.star_graph(5) + G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)]) + G[0][5]["weight"] = 5 + nd = nx.average_neighbor_degree(G)[5] + assert nd == 1.8 + nd = nx.average_neighbor_degree(G, weight="weight")[5] + assert nd == pytest.approx(3.222222, abs=1e-5) + + def test_error_invalid_source_target(self): + G = nx.path_graph(4) + with pytest.raises(nx.NetworkXError): + nx.average_neighbor_degree(G, "error") + with pytest.raises(nx.NetworkXError): + nx.average_neighbor_degree(G, "in", "error") + G = G.to_directed() + with pytest.raises(nx.NetworkXError): + nx.average_neighbor_degree(G, "error") + with pytest.raises(nx.NetworkXError): + nx.average_neighbor_degree(G, "in", "error") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py new file mode 100644 index 0000000..3984292 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/assortativity/tests/test_pairs.py @@ -0,0 +1,87 @@ +import networkx as nx + +from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing + + +class TestAttributeMixingXY(BaseTestAttributeMixing): + def test_node_attribute_xy_undirected(self): + attrxy = sorted(nx.node_attribute_xy(self.G, "fish")) + attrxy_result = sorted( + [ + ("one", "one"), + ("one", "one"), + ("two", "two"), + ("two", "two"), + ("one", "red"), + ("red", "one"), + ("blue", "two"), + ("two", "blue"), + ] + ) + assert attrxy == attrxy_result + + def test_node_attribute_xy_undirected_nodes(self): + attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"])) + attrxy_result = sorted([]) + assert attrxy == attrxy_result + + def test_node_attribute_xy_directed(self): + attrxy = sorted(nx.node_attribute_xy(self.D, "fish")) + attrxy_result = sorted( + [("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")] + ) + assert attrxy == attrxy_result + + def test_node_attribute_xy_multigraph(self): + attrxy = sorted(nx.node_attribute_xy(self.M, "fish")) + attrxy_result = [ + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("one", "one"), + ("two", "two"), + ("two", "two"), + ] + assert attrxy == attrxy_result + + def test_node_attribute_xy_selfloop(self): + attrxy = sorted(nx.node_attribute_xy(self.S, "fish")) + attrxy_result = [("one", "one"), ("two", "two")] + assert attrxy == attrxy_result + + +class TestDegreeMixingXY(BaseTestDegreeMixing): + def test_node_degree_xy_undirected(self): + xy = sorted(nx.node_degree_xy(self.P4)) + xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)]) + assert xy == xy_result + + def test_node_degree_xy_undirected_nodes(self): + xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1])) + xy_result = sorted([(1, 2), (2, 1)]) + assert xy == xy_result + + def test_node_degree_xy_directed(self): + xy = 
sorted(nx.node_degree_xy(self.D))
+        xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_multigraph(self):
+        xy = sorted(nx.node_degree_xy(self.M))
+        xy_result = sorted(
+            [(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)]
+        )
+        assert xy == xy_result
+
+    def test_node_degree_xy_selfloop(self):
+        xy = sorted(nx.node_degree_xy(self.S))
+        xy_result = sorted([(2, 2), (2, 2)])
+        assert xy == xy_result
+
+    def test_node_degree_xy_weighted(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=7)
+        G.add_edge(2, 3, weight=10)
+        xy = sorted(nx.node_degree_xy(G, weight="weight"))
+        xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
+        assert xy == xy_result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py
new file mode 100644
index 0000000..b308392
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/asteroidal.py
@@ -0,0 +1,164 @@
+"""
+Algorithms for asteroidal triples and asteroidal numbers in graphs.
+
+An asteroidal triple in a graph G is a set of three non-adjacent vertices
+u, v and w such that there exists a path between any two of them that avoids
+the closed neighborhood of the third. More formally, v_j, v_k belong to the
+same connected component of G - N[v_i], where N[v_i] denotes the closed
+neighborhood of v_i. A graph which does not contain any asteroidal triples is
+called an AT-free graph. The class of AT-free graphs is a graph class for
+which many NP-complete problems are solvable in polynomial time. Among them
+are independent set and coloring.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_at_free", "find_asteroidal_triple"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def find_asteroidal_triple(G):
+    r"""Find an asteroidal triple in the given graph.
+
+    An asteroidal triple is a triple of non-adjacent vertices such that
+    there exists a path between any two of them which avoids the closed
+    neighborhood of the third. The function checks all independent triples
+    of vertices and whether they form an asteroidal triple. This is done
+    with the help of a data structure called a component structure.
+    A component structure encodes information about which vertices belong to
+    the same connected component when the closed neighborhood of a given vertex
+    is removed from the graph. The algorithm used to check is the trivial
+    one, outlined in [1]_, which has a runtime of
+    :math:`O(|V||\overline{E}| + |V||E|)`, where the second term is the
+    creation of the component structure.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check for asteroidal triples.
+
+    Returns
+    -------
+    list or None
+        An asteroidal triple is returned as a list of nodes. If no asteroidal
+        triple exists, i.e. the graph is AT-free, then None is returned.
+
+    Notes
+    -----
+    The component structure and the algorithm are described in [1]_. The
+    current implementation implements the trivial algorithm for simple graphs.
+
+    References
+    ----------
+    .. [1] Ekkehard Köhler,
+       "Recognizing Graphs without asteroidal triples",
+       Journal of Discrete Algorithms 2, pages 439-452, 2004.
+       https://www.sciencedirect.com/science/article/pii/S157086670400019X
+    """
+    V = set(G.nodes)
+
+    if len(V) < 6:
+        # An asteroidal triple cannot exist in a graph with five or fewer
+        # vertices.
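+        # (Three pairwise non-adjacent vertices plus a connecting path around
+        # each closed neighborhood need at least six vertices in total; the
+        # net graph is a smallest graph containing an asteroidal triple.)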
+        return None
+
+    component_structure = create_component_structure(G)
+
+    for u, v in nx.non_edges(G):
+        u_neighborhood = set(G[u]).union([u])
+        v_neighborhood = set(G[v]).union([v])
+        union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
+        for w in V - union_of_neighborhoods:
+            # Check for each pair of vertices whether they belong to the
+            # same connected component when the closed neighborhood of the
+            # third is removed.
+            if (
+                component_structure[u][v] == component_structure[u][w]
+                and component_structure[v][u] == component_structure[v][w]
+                and component_structure[w][u] == component_structure[w][v]
+            ):
+                return [u, v, w]
+    return None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_at_free(G):
+    """Check if a graph is AT-free.
+
+    The method uses the `find_asteroidal_triple` method to recognize
+    an AT-free graph. If no asteroidal triple is found the graph is
+    AT-free and True is returned. If at least one asteroidal triple is
+    found the graph is not AT-free and False is returned.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph to check for asteroidal triples.
+
+    Returns
+    -------
+    bool
+        True if G is AT-free and False otherwise.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> nx.is_at_free(G)
+    True
+
+    >>> G = nx.cycle_graph(6)
+    >>> nx.is_at_free(G)
+    False
+    """
+    return find_asteroidal_triple(G) is None
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def create_component_structure(G):
+    r"""Create component structure for G.
+
+    A *component structure* is an :math:`n \times n` array, denoted `c`,
+    where `n` is the number of vertices and each row and column corresponds
+    to a vertex.
+
+    .. math::
+
+        c_{uv} = \begin{cases}
+            0, & \text{if } v \in N[u] \\
+            k, & \text{if } v \text{ is in component } k \text{ of } G \setminus N[u]
+        \end{cases}
+
+    where `k` is an arbitrary label for each component. The structure is used
+    to simplify the detection of asteroidal triples.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        Undirected, simple graph.
+
+    Returns
+    -------
+    component_structure : dictionary
+        A dictionary of dictionaries, keyed by pairs of vertices.
+
+    """
+    V = set(G.nodes)
+    component_structure = {}
+    for v in V:
+        label = 0
+        closed_neighborhood = set(G[v]).union({v})
+        row_dict = {}
+        for u in closed_neighborhood:
+            row_dict[u] = 0
+
+        G_reduced = G.subgraph(set(G.nodes) - closed_neighborhood)
+        for cc in nx.connected_components(G_reduced):
+            label += 1
+            for u in cc:
+                row_dict[u] = label
+
+        component_structure[v] = row_dict
+
+    return component_structure
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py
new file mode 100644
index 0000000..edc66b4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__init__.py
@@ -0,0 +1,88 @@
+r"""This module provides functions and operations for bipartite
+graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
+`E` that only connect nodes from opposite sets. It is common in the literature
+to use a spatial analogy referring to the two node sets as top and bottom nodes.
+
+The bipartite algorithms are not imported into the networkx namespace
+at the top level so the easiest way to use them is with:
+
+>>> from networkx.algorithms import bipartite
+
+NetworkX does not have a custom bipartite graph class but the Graph()
+or DiGraph() classes can be used to represent bipartite graphs. However,
+you have to keep track of which set each node belongs to, and make
+sure that there is no edge between nodes of the same set. The convention used
+in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
+identify the sets each node belongs to. This convention is not enforced in
+the source code of bipartite functions; it's only a recommendation.
+
+For example:
+
+>>> B = nx.Graph()
+>>> # Add nodes with the node attribute "bipartite"
+>>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
+>>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
+>>> # Add edges only between nodes of opposite node sets
+>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
+
+Many algorithms of the bipartite module of NetworkX require, as an argument, a
+container with all the nodes that belong to one set, in addition to the bipartite
+graph `B`. The functions in the bipartite package do not check that the node set
+is actually correct nor that the input graph is actually bipartite.
+If `B` is connected, you can find the two node sets using a two-coloring
+algorithm:
+
+>>> nx.is_connected(B)
+True
+>>> bottom_nodes, top_nodes = bipartite.sets(B)
+
+However, if the input graph is not connected, there is more than one possible
+coloration. This is the reason why we require the user to pass a container
+with all nodes of one bipartite node set as an argument to most bipartite
+functions. In the face of ambiguity, we refuse the temptation to guess and
+raise an :exc:`AmbiguousSolution <networkx.AmbiguousSolution>`
+Exception if the input graph for
+:func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
+is disconnected.
+
+Using the `bipartite` node attribute, you can easily get the two node sets:
+
+>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
+>>> bottom_nodes = set(B) - top_nodes
+
+So you can easily use the bipartite algorithms that require, as an argument, a
+container with all nodes that belong to one node set:
+
+>>> print(round(bipartite.density(B, bottom_nodes), 2))
+0.5
+>>> G = bipartite.projected_graph(B, top_nodes)
+
+All bipartite graph generators in NetworkX build bipartite graphs with the
+`bipartite` node attribute. Thus, you can use the same approach:
+
+>>> RB = bipartite.random_graph(5, 7, 0.2)
+>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
+>>> RB_bottom = set(RB) - RB_top
+>>> list(RB_top)
+[0, 1, 2, 3, 4]
+>>> list(RB_bottom)
+[5, 6, 7, 8, 9, 10, 11]
+
+For other bipartite graph generators see
+:mod:`Generators <networkx.algorithms.bipartite.generators>`.
+ +""" + +from networkx.algorithms.bipartite.basic import * +from networkx.algorithms.bipartite.centrality import * +from networkx.algorithms.bipartite.cluster import * +from networkx.algorithms.bipartite.covering import * +from networkx.algorithms.bipartite.edgelist import * +from networkx.algorithms.bipartite.matching import * +from networkx.algorithms.bipartite.matrix import * +from networkx.algorithms.bipartite.projection import * +from networkx.algorithms.bipartite.redundancy import * +from networkx.algorithms.bipartite.spectral import * +from networkx.algorithms.bipartite.generators import * +from networkx.algorithms.bipartite.extendability import * +from networkx.algorithms.bipartite.link_analysis import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..51dcb33 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-312.pyc new file mode 100644 index 0000000..c7ede75 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/basic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-312.pyc new file mode 100644 index 0000000..6c6254b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-312.pyc new file mode 100644 index 0000000..98571e6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/cluster.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-312.pyc new file mode 100644 index 0000000..450685b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/covering.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-312.pyc new file mode 100644 index 0000000..7c13ed6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/edgelist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-312.pyc new file mode 100644 index 0000000..c63af11 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/extendability.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-312.pyc new file mode 100644 index 0000000..b4a9c67 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/generators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/link_analysis.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/link_analysis.cpython-312.pyc new file mode 100644 index 0000000..241cce4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/link_analysis.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-312.pyc new file mode 100644 index 0000000..334bac8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-312.pyc new file mode 100644 index 0000000..8e50ebe Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/matrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-312.pyc new file mode 100644 index 0000000..6fa995d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/projection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-312.pyc new file mode 100644 index 0000000..7d1a278 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/redundancy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-312.pyc new file mode 100644 index 0000000..3ff7dd2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/__pycache__/spectral.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py new file mode 100644 index 0000000..8d9a4d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/basic.py @@ -0,0 +1,322 @@ +""" +========================== +Bipartite Graph Algorithms +========================== +""" + +import networkx as nx +from networkx.algorithms.components import connected_components +from networkx.exception import AmbiguousSolution + +__all__ = [ + "is_bipartite", + "is_bipartite_node_set", + "color", + "sets", + "density", + "degrees", +] + + +@nx._dispatchable +def color(G): + """Returns a two-coloring of 
the graph. + + Raises an exception if the graph is not bipartite. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + color : dictionary + A dictionary keyed by node with a 1 or 0 as data for each node color. + + Raises + ------ + NetworkXError + If the graph is not two-colorable. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.path_graph(4) + >>> c = bipartite.color(G) + >>> print(c) + {0: 1, 1: 0, 2: 1, 3: 0} + + You can use this to set a node attribute indicating the bipartite set: + + >>> nx.set_node_attributes(G, c, "bipartite") + >>> print(G.nodes[0]["bipartite"]) + 1 + >>> print(G.nodes[1]["bipartite"]) + 0 + """ + if G.is_directed(): + import itertools + + def neighbors(v): + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + + else: + neighbors = G.neighbors + + color = {} + for n in G: # handle disconnected graphs + if n in color or len(G[n]) == 0: # skip isolates + continue + queue = [n] + color[n] = 1 # nodes seen with color (1 or 0) + while queue: + v = queue.pop() + c = 1 - color[v] # opposite color of node v + for w in neighbors(v): + if w in color: + if color[w] == color[v]: + raise nx.NetworkXError("Graph is not bipartite.") + else: + color[w] = c + queue.append(w) + # color isolates with 0 + color.update(dict.fromkeys(nx.isolates(G), 0)) + return color + + +@nx._dispatchable +def is_bipartite(G): + """Returns True if graph G is bipartite, False if not. + + Parameters + ---------- + G : NetworkX graph + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.path_graph(4) + >>> print(bipartite.is_bipartite(G)) + True + + See Also + -------- + color, is_bipartite_node_set + """ + try: + color(G) + return True + except nx.NetworkXError: + return False + + +@nx._dispatchable +def is_bipartite_node_set(G, nodes): + """Returns True if nodes and G/nodes are a bipartition of G. + + Parameters + ---------- + G : NetworkX graph + + nodes: list or container + Check if nodes are a one of a bipartite set. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.path_graph(4) + >>> X = set([1, 3]) + >>> bipartite.is_bipartite_node_set(G, X) + True + + Notes + ----- + An exception is raised if the input nodes are not distinct, because in this + case some bipartite algorithms will yield incorrect results. + For connected graphs the bipartite sets are unique. This function handles + disconnected graphs. + """ + S = set(nodes) + + if len(S) < len(nodes): + # this should maybe just return False? + raise AmbiguousSolution( + "The input node set contains duplicates.\n" + "This may lead to incorrect results when using it in bipartite algorithms.\n" + "Consider using set(nodes) as the input" + ) + + for CC in (G.subgraph(c).copy() for c in connected_components(G)): + X, Y = sets(CC) + if not ( + (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S)) + ): + return False + return True + + +@nx._dispatchable +def sets(G, top_nodes=None): + """Returns bipartite node sets of graph G. + + Raises an exception if the graph is not bipartite or if the input + graph is disconnected and thus more than one valid solution exists. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + Parameters + ---------- + G : NetworkX graph + + top_nodes : container, optional + Container with all nodes in one bipartite node set. If not supplied + it will be computed. 
But if more than one solution exists an exception + will be raised. + + Returns + ------- + X : set + Nodes from one side of the bipartite graph. + Y : set + Nodes from the other side. + + Raises + ------ + AmbiguousSolution + Raised if the input bipartite graph is disconnected and no container + with all nodes in one bipartite set is provided. When determining + the nodes in each bipartite set more than one valid solution is + possible if the input graph is disconnected. + NetworkXError + Raised if the input graph is not bipartite. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.path_graph(4) + >>> X, Y = bipartite.sets(G) + >>> list(X) + [0, 2] + >>> list(Y) + [1, 3] + + See Also + -------- + color + + """ + if G.is_directed(): + is_connected = nx.is_weakly_connected + else: + is_connected = nx.is_connected + if top_nodes is not None: + X = set(top_nodes) + Y = set(G) - X + else: + if not is_connected(G): + msg = "Disconnected graph: Ambiguous solution for bipartite sets." + raise nx.AmbiguousSolution(msg) + c = color(G) + X = {n for n, is_top in c.items() if is_top} + Y = {n for n, is_top in c.items() if not is_top} + return (X, Y) + + +@nx._dispatchable(graphs="B") +def density(B, nodes): + """Returns density of bipartite graph B. + + Parameters + ---------- + B : NetworkX graph + + nodes: list or container + Nodes in one node set of the bipartite graph. + + Returns + ------- + d : float + The bipartite density + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.complete_bipartite_graph(3, 2) + >>> X = set([0, 1, 2]) + >>> bipartite.density(G, X) + 1.0 + >>> Y = set([3, 4]) + >>> bipartite.density(G, Y) + 1.0 + + Notes + ----- + The container of nodes passed as argument must contain all nodes + in one of the two bipartite node sets to avoid ambiguity in the + case of disconnected graphs. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + See Also + -------- + color + """ + n = len(B) + m = nx.number_of_edges(B) + nb = len(nodes) + nt = n - nb + if m == 0: # includes cases n==0 and n==1 + d = 0.0 + else: + if B.is_directed(): + d = m / (2 * nb * nt) + else: + d = m / (nb * nt) + return d + + +@nx._dispatchable(graphs="B", edge_attrs="weight") +def degrees(B, nodes, weight=None): + """Returns the degrees of the two node sets in the bipartite graph B. + + Parameters + ---------- + B : NetworkX graph + + nodes: list or container + Nodes in one node set of the bipartite graph. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used as a weight. + If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + (degX,degY) : tuple of dictionaries + The degrees of the two bipartite sets as dictionaries keyed by node. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.complete_bipartite_graph(3, 2) + >>> Y = set([3, 4]) + >>> degX, degY = bipartite.degrees(G, Y) + >>> dict(degX) + {0: 2, 1: 2, 2: 2} + + Notes + ----- + The container of nodes passed as argument must contain all nodes + in one of the two bipartite node sets to avoid ambiguity in the + case of disconnected graphs. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. 
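+
+    As a further illustration, when `weight` is given the degrees are
+    weighted sums; the attribute name "w" below is an arbitrary choice
+    for this sketch:
+
+    >>> for u, v in G.edges:
+    ...     G[u][v]["w"] = 2
+    >>> degX, degY = bipartite.degrees(G, Y, weight="w")
+    >>> dict(degY)
+    {3: 6, 4: 6}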
+ + See Also + -------- + color, density + """ + bottom = set(nodes) + top = set(B) - bottom + return (B.degree(top, weight), B.degree(bottom, weight)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py new file mode 100644 index 0000000..42d7270 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/centrality.py @@ -0,0 +1,290 @@ +import networkx as nx + +__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"] + + +@nx._dispatchable(name="bipartite_degree_centrality") +def degree_centrality(G, nodes): + r"""Compute the degree centrality for nodes in a bipartite network. + + The degree centrality for a node `v` is the fraction of nodes + connected to it. + + Parameters + ---------- + G : graph + A bipartite network + + nodes : list or container + Container with all nodes in one bipartite node set. + + Returns + ------- + centrality : dictionary + Dictionary keyed by node with bipartite degree centrality as the value. + + Examples + -------- + >>> G = nx.wheel_graph(5) + >>> top_nodes = {0, 1, 2} + >>> nx.bipartite.degree_centrality(G, nodes=top_nodes) + {0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0} + + See Also + -------- + betweenness_centrality + closeness_centrality + :func:`~networkx.algorithms.bipartite.basic.sets` + :func:`~networkx.algorithms.bipartite.basic.is_bipartite` + + Notes + ----- + The nodes input parameter must contain all nodes in one bipartite node set, + but the dictionary returned contains all nodes from both bipartite node + sets. See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + For unipartite networks, the degree centrality values are + normalized by dividing by the maximum possible degree (which is + `n-1` where `n` is the number of nodes in G). + + In the bipartite case, the maximum possible degree of a node in a + bipartite node set is the number of nodes in the opposite node set + [1]_. The degree centrality for a node `v` in the bipartite + sets `U` with `n` nodes and `V` with `m` nodes is + + .. math:: + + d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U , + + d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V , + + + where `deg(v)` is the degree of node `v`. + + References + ---------- + .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation + Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook + of Social Network Analysis. Sage Publications. + https://dx.doi.org/10.4135/9781446294413.n28 + """ + top = set(nodes) + bottom = set(G) - top + s = 1.0 / len(bottom) + centrality = {n: d * s for n, d in G.degree(top)} + s = 1.0 / len(top) + centrality.update({n: d * s for n, d in G.degree(bottom)}) + return centrality + + +@nx._dispatchable(name="bipartite_betweenness_centrality") +def betweenness_centrality(G, nodes): + r"""Compute betweenness centrality for nodes in a bipartite network. + + Betweenness centrality of a node `v` is the sum of the + fraction of all-pairs shortest paths that pass through `v`. + + Values of betweenness are normalized by the maximum possible + value which for bipartite graphs is limited by the relative size + of the two node sets [1]_. + + Let `n` be the number of nodes in the node set `U` and + `m` be the number of nodes in the node set `V`, then + nodes in `U` are normalized by dividing by + + .. 
math:: + + \frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] , + + where + + .. math:: + + s = (n - 1) \div m , t = (n - 1) \mod m , + + and nodes in `V` are normalized by dividing by + + .. math:: + + \frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] , + + where, + + .. math:: + + p = (m - 1) \div n , r = (m - 1) \mod n . + + Parameters + ---------- + G : graph + A bipartite graph + + nodes : list or container + Container with all nodes in one bipartite node set. + + Returns + ------- + betweenness : dictionary + Dictionary keyed by node with bipartite betweenness centrality + as the value. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> top_nodes = {1, 2} + >>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes) + {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} + + See Also + -------- + degree_centrality + closeness_centrality + :func:`~networkx.algorithms.bipartite.basic.sets` + :func:`~networkx.algorithms.bipartite.basic.is_bipartite` + + Notes + ----- + The nodes input parameter must contain all nodes in one bipartite node set, + but the dictionary returned contains all nodes from both node sets. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + + References + ---------- + .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation + Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook + of Social Network Analysis. Sage Publications. + https://dx.doi.org/10.4135/9781446294413.n28 + """ + top = set(nodes) + bottom = set(G) - top + n = len(top) + m = len(bottom) + s, t = divmod(n - 1, m) + bet_max_top = ( + ((m**2) * ((s + 1) ** 2)) + + (m * (s + 1) * (2 * t - s - 1)) + - (t * ((2 * s) - t + 3)) + ) / 2.0 + p, r = divmod(m - 1, n) + bet_max_bot = ( + ((n**2) * ((p + 1) ** 2)) + + (n * (p + 1) * (2 * r - p - 1)) + - (r * ((2 * p) - r + 3)) + ) / 2.0 + betweenness = nx.betweenness_centrality(G, normalized=False, weight=None) + for node in top: + betweenness[node] /= bet_max_top + for node in bottom: + betweenness[node] /= bet_max_bot + return betweenness + + +@nx._dispatchable(name="bipartite_closeness_centrality") +def closeness_centrality(G, nodes, normalized=True): + r"""Compute the closeness centrality for nodes in a bipartite network. + + The closeness of a node is the distance to all other nodes in the + graph or in the case that the graph is not connected to all other nodes + in the connected component containing that node. + + Parameters + ---------- + G : graph + A bipartite network + + nodes : list or container + Container with all nodes in one bipartite node set. + + normalized : bool, optional + If True (default) normalize by connected component size. + + Returns + ------- + closeness : dictionary + Dictionary keyed by node with bipartite closeness centrality + as the value. + + Examples + -------- + >>> G = nx.wheel_graph(5) + >>> top_nodes = {0, 1, 2} + >>> nx.bipartite.closeness_centrality(G, nodes=top_nodes) + {0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0} + + See Also + -------- + betweenness_centrality + degree_centrality + :func:`~networkx.algorithms.bipartite.basic.sets` + :func:`~networkx.algorithms.bipartite.basic.is_bipartite` + + Notes + ----- + The nodes input parameter must contain all nodes in one bipartite node set, + but the dictionary returned contains all nodes from both node sets. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. 
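+
+    For example, in a complete bipartite graph every node attains the
+    smallest distances possible (1 to the whole opposite set, 2 within
+    its own set), so every closeness value is exactly 1.0:
+
+    >>> G = nx.complete_bipartite_graph(2, 3)
+    >>> c = nx.bipartite.closeness_centrality(G, nodes={0, 1})
+    >>> [c[n] for n in sorted(c)]
+    [1.0, 1.0, 1.0, 1.0, 1.0]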
+ + + Closeness centrality is normalized by the minimum distance possible. + In the bipartite case the minimum distance for a node in one bipartite + node set is 1 from all nodes in the other node set and 2 from all + other nodes in its own set [1]_. Thus the closeness centrality + for node `v` in the two bipartite sets `U` with + `n` nodes and `V` with `m` nodes is + + .. math:: + + c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U, + + c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V, + + where `d` is the sum of the distances from `v` to all + other nodes. + + Higher values of closeness indicate higher centrality. + + As in the unipartite case, setting normalized=True causes the + values to normalized further to n-1 / size(G)-1 where n is the + number of nodes in the connected part of graph containing the + node. If the graph is not completely connected, this algorithm + computes the closeness centrality for each connected part + separately. + + References + ---------- + .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation + Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook + of Social Network Analysis. Sage Publications. + https://dx.doi.org/10.4135/9781446294413.n28 + """ + closeness = {} + path_length = nx.single_source_shortest_path_length + top = set(nodes) + bottom = set(G) - top + n = len(top) + m = len(bottom) + for node in top: + sp = dict(path_length(G, node)) + totsp = sum(sp.values()) + if totsp > 0.0 and len(G) > 1: + closeness[node] = (m + 2 * (n - 1)) / totsp + if normalized: + s = (len(sp) - 1) / (len(G) - 1) + closeness[node] *= s + else: + closeness[node] = 0.0 + for node in bottom: + sp = dict(path_length(G, node)) + totsp = sum(sp.values()) + if totsp > 0.0 and len(G) > 1: + closeness[node] = (n + 2 * (m - 1)) / totsp + if normalized: + s = (len(sp) - 1) / (len(G) - 1) + closeness[node] *= s + else: + closeness[node] = 0.0 + return closeness diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py new file mode 100644 index 0000000..78b3c0f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/cluster.py @@ -0,0 +1,289 @@ +"""Functions for computing clustering of pairs""" + +import itertools + +import networkx as nx + +__all__ = [ + "clustering", + "average_clustering", + "latapy_clustering", + "robins_alexander_clustering", +] + + +def cc_dot(nu, nv): + return len(nu & nv) / len(nu | nv) + + +def cc_max(nu, nv): + return len(nu & nv) / max(len(nu), len(nv)) + + +def cc_min(nu, nv): + return len(nu & nv) / min(len(nu), len(nv)) + + +modes = {"dot": cc_dot, "min": cc_min, "max": cc_max} + + +@nx._dispatchable +def latapy_clustering(G, nodes=None, mode="dot"): + r"""Compute a bipartite clustering coefficient for nodes. + + The bipartite clustering coefficient is a measure of local density + of connections defined as [1]_: + + .. math:: + + c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|} + + where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, + and `c_{uv}` is the pairwise clustering coefficient between nodes + `u` and `v`. + + The mode selects the function for `c_{uv}` which can be: + + `dot`: + + .. math:: + + c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|} + + `min`: + + .. math:: + + c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)} + + `max`: + + .. 
math:: + + c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)} + + + Parameters + ---------- + G : graph + A bipartite graph + + nodes : list or iterable (optional) + Compute bipartite clustering for these nodes. The default + is all nodes in G. + + mode : string + The pairwise bipartite clustering method to be used in the computation. + It must be "dot", "max", or "min". + + Returns + ------- + clustering : dictionary + A dictionary keyed by node with the clustering coefficient value. + + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.path_graph(4) # path graphs are bipartite + >>> c = bipartite.clustering(G) + >>> c[0] + 0.5 + >>> c = bipartite.clustering(G, mode="min") + >>> c[0] + 1.0 + + See Also + -------- + robins_alexander_clustering + average_clustering + networkx.algorithms.cluster.square_clustering + + References + ---------- + .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). + Basic notions for the analysis of large two-mode networks. + Social Networks 30(1), 31--48. + """ + if not nx.algorithms.bipartite.is_bipartite(G): + raise nx.NetworkXError("Graph is not bipartite") + + try: + cc_func = modes[mode] + except KeyError as err: + raise nx.NetworkXError( + "Mode for bipartite clustering must be: dot, min or max" + ) from err + + if nodes is None: + nodes = G + ccs = {} + for v in nodes: + cc = 0.0 + nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v} + for u in nbrs2: + cc += cc_func(set(G[u]), set(G[v])) + if cc > 0.0: # len(nbrs2)>0 + cc /= len(nbrs2) + ccs[v] = cc + return ccs + + +clustering = latapy_clustering + + +@nx._dispatchable(name="bipartite_average_clustering") +def average_clustering(G, nodes=None, mode="dot"): + r"""Compute the average bipartite clustering coefficient. + + A clustering coefficient for the whole graph is the average, + + .. math:: + + C = \frac{1}{n}\sum_{v \in G} c_v, + + where `n` is the number of nodes in `G`. + + Similar measures for the two bipartite sets can be defined [1]_ + + .. math:: + + C_X = \frac{1}{|X|}\sum_{v \in X} c_v, + + where `X` is a bipartite set of `G`. + + Parameters + ---------- + G : graph + a bipartite graph + + nodes : list or iterable, optional + A container of nodes to use in computing the average. + The nodes should be either the entire graph (the default) or one of the + bipartite sets. + + mode : string + The pairwise bipartite clustering method. + It must be "dot", "max", or "min" + + Returns + ------- + clustering : float + The average bipartite clustering for the given set of nodes or the + entire graph if no nodes are specified. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.star_graph(3) # star graphs are bipartite + >>> bipartite.average_clustering(G) + 0.75 + >>> X, Y = bipartite.sets(G) + >>> bipartite.average_clustering(G, X) + 0.0 + >>> bipartite.average_clustering(G, Y) + 1.0 + + See Also + -------- + clustering + + Notes + ----- + The container of nodes passed to this function must contain all of the nodes + in one of the bipartite sets ("top" or "bottom") in order to compute + the correct average bipartite clustering coefficients. + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + + References + ---------- + .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). + Basic notions for the analysis of large two-mode networks. + Social Networks 30(1), 31--48. 
+ """ + if nodes is None: + nodes = G + ccs = latapy_clustering(G, nodes=nodes, mode=mode) + return sum(ccs[v] for v in nodes) / len(nodes) + + +@nx._dispatchable +def robins_alexander_clustering(G): + r"""Compute the bipartite clustering of G. + + Robins and Alexander [1]_ defined bipartite clustering coefficient as + four times the number of four cycles `C_4` divided by the number of + three paths `L_3` in a bipartite graph: + + .. math:: + + CC_4 = \frac{4 * C_4}{L_3} + + Parameters + ---------- + G : graph + a bipartite graph + + Returns + ------- + clustering : float + The Robins and Alexander bipartite clustering for the input graph. + + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> G = nx.davis_southern_women_graph() + >>> print(round(bipartite.robins_alexander_clustering(G), 3)) + 0.468 + + See Also + -------- + latapy_clustering + networkx.algorithms.cluster.square_clustering + + References + ---------- + .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking + directors: Network structure and distance in bipartite graphs. + Computational & Mathematical Organization Theory 10(1), 69–94. + + """ + if G.order() < 4 or G.size() < 3: + return 0 + L_3 = _threepaths(G) + if L_3 == 0: + return 0 + C_4 = _four_cycles(G) + return (4.0 * C_4) / L_3 + + +def _four_cycles(G): + # Also see `square_clustering` which counts squares in a similar way + cycles = 0 + seen = set() + G_adj = G._adj + for v in G: + seen.add(v) + v_neighbors = set(G_adj[v]) + if len(v_neighbors) < 2: + # Can't form a square without at least two neighbors + continue + two_hop_neighbors = set().union(*(G_adj[u] for u in v_neighbors)) + two_hop_neighbors -= seen + for x in two_hop_neighbors: + p2 = len(v_neighbors.intersection(G_adj[x])) + cycles += p2 * (p2 - 1) + return cycles / 4 + + +def _threepaths(G): + paths = 0 + for v in G: + for u in G[v]: + for w in set(G[u]) - {v}: + paths += len(set(G[w]) - {v, u}) + # Divide by two because we count each three path twice + # one for each possible starting point + return paths / 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py new file mode 100644 index 0000000..f937903 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/covering.py @@ -0,0 +1,57 @@ +"""Functions related to graph covers.""" + +import networkx as nx +from networkx.algorithms.bipartite.matching import hopcroft_karp_matching +from networkx.algorithms.covering import min_edge_cover as _min_edge_cover +from networkx.utils import not_implemented_for + +__all__ = ["min_edge_cover"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(name="bipartite_min_edge_cover") +def min_edge_cover(G, matching_algorithm=None): + """Returns a set of edges which constitutes + the minimum edge cover of the graph. + + The smallest edge cover can be found in polynomial time by finding + a maximum matching and extending it greedily so that all nodes + are covered. + + Parameters + ---------- + G : NetworkX graph + An undirected bipartite graph. + + matching_algorithm : function + A function that returns a maximum cardinality matching in a + given bipartite graph. The function must take one input, the + graph ``G``, and return a dictionary mapping each node to its + mate. If not specified, + :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching` + will be used. 
Other possibilities include
+        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.
+
+    Returns
+    -------
+    set
+        A set of the edges in a minimum edge cover of the graph, given as
+        pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
+        for every pair of nodes `u` and `v` joined by an edge of the cover.
+
+    Notes
+    -----
+    An edge cover of a graph is a set of edges such that every node of
+    the graph is incident to at least one edge of the set.
+    A minimum edge cover is an edge cover of smallest cardinality.
+
+    Due to its implementation, the worst-case running time of this algorithm
+    is bounded by the worst-case running time of the function
+    ``matching_algorithm``.
+    """
+    if G.order() == 0:  # Special case for the empty graph
+        return set()
+    if matching_algorithm is None:
+        matching_algorithm = hopcroft_karp_matching
+    return _min_edge_cover(G, matching_algorithm=matching_algorithm)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py
new file mode 100644
index 0000000..c2c6b9c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/edgelist.py
@@ -0,0 +1,360 @@
+"""
+********************
+Bipartite Edge Lists
+********************
+Read and write NetworkX graphs as bipartite edge lists.
+
+Format
+------
+You can read or write three formats of edge lists with these functions.
+
+Node pairs with no data::
+
+ 1 2
+
+Python dictionary as data::
+
+ 1 2 {'weight':7, 'color':'green'}
+
+Arbitrary data::
+
+ 1 2 7 green
+
+For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
+"""
+
+__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
+
+import networkx as nx
+from networkx.utils import not_implemented_for, open_file
+
+
+@open_file(1, mode="wb")
+def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
+    """Write a bipartite graph as a list of edges.
+
+    Parameters
+    ----------
+    G : Graph
+        A NetworkX bipartite graph
+    path : file or string
+        File or filename to write. If a file is provided, it must be
+        opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
+    comments : string, optional
+        The character used to indicate the start of a comment.
+    delimiter : string, optional
+        The string used to separate values. The default is whitespace.
+    data : bool or list, optional
+        If False write no edge data.
+        If True write a string representation of the edge data dictionary.
+        If a list (or other iterable) is provided, write the keys specified
+        in the list.
+    encoding: string, optional
+        Specify which encoding to use when writing file.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(4)
+    >>> G.add_nodes_from([0, 2], bipartite=0)
+    >>> G.add_nodes_from([1, 3], bipartite=1)
+    >>> nx.write_edgelist(G, "test.edgelist")
+    >>> fh = open("test.edgelist_open", "wb")
+    >>> nx.write_edgelist(G, fh)
+    >>> nx.write_edgelist(G, "test.edgelist.gz")
+    >>> nx.write_edgelist(G, "test.edgelist_nodata.gz", data=False)
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(1, 2, weight=7, color="red")
+    >>> nx.write_edgelist(G, "test.edgelist_bigger_nodata", data=False)
+    >>> nx.write_edgelist(G, "test.edgelist_color", data=["color"])
+    >>> nx.write_edgelist(G, "test.edgelist_color_weight", data=["color", "weight"])
+
+    See Also
+    --------
+    read_edgelist
+    generate_edgelist
+    """
+    for line in generate_edgelist(G, delimiter, data):
+        line += "\n"
+        path.write(line.encode(encoding))
+
+
+@not_implemented_for("directed")
+def generate_edgelist(G, delimiter=" ", data=True):
+    """Generate lines of the bipartite graph G in edge list format.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph is assumed to have the node attribute `bipartite` set
+        to 0 or 1, marking the two graph parts.
+
+    delimiter : string, optional
+        Separator for node labels
+
+    data : bool or list of keys
+        If False generate no edge data. If True use a dictionary
+        representation of edge data. If a list of keys use a list of data
+        values corresponding to the keys.
+
+    Returns
+    -------
+    lines : string
+        Lines of data in edge list format.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> G.add_nodes_from([0, 2], bipartite=0)
+    >>> G.add_nodes_from([1, 3], bipartite=1)
+    >>> G[1][2]["weight"] = 3
+    >>> G[2][3]["capacity"] = 12
+    >>> for line in bipartite.generate_edgelist(G, data=False):
+    ...     print(line)
+    0 1
+    2 1
+    2 3
+
+    >>> for line in bipartite.generate_edgelist(G):
+    ...     print(line)
+    0 1 {}
+    2 1 {'weight': 3}
+    2 3 {'capacity': 12}
+
+    >>> for line in bipartite.generate_edgelist(G, data=["weight"]):
+    ...     print(line)
+    0 1
+    2 1 3
+    2 3
+    """
+    try:
+        part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
+    except BaseException as err:
+        raise AttributeError("Missing node attribute `bipartite`") from err
+    if data is True or data is False:
+        for n in part0:
+            for edge in G.edges(n, data=data):
+                yield delimiter.join(map(str, edge))
+    else:
+        for n in part0:
+            for u, v, d in G.edges(n, data=True):
+                edge = [u, v]
+                try:
+                    edge.extend(d[k] for k in data)
+                except KeyError:
+                    pass  # missing data for this edge, should warn?
+                yield delimiter.join(map(str, edge))
+
+
+@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
+def parse_edgelist(
+    lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
+):
+    """Parse lines of an edge list representation of a bipartite graph.
+
+    Parameters
+    ----------
+    lines : list or iterator of strings
+        Input data in edgelist format
+    comments : string, optional
+        Marker for comment lines
+    delimiter : string, optional
+        Separator for node labels
+    create_using: NetworkX graph container, optional
+        Use given NetworkX graph for holding nodes or edges.
+    nodetype : Python type, optional
+        Convert nodes to this type.
+    data : bool or list of (label,type) tuples
+        If False, generate no edge data; if True, use a dictionary
+        representation of edge data; if a list of tuples, use it to specify
+        dictionary key names and types for edge data.
+ + Returns + ------- + G: NetworkX Graph + The bipartite graph corresponding to lines + + Examples + -------- + Edgelist with no data: + + >>> from networkx.algorithms import bipartite + >>> lines = ["1 2", "2 3", "3 4"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int) + >>> sorted(G.nodes()) + [1, 2, 3, 4] + >>> sorted(G.nodes(data=True)) + [(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})] + >>> sorted(G.edges()) + [(1, 2), (2, 3), (3, 4)] + + Edgelist with data in Python dictionary representation: + + >>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int) + >>> sorted(G.nodes()) + [1, 2, 3, 4] + >>> sorted(G.edges(data=True)) + [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})] + + Edgelist with data in a list: + + >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"] + >>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) + >>> sorted(G.nodes()) + [1, 2, 3, 4] + >>> sorted(G.edges(data=True)) + [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})] + + See Also + -------- + """ + from ast import literal_eval + + G = nx.empty_graph(0, create_using) + for line in lines: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not len(line): + continue + # split line, should have 2 or more + s = line.rstrip("\n").split(delimiter) + if len(s) < 2: + continue + u = s.pop(0) + v = s.pop(0) + d = s + if nodetype is not None: + try: + u = nodetype(u) + v = nodetype(v) + except BaseException as err: + raise TypeError( + f"Failed to convert nodes {u},{v} to type {nodetype}." + ) from err + + if len(d) == 0 or data is False: + # no data or data type specified + edgedata = {} + elif data is True: + # no edge types specified + try: # try to evaluate as dictionary + edgedata = dict(literal_eval(" ".join(d))) + except BaseException as err: + raise TypeError( + f"Failed to convert edge data ({d}) to dictionary." + ) from err + else: + # convert edge data to dictionary with specified keys and type + if len(d) != len(data): + raise IndexError( + f"Edge data {d} and data_keys {data} are not the same length" + ) + edgedata = {} + for (edge_key, edge_type), edge_value in zip(data, d): + try: + edge_value = edge_type(edge_value) + except BaseException as err: + raise TypeError( + f"Failed to convert {edge_key} data " + f"{edge_value} to type {edge_type}." + ) from err + edgedata.update({edge_key: edge_value}) + G.add_node(u, bipartite=0) + G.add_node(v, bipartite=1) + G.add_edge(u, v, **edgedata) + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True) +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): + """Read a bipartite graph from a list of edges. + + Parameters + ---------- + path : file or string + File or filename to read. If a file is provided, it must be + opened in 'rb' mode. + Filenames ending in .gz or .bz2 will be decompressed. + comments : string, optional + The character used to indicate the start of a comment. + delimiter : string, optional + The string used to separate values. The default is whitespace. + create_using : Graph container, optional, + Use specified container to build graph. The default is networkx.Graph, + an undirected graph. 
+    nodetype : int, float, str, Python type, optional
+        Convert node data from strings to specified type.
+    data : bool or list of (label,type) tuples
+        Tuples specifying dictionary key names and types for edge data
+    edgetype : int, float, str, Python type, optional OBSOLETE
+        Convert edge data from strings to specified type and use as 'weight'
+    encoding: string, optional
+        Specify which encoding to use when reading file.
+
+    Returns
+    -------
+    G : graph
+        A networkx Graph or other type specified with create_using
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> G.add_nodes_from([0, 2], bipartite=0)
+    >>> G.add_nodes_from([1, 3], bipartite=1)
+    >>> bipartite.write_edgelist(G, "test.edgelist")
+    >>> G = bipartite.read_edgelist("test.edgelist")
+
+    >>> fh = open("test.edgelist", "rb")
+    >>> G = bipartite.read_edgelist(fh)
+    >>> fh.close()
+
+    >>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)
+
+    Edgelist with data in a list:
+
+    >>> textline = "1 2 3"
+    >>> fh = open("test.edgelist", "w")
+    >>> d = fh.write(textline)
+    >>> fh.close()
+    >>> G = bipartite.read_edgelist(
+    ...     "test.edgelist", nodetype=int, data=(("weight", float),)
+    ... )
+    >>> list(G)
+    [1, 2]
+    >>> list(G.edges(data=True))
+    [(1, 2, {'weight': 3.0})]
+
+    See parse_edgelist() for more examples of formatting.
+
+    See Also
+    --------
+    parse_edgelist
+
+    Notes
+    -----
+    Since nodes must be hashable, the function nodetype must return hashable
+    types (e.g. int, float, str, frozenset - or tuples of those, etc.)
+    """
+    lines = (line.decode(encoding) for line in path)
+    return parse_edgelist(
+        lines,
+        comments=comments,
+        delimiter=delimiter,
+        create_using=create_using,
+        nodetype=nodetype,
+        data=data,
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py
new file mode 100644
index 0000000..61d8d06
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/extendability.py
@@ -0,0 +1,105 @@
+"""Provides a function for computing the extendability of a graph which is
+undirected, simple, connected and bipartite and contains at least one perfect matching."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["maximal_extendability"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def maximal_extendability(G):
+    """Computes the extendability of a graph.
+
+    The extendability of a graph is defined as the maximum $k$ for which `G`
+    is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
+    perfect matching and every set of $k$ independent edges can be extended
+    to a perfect matching in `G`.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        A connected bipartite graph without self-loops
+
+    Returns
+    -------
+    extendability : int
+
+    Raises
+    ------
+    NetworkXError
+        If the graph `G` is disconnected.
+        If the graph `G` is not bipartite.
+        If the graph `G` does not contain a perfect matching.
+        If the residual graph of `G` is not strongly connected.
+
+    Notes
+    -----
+    Definition:
+    Let `G` be a simple, connected, undirected and bipartite graph with a perfect
+    matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
+    is the graph obtained from G by directing the edges of M from V to U and the
+    edges that do not belong to M from U to V.
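+
+    For instance, for the 4-cycle the residual graph (for either perfect
+    matching) is itself a directed 4-cycle, so the extendability is 1:
+
+    >>> nx.bipartite.maximal_extendability(nx.cycle_graph(4))
+    1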
+
+    Lemma [1]_:
+    Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
+    graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
+    paths between every vertex of U and every vertex of V.
+
+    Assuming that the input graph `G` is undirected, simple, connected, bipartite and
+    contains a perfect matching M, this function constructs the residual graph $G_M$ of G
+    and returns the minimum, over all pairs of a vertex of U and a vertex of V, of the
+    maximum number of vertex-disjoint directed paths between them in $G_M$. By combining
+    the definition and the lemma, this value is the extendability of the graph `G`.
+
+    The time complexity is $O(n^3 m^2)$, where $n$ is the number of vertices
+    and $m$ is the number of edges.
+
+    References
+    ----------
+    .. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
+       J. Lakhal, L. Litzler, Information Processing Letters, 1998.
+    .. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
+       https://doi.org/10.1016/0012-365X(80)90037-0
+
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Graph G is not connected")
+
+    if not nx.bipartite.is_bipartite(G):
+        raise nx.NetworkXError("Graph G is not bipartite")
+
+    U, V = nx.bipartite.sets(G)
+
+    maximum_matching = nx.bipartite.hopcroft_karp_matching(G)
+
+    if not nx.is_perfect_matching(G, maximum_matching):
+        raise nx.NetworkXError("Graph G does not contain a perfect matching")
+
+    # list of edges in perfect matching, directed from V to U
+    pm = [(node, maximum_matching[node]) for node in V & maximum_matching.keys()]
+
+    # Direct all the edges of G, from V to U if in matching, else from U to V
+    directed_edges = [
+        (x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
+        for x, y in G.edges
+    ]
+
+    # Construct the residual graph of G
+    residual_G = nx.DiGraph()
+    residual_G.add_nodes_from(G)
+    residual_G.add_edges_from(directed_edges)
+
+    if not nx.is_strongly_connected(residual_G):
+        raise nx.NetworkXError("The residual graph of G is not strongly connected")
+
+    # For node-pairs between V & U, keep min of max number of node-disjoint paths
+    # Variable $k$ stands for the extendability of graph G
+    k = float("inf")
+    for u in U:
+        for v in V:
+            num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
+            k = k if k < num_paths else num_paths
+    return k
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py
new file mode 100644
index 0000000..6e73a4d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/generators.py
@@ -0,0 +1,603 @@
+"""
+Generators and functions for bipartite graphs.
+"""
+
+import math
+import numbers
+from functools import reduce
+
+import networkx as nx
+from networkx.utils import nodes_or_number, py_random_state
+
+__all__ = [
+    "configuration_model",
+    "havel_hakimi_graph",
+    "reverse_havel_hakimi_graph",
+    "alternating_havel_hakimi_graph",
+    "preferential_attachment_graph",
+    "random_graph",
+    "gnmk_random_graph",
+    "complete_bipartite_graph",
+]
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+@nodes_or_number([0, 1])
+def complete_bipartite_graph(n1, n2, create_using=None):
+    """Returns the complete bipartite graph `K_{n_1,n_2}`.
+
+    The graph is composed of two partitions with nodes 0 to (n1 - 1)
+    in the first and nodes n1 to (n1 + n2 - 1) in the second.
+ Each node in the first is connected to each node in the second. + + Parameters + ---------- + n1, n2 : integer or iterable container of nodes + If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`. + If a container, the elements are the nodes. + create_using : NetworkX graph instance, (default: nx.Graph) + Return graph of this type. + + Notes + ----- + Nodes are the integers 0 to `n1 + n2 - 1` unless either n1 or n2 are + containers of nodes. If only one of n1 or n2 are integers, that + integer is replaced by `range` of that integer. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. + To use it use nx.bipartite.complete_bipartite_graph + """ + G = nx.empty_graph(0, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + n1, top = n1 + n2, bottom = n2 + if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral): + bottom = [n1 + i for i in bottom] + G.add_nodes_from(top, bipartite=0) + G.add_nodes_from(bottom, bipartite=1) + if len(G) != len(top) + len(bottom): + raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes") + G.add_edges_from((u, v) for u in top for v in bottom) + G.graph["name"] = f"complete_bipartite_graph({len(top)}, {len(bottom)})" + return G + + +@py_random_state(3) +@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True) +def configuration_model(aseq, bseq, create_using=None, seed=None): + """Returns a random bipartite graph from two given degree sequences. + + Parameters + ---------- + aseq : list + Degree sequence for node set A. + bseq : list + Degree sequence for node set B. + create_using : NetworkX graph instance, optional + Return graph of this type. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(bseq) - 1). + Nodes from set A are connected to nodes in set B by choosing + randomly from the possible free stubs, one in A and one in B. + + Notes + ----- + The sum of the two sequences must be equal: sum(aseq)=sum(bseq) + If no graph type is specified use MultiGraph with parallel edges. + If you want a graph with no parallel edges use create_using=Graph() + but then the resulting degree sequences might not be exact. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. 
+ To use it use nx.bipartite.configuration_model + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + # length and sum of each sequence + lena = len(aseq) + lenb = len(bseq) + suma = sum(aseq) + sumb = sum(bseq) + + if not suma == sumb: + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) + + G = _add_nodes_with_bipartite_label(G, lena, lenb) + + if len(aseq) == 0 or max(aseq) == 0: + return G # done if no edges + + # build lists of degree-repeated vertex numbers + stubs = [[v] * aseq[v] for v in range(lena)] + astubs = [x for subseq in stubs for x in subseq] + + stubs = [[v] * bseq[v - lena] for v in range(lena, lena + lenb)] + bstubs = [x for subseq in stubs for x in subseq] + + # shuffle lists + seed.shuffle(astubs) + seed.shuffle(bstubs) + + G.add_edges_from([astubs[i], bstubs[i]] for i in range(suma)) + + G.name = "bipartite_configuration_model" + return G + + +@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True) +def havel_hakimi_graph(aseq, bseq, create_using=None): + """Returns a bipartite graph from two given degree sequences using a + Havel-Hakimi style construction. + + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(bseq) - 1). + Nodes from the set A are connected to nodes in the set B by + connecting the highest degree nodes in set A to the highest degree + nodes in set B until all stubs are connected. + + Parameters + ---------- + aseq : list + Degree sequence for node set A. + bseq : list + Degree sequence for node set B. + create_using : NetworkX graph instance, optional + Return graph of this type. + + Notes + ----- + The sum of the two sequences must be equal: sum(aseq)=sum(bseq) + If no graph type is specified use MultiGraph with parallel edges. + If you want a graph with no parallel edges use create_using=Graph() + but then the resulting degree sequences might not be exact. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. + To use it use nx.bipartite.havel_hakimi_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + # length of the each sequence + naseq = len(aseq) + nbseq = len(bseq) + + suma = sum(aseq) + sumb = sum(bseq) + + if not suma == sumb: + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) + + G = _add_nodes_with_bipartite_label(G, naseq, nbseq) + + if len(aseq) == 0 or max(aseq) == 0: + return G # done if no edges + + # build list of degree-repeated vertex numbers + astubs = [[aseq[v], v] for v in range(naseq)] + bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)] + astubs.sort() + while astubs: + (degree, u) = astubs.pop() # take of largest degree node in the a set + if degree == 0: + break # done, all are zero + # connect the source to largest degree nodes in the b set + bstubs.sort() + for target in bstubs[-degree:]: + v = target[1] + G.add_edge(u, v) + target[0] -= 1 # note this updates bstubs too. 
+ if target[0] == 0: + bstubs.remove(target) + + G.name = "bipartite_havel_hakimi_graph" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def reverse_havel_hakimi_graph(aseq, bseq, create_using=None): + """Returns a bipartite graph from two given degree sequences using a + Havel-Hakimi style construction. + + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(bseq) - 1). + Nodes from set A are connected to nodes in the set B by connecting + the highest degree nodes in set A to the lowest degree nodes in + set B until all stubs are connected. + + Parameters + ---------- + aseq : list + Degree sequence for node set A. + bseq : list + Degree sequence for node set B. + create_using : NetworkX graph instance, optional + Return graph of this type. + + Notes + ----- + The sum of the two sequences must be equal: sum(aseq)=sum(bseq) + If no graph type is specified use MultiGraph with parallel edges. + If you want a graph with no parallel edges use create_using=Graph() + but then the resulting degree sequences might not be exact. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. + To use it use nx.bipartite.reverse_havel_hakimi_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + # length of the each sequence + lena = len(aseq) + lenb = len(bseq) + suma = sum(aseq) + sumb = sum(bseq) + + if not suma == sumb: + raise nx.NetworkXError( + f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}" + ) + + G = _add_nodes_with_bipartite_label(G, lena, lenb) + + if len(aseq) == 0 or max(aseq) == 0: + return G # done if no edges + + # build list of degree-repeated vertex numbers + astubs = [[aseq[v], v] for v in range(lena)] + bstubs = [[bseq[v - lena], v] for v in range(lena, lena + lenb)] + astubs.sort() + bstubs.sort() + while astubs: + (degree, u) = astubs.pop() # take of largest degree node in the a set + if degree == 0: + break # done, all are zero + # connect the source to the smallest degree nodes in the b set + for target in bstubs[0:degree]: + v = target[1] + G.add_edge(u, v) + target[0] -= 1 # note this updates bstubs too. + if target[0] == 0: + bstubs.remove(target) + + G.name = "bipartite_reverse_havel_hakimi_graph" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def alternating_havel_hakimi_graph(aseq, bseq, create_using=None): + """Returns a bipartite graph from two given degree sequences using + an alternating Havel-Hakimi style construction. + + The graph is composed of two partitions. Set A has nodes 0 to + (len(aseq) - 1) and set B has nodes len(aseq) to (len(bseq) - 1). + Nodes from the set A are connected to nodes in the set B by + connecting the highest degree nodes in set A to alternatively the + highest and the lowest degree nodes in set B until all stubs are + connected. + + Parameters + ---------- + aseq : list + Degree sequence for node set A. + bseq : list + Degree sequence for node set B. + create_using : NetworkX graph instance, optional + Return graph of this type. + + Notes + ----- + The sum of the two sequences must be equal: sum(aseq)=sum(bseq) + If no graph type is specified use MultiGraph with parallel edges. 
+    If you want a graph with no parallel edges use create_using=Graph(),
+    but then the resulting degree sequences might not be exact.
+
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+    To use it use nx.bipartite.alternating_havel_hakimi_graph
+    """
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXError("Directed Graph not supported")
+
+    # length of each sequence
+    naseq = len(aseq)
+    nbseq = len(bseq)
+    suma = sum(aseq)
+    sumb = sum(bseq)
+
+    if suma != sumb:
+        raise nx.NetworkXError(
+            f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
+        )
+
+    G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
+
+    if len(aseq) == 0 or max(aseq) == 0:
+        return G  # done if no edges
+    # build list of degree-repeated vertex numbers
+    astubs = [[aseq[v], v] for v in range(naseq)]
+    bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
+    while astubs:
+        astubs.sort()
+        (degree, u) = astubs.pop()  # take the largest degree node in the A set
+        if degree == 0:
+            break  # done, all remaining degrees are zero
+        bstubs.sort()
+        small = bstubs[0 : degree // 2]  # the low degree targets
+        large = bstubs[(-degree + degree // 2) :]  # the high degree targets
+        stubs = [x for z in zip(large, small) for x in z]  # interleave them
+        if len(stubs) < len(small) + len(large):  # check for zip truncation
+            stubs.append(large.pop())
+        for target in stubs:
+            v = target[1]
+            G.add_edge(u, v)
+            target[0] -= 1  # note this updates bstubs too (target aliases a bstubs entry)
+            if target[0] == 0:
+                bstubs.remove(target)
+
+    G.name = "bipartite_alternating_havel_hakimi_graph"
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
+    """Create a bipartite graph with a preferential attachment model from
+    a given single degree sequence.
+
+    The graph is composed of two partitions. Set A has nodes 0 to
+    (len(aseq) - 1) and set B has nodes starting with node len(aseq).
+    The number of nodes in set B is random.
+
+    Parameters
+    ----------
+    aseq : list
+        Degree sequence for node set A.
+    p : float
+        Probability that a new bottom node is added.
+    create_using : NetworkX graph instance, optional
+        Return graph of this type.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    References
+    ----------
+    .. [1] Guillaume, J.L. and Latapy, M.,
+       Bipartite graphs as models of complex networks.
+       Physica A: Statistical Mechanics and its Applications,
+       2006, 371(2), pp.795-813.
+    .. [2] Jean-Loup Guillaume and Matthieu Latapy,
+       Bipartite structure of all complex networks,
+       Inf. Process. Lett. 90, 2004, pg. 215-221
+       https://doi.org/10.1016/j.ipl.2004.03.007
+
+    Notes
+    -----
+    The nodes are assigned the attribute 'bipartite' with the value 0 or 1
+    to indicate which bipartite set the node belongs to.
+
+    This function is not imported in the main namespace.
+ To use it use nx.bipartite.preferential_attachment_graph + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + if p > 1: + raise nx.NetworkXError(f"probability {p} > 1") + + naseq = len(aseq) + G = _add_nodes_with_bipartite_label(G, naseq, 0) + vv = [[v] * aseq[v] for v in range(naseq)] + while vv: + while vv[0]: + source = vv[0][0] + vv[0].remove(source) + if seed.random() < p or len(G) == naseq: + target = len(G) + G.add_node(target, bipartite=1) + G.add_edge(source, target) + else: + bb = [[b] * G.degree(b) for b in range(naseq, len(G))] + # flatten the list of lists into a list. + bbstubs = reduce(lambda x, y: x + y, bb) + # choose preferentially a bottom node. + target = seed.choice(bbstubs) + G.add_node(target, bipartite=1) + G.add_edge(source, target) + vv.remove(vv[0]) + G.name = "bipartite_preferential_attachment_model" + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_graph(n, m, p, seed=None, directed=False): + """Returns a bipartite random graph. + + This is a bipartite version of the binomial (Erdős-Rényi) graph. + The graph is composed of two partitions. Set A has nodes 0 to + (n - 1) and set B has nodes n to (n + m - 1). + + Parameters + ---------- + n : int + The number of nodes in the first bipartite set. + m : int + The number of nodes in the second bipartite set. + p : float + Probability for edge creation. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : bool, optional (default=False) + If True return a directed graph + + Notes + ----- + The bipartite random graph algorithm chooses each of the n*m (undirected) + or 2*nm (directed) possible edges with probability p. + + This algorithm is $O(n+m)$ where $m$ is the expected number of edges. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. + To use it use nx.bipartite.random_graph + + See Also + -------- + gnp_random_graph, configuration_model + + References + ---------- + .. [1] Vladimir Batagelj and Ulrik Brandes, + "Efficient generation of large random networks", + Phys. Rev. E, 71, 036113, 2005. + """ + G = nx.Graph() + G = _add_nodes_with_bipartite_label(G, n, m) + if directed: + G = nx.DiGraph(G) + G.name = f"fast_gnp_random_graph({n},{m},{p})" + + if p <= 0: + return G + if p >= 1: + return nx.complete_bipartite_graph(n, m) + + lp = math.log(1.0 - p) + + v = 0 + w = -1 + while v < n: + lr = math.log(1.0 - seed.random()) + w = w + 1 + int(lr / lp) + while w >= m and v < n: + w = w - m + v = v + 1 + if v < n: + G.add_edge(v, n + w) + + if directed: + # use the same algorithm to + # add edges from the "m" to "n" set + v = 0 + w = -1 + while v < n: + lr = math.log(1.0 - seed.random()) + w = w + 1 + int(lr / lp) + while w >= m and v < n: + w = w - m + v = v + 1 + if v < n: + G.add_edge(n + w, v) + + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def gnmk_random_graph(n, m, k, seed=None, directed=False): + """Returns a random bipartite graph G_{n,m,k}. + + Produces a bipartite graph chosen randomly out of the set of all graphs + with n top nodes, m bottom nodes, and k edges. + The graph is composed of two sets of nodes. + Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1). 
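+    For example, ``gnmk_random_graph(2, 2, 3)`` returns a graph on top nodes
+    {0, 1} and bottom nodes {2, 3} with three of the four possible edges,
+    chosen uniformly at random.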
+ + Parameters + ---------- + n : int + The number of nodes in the first bipartite set. + m : int + The number of nodes in the second bipartite set. + k : int + The number of edges + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : bool, optional (default=False) + If True return a directed graph + + Examples + -------- + >>> G = nx.bipartite.gnmk_random_graph(10, 20, 50) + + See Also + -------- + gnm_random_graph + + Notes + ----- + If k > m * n then a complete bipartite graph is returned. + + This graph is a bipartite version of the `G_{nm}` random graph model. + + The nodes are assigned the attribute 'bipartite' with the value 0 or 1 + to indicate which bipartite set the node belongs to. + + This function is not imported in the main namespace. + To use it use nx.bipartite.gnmk_random_graph + """ + G = nx.Graph() + G = _add_nodes_with_bipartite_label(G, n, m) + if directed: + G = nx.DiGraph(G) + G.name = f"bipartite_gnm_random_graph({n},{m},{k})" + if n == 1 or m == 1: + return G + max_edges = n * m # max_edges for bipartite networks + if k >= max_edges: # Maybe we should raise an exception here + return nx.complete_bipartite_graph(n, m, create_using=G) + + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] + bottom = list(set(G) - set(top)) + edge_count = 0 + while edge_count < k: + # generate random edge,u,v + u = seed.choice(top) + v = seed.choice(bottom) + if v in G[u]: + continue + else: + G.add_edge(u, v) + edge_count += 1 + return G + + +def _add_nodes_with_bipartite_label(G, lena, lenb): + G.add_nodes_from(range(lena + lenb)) + b = dict(zip(range(lena), [0] * lena)) + b.update(dict(zip(range(lena, lena + lenb), [1] * lenb))) + nx.set_node_attributes(G, b, "bipartite") + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/link_analysis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/link_analysis.py new file mode 100644 index 0000000..7238b1e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/link_analysis.py @@ -0,0 +1,316 @@ +import itertools + +import networkx as nx + +__all__ = ["birank"] + + +@nx._dispatchable(edge_attrs="weight") +def birank( + G, + nodes, + *, + alpha=None, + beta=None, + top_personalization=None, + bottom_personalization=None, + max_iter=100, + tol=1.0e-6, + weight="weight", +): + r"""Compute the BiRank score for nodes in a bipartite network. + + Given the bipartite sets $U$ and $P$, the BiRank algorithm seeks to satisfy + the following recursive relationships between the scores of nodes $j \in P$ + and $i \in U$: + + .. math:: + + p_j = \alpha \sum_{i \in U} \frac{w_{ij}}{\sqrt{d_i}\sqrt{d_j}} u_i + + (1 - \alpha) p_j^0 + + u_i = \beta \sum_{j \in P} \frac{w_{ij}}{\sqrt{d_i}\sqrt{d_j}} p_j + + (1 - \beta) u_i^0 + + where + + * $p_j$ and $u_i$ are the BiRank scores of nodes $j \in P$ and $i \in U$. + * $w_{ij}$ is the weight of the edge between nodes $i \in U$ and $j \in P$ + (With a value of 0 if no edge exists). + * $d_i$ and $d_j$ are the weighted degrees of nodes $i \in U$ and $j \in P$, + respectively. + * $p_j^0$ and $u_i^0$ are personalization values that can encode a priori + weights for the nodes $j \in P$ and $i \in U$, respectively. Akin to the + personalization vector used by PageRank. + * $\alpha$ and $\beta$ are damping hyperparameters applying to nodes in $P$ + and $U$ respectively. 
They can take values in the interval $[0, 1]$, and + are analogous to those used by PageRank. + + Below are two use cases for this algorithm. + + 1. Personalized Recommendation System + Given a bipartite graph representing users and items, BiRank can be used + as a collaborative filtering algorithm to recommend items to users. + Previous ratings are encoded as edge weights, and the specific ratings + of an individual user on a set of items is used as the personalization + vector over items. See the example below for an implementation of this + on a toy dataset provided in [1]_. + + 2. Popularity Prediction + Given a bipartite graph representing user interactions with items, e.g. + commits to a GitHub repository, BiRank can be used to predict the + popularity of a given item. Edge weights should encode the strength of + the interaction signal. This could be a raw count, or weighted by a time + decay function like that specified in Eq. (15) of [1]_. The + personalization vectors can be used to encode existing popularity + signals, for example, the monthly download count of a repository's + package. + + Parameters + ---------- + G : graph + A bipartite network + + nodes : iterable of nodes + Container with all nodes belonging to the first bipartite node set + ('top'). The nodes in this set use the hyperparameter `alpha`, and the + personalization dictionary `top_personalization`. The nodes in the second + bipartite node set ('bottom') are automatically determined by taking the + complement of 'top' with respect to the graph `G`. + + alpha : float, optional (default=0.80 if top_personalization not empty, else 1) + Damping factor for the 'top' nodes. Must be in the interval $[0, 1]$. + Larger alpha and beta generally reduce the effect of the personalizations + and increase the number of iterations before convergence. Choice of value + is largely dependent on use case, and experimentation is recommended. + + beta : float, optional (default=0.80 if bottom_personalization not empty, else 1) + Damping factor for the 'bottom' nodes. Must be in the interval $[0, 1]$. + Larger alpha and beta generally reduce the effect of the personalizations + and increase the number of iterations before convergence. Choice of value + is largely dependent on use case, and experimentation is recommended. + + top_personalization : dict, optional (default=None) + Dictionary keyed by nodes in 'top' to that node's personalization value. + Unspecified nodes in 'top' will be assigned a personalization value of 0. + Personalization values are used to encode a priori weights for a given node, + and should be non-negative. + + bottom_personalization : dict, optional (default=None) + Dictionary keyed by nodes in 'bottom' to that node's personalization value. + Unspecified nodes in 'bottom' will be assigned a personalization value of 0. + Personalization values are used to encode a priori weights for a given node, + and should be non-negative. + + max_iter : int, optional (default=100) + Maximum number of iterations in power method eigenvalue solver. + + tol : float, optional (default=1.0e-6) + Error tolerance used to check convergence in power method solver. The + iteration will stop after a tolerance of both ``len(top) * tol`` and + ``len(bottom) * tol`` is reached for nodes in 'top' and 'bottom' + respectively. + + weight : string or None, optional (default='weight') + Edge data key to use as weight. + + Returns + ------- + birank : dictionary + Dictionary keyed by node to that node's BiRank score. 
+
+    Raises
+    ------
+    NetworkXAlgorithmError
+        If the parameters `alpha` or `beta` are not in the interval [0, 1],
+        if either of the bipartite sets is empty, or if negative values are
+        provided in the personalization dictionaries.
+
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    Examples
+    --------
+    Construct a bipartite graph with user-item ratings and use BiRank to
+    recommend items to a user (user 1). The example below uses the `rating`
+    edge attribute as the weight of the edges. The `top_personalization`
+    vector is used to encode the user's previous ratings on items.
+
+    Create the graph and bipartite node sets for the example:
+
+    >>> elist = [
+    ...     ("u1", "p1", 5),
+    ...     ("u2", "p1", 5),
+    ...     ("u2", "p2", 4),
+    ...     ("u3", "p1", 3),
+    ...     ("u3", "p3", 2),
+    ... ]
+    >>> G = nx.Graph()
+    >>> G.add_weighted_edges_from(elist, weight="rating")
+    >>> product_nodes = ("p1", "p2", "p3")
+    >>> user = "u1"
+
+    First, we create a personalization vector for the user based on their
+    ratings of past items. In this case they have only rated one item (p1,
+    with a rating of 5) in the past.
+
+    >>> user_personalization = {
+    ...     product: rating
+    ...     for _, product, rating in G.edges(nbunch=user, data="rating")
+    ... }
+    >>> user_personalization
+    {'p1': 5}
+
+    Calculate the BiRank score of all nodes in the graph, filter for the items
+    that the user has not rated yet, and sort the results by score.
+
+    >>> user_birank_results = nx.bipartite.birank(
+    ...     G, product_nodes, top_personalization=user_personalization, weight="rating"
+    ... )
+    >>> user_birank_results = filter(
+    ...     lambda item: item[0][0] == "p" and user not in G.neighbors(item[0]),
+    ...     user_birank_results.items(),
+    ... )
+    >>> user_birank_results = sorted(
+    ...     user_birank_results, key=lambda item: item[1], reverse=True
+    ... )
+    >>> user_recommendations = {
+    ...     product: round(score, 5) for product, score in user_birank_results
+    ... }
+    >>> user_recommendations
+    {'p2': 1.44818, 'p3': 1.04811}
+
+    We find that user 1 should be recommended item p2 over item p3. This is
+    because user 2 also rated p1 highly, while user 3 did not. Thus user 2's
+    tastes are inferred to be similar to user 1's, and carry more weight in
+    the recommendation.
+
+    See Also
+    --------
+    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
+    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`
+    :func:`~networkx.algorithms.bipartite.centrality.betweenness_centrality`
+    :func:`~networkx.algorithms.bipartite.basic.sets`
+    :func:`~networkx.algorithms.bipartite.basic.is_bipartite`
+
+    Notes
+    -----
+    The `nodes` input parameter must contain all nodes in one bipartite
+    node set, but the dictionary returned contains all nodes from both
+    bipartite node sets. See :mod:`bipartite documentation
+    <networkx.algorithms.bipartite>` for further details on how
+    bipartite graphs are handled in NetworkX.
+
+    If a personalization dictionary is not provided for top (bottom),
+    `alpha` (`beta`) will default to 1. This is because a damping factor
+    without a non-zero entry in the personalization vector would lead to
+    the algorithm converging to the zero vector.
+
+    References
+    ----------
+    .. [1] Xiangnan He, Ming Gao, Min-Yen Kan, and Dingxian Wang. 2017.
+       BiRank: Towards Ranking on Bipartite Graphs. IEEE Trans. on Knowl.
+       and Data Eng. 29, 1 (January 2017), 57–71.
+       https://arxiv.org/pdf/1708.04396
+
+    """
+    import numpy as np
+    import scipy as sp
+
+    # Initialize the sets of top and bottom nodes
+    top = set(nodes)
+    bottom = set(G) - top
+    top_count = len(top)
+    bottom_count = len(bottom)
+
+    if top_count == 0 or bottom_count == 0:
+        raise nx.NetworkXAlgorithmError(
+            "The BiRank algorithm requires a bipartite graph with at least one "
+            "node in each set."
+        )
+
+    # Clean the personalization dictionaries
+    top_personalization = _clean_personalization_dict(top_personalization)
+    bottom_personalization = _clean_personalization_dict(bottom_personalization)
+
+    # Set default values for alpha and beta if not provided
+    if alpha is None:
+        alpha = 0.8 if top_personalization else 1
+    if beta is None:
+        beta = 0.8 if bottom_personalization else 1
+
+    if alpha < 0 or alpha > 1:
+        raise nx.NetworkXAlgorithmError("alpha must be in the interval [0, 1]")
+    if beta < 0 or beta > 1:
+        raise nx.NetworkXAlgorithmError("beta must be in the interval [0, 1]")
+
+    # Initialize query vectors
+    p0 = np.array([top_personalization.get(n, 0) for n in top], dtype=float)
+    u0 = np.array([bottom_personalization.get(n, 0) for n in bottom], dtype=float)
+
+    # Construct degree normalized biadjacency matrix `S` and its transpose
+    W = nx.bipartite.biadjacency_matrix(G, bottom, top, weight=weight, dtype=float)
+    p_degrees = W.sum(axis=0, dtype=float)
+    # Handle the case where a node is disconnected - avoids a division warning
+    p_degrees[p_degrees == 0] = 1.0
+    D_p = sp.sparse.dia_array(
+        ([1.0 / np.sqrt(p_degrees)], [0]),
+        shape=(top_count, top_count),
+        dtype=float,
+    )
+    u_degrees = W.sum(axis=1, dtype=float)
+    u_degrees[u_degrees == 0] = 1.0
+    D_u = sp.sparse.dia_array(
+        ([1.0 / np.sqrt(u_degrees)], [0]),
+        shape=(bottom_count, bottom_count),
+        dtype=float,
+    )
+    S = D_u.tocsr() @ W @ D_p.tocsr()
+    S_T = S.T
+
+    # Initialize birank vectors for iteration
+    p = np.ones(top_count, dtype=float) / top_count
+    u = beta * (S @ p) + (1 - beta) * u0
+
+    # Iterate until convergence
+    for _ in range(max_iter):
+        p_last = p
+        u_last = u
+        p = alpha * (S_T @ u) + (1 - alpha) * p0
+        u = beta * (S @ p) + (1 - beta) * u0
+
+        # Continue iterating if the error (absolute if less than 1, relative
+        # otherwise) is above the tolerance threshold for either p or u
+        err_u = np.absolute((u_last - u) / np.maximum(1.0, u_last)).sum()
+        if err_u >= len(u) * tol:
+            continue
+        err_p = np.absolute((p_last - p) / np.maximum(1.0, p_last)).sum()
+        if err_p >= len(p) * tol:
+            continue
+
+        # Handle the edge case where both alpha and beta are 1: the scale is
+        # then indeterminate, so normalization is required to return
+        # consistent results
+        if alpha == 1 and beta == 1:
+            p = p / np.linalg.norm(p, 1)
+            u = u / np.linalg.norm(u, 1)
+
+        # If both error thresholds pass, return a single dictionary mapping
+        # nodes to their scores
+        return dict(
+            zip(itertools.chain(top, bottom), map(float, itertools.chain(p, u)))
+        )
+
+    # If we reach this point, we have not converged
+    raise nx.PowerIterationFailedConvergence(max_iter)
+
+
+def _clean_personalization_dict(personalization):
+    """Filter out zero values from the personalization dictionary,
+    handle the case where None is passed, and ensure values are
+    non-negative."""
+    if personalization is None:
+        return {}
+    if any(value < 0 for value in personalization.values()):
+        raise nx.NetworkXAlgorithmError("Personalization values must be non-negative.")
+    return {node: value for node, value in personalization.items() if value != 0}
diff --git
a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py new file mode 100644 index 0000000..6b577d6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matching.py @@ -0,0 +1,590 @@ +# This module uses material from the Wikipedia article Hopcroft--Karp algorithm +# , accessed on +# January 3, 2015, which is released under the Creative Commons +# Attribution-Share-Alike License 3.0 +# . That article includes +# pseudocode, which has been translated into the corresponding Python code. +# +# Portions of this module use code from David Eppstein's Python Algorithms and +# Data Structures (PADS) library, which is dedicated to the public domain (for +# proof, see ). +"""Provides functions for computing maximum cardinality matchings and minimum +weight full matchings in a bipartite graph. + +If you don't care about the particular implementation of the maximum matching +algorithm, simply use the :func:`maximum_matching`. If you do care, you can +import one of the named maximum matching algorithms directly. + +For example, to find a maximum matching in the complete bipartite graph with +two vertices on the left and three vertices on the right: + +>>> G = nx.complete_bipartite_graph(2, 3) +>>> left, right = nx.bipartite.sets(G) +>>> list(left) +[0, 1] +>>> list(right) +[2, 3, 4] +>>> nx.bipartite.maximum_matching(G) +{0: 2, 1: 3, 2: 0, 3: 1} + +The dictionary returned by :func:`maximum_matching` includes a mapping for +vertices in both the left and right vertex sets. + +Similarly, :func:`minimum_weight_full_matching` produces, for a complete +weighted bipartite graph, a matching whose cardinality is the cardinality of +the smaller of the two partitions, and for which the sum of the weights of the +edges included in the matching is minimal. + +""" + +import collections +import itertools + +import networkx as nx +from networkx.algorithms.bipartite import sets as bipartite_sets +from networkx.algorithms.bipartite.matrix import biadjacency_matrix + +__all__ = [ + "maximum_matching", + "hopcroft_karp_matching", + "eppstein_matching", + "to_vertex_cover", + "minimum_weight_full_matching", +] + +INFINITY = float("inf") + + +@nx._dispatchable +def hopcroft_karp_matching(G, top_nodes=None): + """Returns the maximum cardinality matching of the bipartite graph `G`. + + A matching is a set of edges that do not share any nodes. A maximum + cardinality matching is a matching with the most edges possible. It + is not always unique. Finding a matching in a bipartite graph can be + treated as a networkx flow problem. + + The functions ``hopcroft_karp_matching`` and ``maximum_matching`` + are aliases of the same function. + + Parameters + ---------- + G : NetworkX graph + + Undirected bipartite graph + + top_nodes : container of nodes + + Container with all nodes in one bipartite node set. If not supplied + it will be computed. But if more than one solution exists an exception + will be raised. + + Returns + ------- + matches : dictionary + + The matching is returned as a dictionary, `matches`, such that + ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched + nodes do not occur as a key in `matches`. + + Raises + ------ + AmbiguousSolution + Raised if the input bipartite graph is disconnected and no container + with all nodes in one bipartite set is provided. 
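+        Supplying `top_nodes` avoids this ambiguity.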
When determining + the nodes in each bipartite set more than one valid solution is + possible if the input graph is disconnected. + + Notes + ----- + This function is implemented with the `Hopcroft--Karp matching algorithm + `_ for + bipartite graphs. + + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + See Also + -------- + maximum_matching + hopcroft_karp_matching + eppstein_matching + + References + ---------- + .. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for + Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing** + 2.4 (1973), pp. 225--231. . + + """ + + # First we define some auxiliary search functions. + # + # If you are a human reading these auxiliary search functions, the "global" + # variables `leftmatches`, `rightmatches`, `distances`, etc. are defined + # below the functions, so that they are initialized close to the initial + # invocation of the search functions. + def breadth_first_search(): + for v in left: + if leftmatches[v] is None: + distances[v] = 0 + queue.append(v) + else: + distances[v] = INFINITY + distances[None] = INFINITY + while queue: + v = queue.popleft() + if distances[v] < distances[None]: + for u in G[v]: + if distances[rightmatches[u]] is INFINITY: + distances[rightmatches[u]] = distances[v] + 1 + queue.append(rightmatches[u]) + return distances[None] is not INFINITY + + def depth_first_search(v): + if v is not None: + for u in G[v]: + if distances[rightmatches[u]] == distances[v] + 1: + if depth_first_search(rightmatches[u]): + rightmatches[u] = v + leftmatches[v] = u + return True + distances[v] = INFINITY + return False + return True + + # Initialize the "global" variables that maintain state during the search. + left, right = bipartite_sets(G, top_nodes) + leftmatches = dict.fromkeys(left) + rightmatches = dict.fromkeys(right) + distances = {} + queue = collections.deque() + + # Implementation note: this counter is incremented as pairs are matched but + # it is currently not used elsewhere in the computation. + num_matched_pairs = 0 + while breadth_first_search(): + for v in left: + if leftmatches[v] is None: + if depth_first_search(v): + num_matched_pairs += 1 + + # Strip the entries matched to `None`. + leftmatches = {k: v for k, v in leftmatches.items() if v is not None} + rightmatches = {k: v for k, v in rightmatches.items() if v is not None} + + # At this point, the left matches and the right matches are inverses of one + # another. In other words, + # + # leftmatches == {v, k for k, v in rightmatches.items()} + # + # Finally, we combine both the left matches and right matches. + return dict(itertools.chain(leftmatches.items(), rightmatches.items())) + + +@nx._dispatchable +def eppstein_matching(G, top_nodes=None): + """Returns the maximum cardinality matching of the bipartite graph `G`. + + Parameters + ---------- + G : NetworkX graph + + Undirected bipartite graph + + top_nodes : container + + Container with all nodes in one bipartite node set. If not supplied + it will be computed. But if more than one solution exists an exception + will be raised. + + Returns + ------- + matches : dictionary + + The matching is returned as a dictionary, `matching`, such that + ``matching[v] == w`` if node `v` is matched to node `w`. Unmatched + nodes do not occur as a key in `matching`. + + Raises + ------ + AmbiguousSolution + Raised if the input bipartite graph is disconnected and no container + with all nodes in one bipartite set is provided. 
When determining
+        the nodes in each bipartite set, more than one valid solution is
+        possible if the input graph is disconnected.
+
+    Notes
+    -----
+    This function is implemented with David Eppstein's version of the
+    Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
+    originally appeared in the `Python Algorithms and Data Structures library
+    (PADS) <https://www.ics.uci.edu/~eppstein/PADS/>`_.
+
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    hopcroft_karp_matching
+
+    """
+    # Due to its original implementation, a directed graph is needed
+    # so that the two sets of bipartite nodes can be distinguished
+    left, right = bipartite_sets(G, top_nodes)
+    G = nx.DiGraph(G.edges(left))
+    # initialize greedy matching (redundant, but faster than full search)
+    matching = {}
+    for u in G:
+        for v in G[u]:
+            if v not in matching:
+                matching[v] = u
+                break
+    while True:
+        # structure residual graph into layers
+        # pred[u] gives the neighbor in the previous layer for u in U
+        # preds[v] gives a list of neighbors in the previous layer for v in V
+        # unmatched gives a list of unmatched vertices in final layer of V,
+        # and is also used as a flag value for pred[u] when u is in the first
+        # layer
+        preds = {}
+        unmatched = []
+        pred = dict.fromkeys(G, unmatched)
+        for v in matching:
+            del pred[matching[v]]
+        layer = list(pred)
+
+        # repeatedly extend layering structure by another pair of layers
+        while layer and not unmatched:
+            newLayer = {}
+            for u in layer:
+                for v in G[u]:
+                    if v not in preds:
+                        newLayer.setdefault(v, []).append(u)
+            layer = []
+            for v in newLayer:
+                preds[v] = newLayer[v]
+                if v in matching:
+                    layer.append(matching[v])
+                    pred[matching[v]] = v
+                else:
+                    unmatched.append(v)
+
+        # did we finish layering without finding any alternating paths?
+        if not unmatched:
+            # TODO - The lines between --- were unused and were thus commented
+            # out. This whole commented chunk should be reviewed to determine
+            # whether it should be built upon or completely removed.
+            # ---
+            # unlayered = {}
+            # for u in G:
+            #     # TODO Why is extra inner loop necessary?
+            #     for v in G[u]:
+            #         if v not in preds:
+            #             unlayered[v] = None
+            # ---
+            # TODO Originally, this function returned a three-tuple:
+            #
+            #     return (matching, list(pred), list(unlayered))
+            #
+            # For some reason, the documentation for this function
+            # indicated that the second and third elements of the returned
+            # three-tuple would be the vertices in the left and right vertex
+            # sets, respectively, that are also in the maximum independent set.
+            # However, what I think the author meant was that the second
+            # element is the list of vertices that were unmatched and the
+            # third element was the list of vertices that were matched. Since
+            # that seems to be the case, they don't really need to be
+            # returned, since that information can be inferred from the
+            # matching dictionary.
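+            # Make the matching symmetric: e.g. {2: 0, 3: 1} becomes
+            # {2: 0, 3: 1, 0: 2, 1: 3}.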
+ + # All the matched nodes must be a key in the dictionary + for key in matching.copy(): + matching[matching[key]] = key + return matching + + # recursively search backward through layers to find alternating paths + # recursion returns true if found path, false otherwise + def recurse(v): + if v in preds: + L = preds.pop(v) + for u in L: + if u in pred: + pu = pred.pop(u) + if pu is unmatched or recurse(pu): + matching[v] = u + return True + return False + + for v in unmatched: + recurse(v) + + +def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets): + """Returns True if and only if the vertex `v` is connected to one of + the target vertices by an alternating path in `G`. + + An *alternating path* is a path in which every other edge is in the + specified maximum matching (and the remaining edges in the path are not in + the matching). An alternating path may have matched edges in the even + positions or in the odd positions, as long as the edges alternate between + 'matched' and 'unmatched'. + + `G` is an undirected bipartite NetworkX graph. + + `v` is a vertex in `G`. + + `matched_edges` is a set of edges present in a maximum matching in `G`. + + `unmatched_edges` is a set of edges not present in a maximum + matching in `G`. + + `targets` is a set of vertices. + + """ + + def _alternating_dfs(u, along_matched=True): + """Returns True if and only if `u` is connected to one of the + targets by an alternating path. + + `u` is a vertex in the graph `G`. + + If `along_matched` is True, this step of the depth-first search + will continue only through edges in the given matching. Otherwise, it + will continue only through edges *not* in the given matching. + + """ + visited = set() + # Follow matched edges when depth is even, + # and follow unmatched edges when depth is odd. + initial_depth = 0 if along_matched else 1 + stack = [(u, iter(G[u]), initial_depth)] + while stack: + parent, children, depth = stack[-1] + valid_edges = matched_edges if depth % 2 else unmatched_edges + try: + child = next(children) + if child not in visited: + if (parent, child) in valid_edges or (child, parent) in valid_edges: + if child in targets: + return True + visited.add(child) + stack.append((child, iter(G[child]), depth + 1)) + except StopIteration: + stack.pop() + return False + + # Check for alternating paths starting with edges in the matching, then + # check for alternating paths starting with edges not in the + # matching. + return _alternating_dfs(v, along_matched=True) or _alternating_dfs( + v, along_matched=False + ) + + +def _connected_by_alternating_paths(G, matching, targets): + """Returns the set of vertices that are connected to one of the target + vertices by an alternating path in `G` or are themselves a target. + + An *alternating path* is a path in which every other edge is in the + specified maximum matching (and the remaining edges in the path are not in + the matching). An alternating path may have matched edges in the even + positions or in the odd positions, as long as the edges alternate between + 'matched' and 'unmatched'. + + `G` is an undirected bipartite NetworkX graph. + + `matching` is a dictionary representing a maximum matching in `G`, as + returned by, for example, :func:`maximum_matching`. + + `targets` is a set of vertices. + + """ + # Get the set of matched edges and the set of unmatched edges. Only include + # one version of each undirected edge (for example, include edge (1, 2) but + # not edge (2, 1)). 
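+    # (`matching.items()` yields each matched pair twice, once in each
+    # direction; the frozenset collapses the two into a single edge.)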
Using frozensets as an intermediary step we do not + # require nodes to be orderable. + edge_sets = {frozenset((u, v)) for u, v in matching.items()} + matched_edges = {tuple(edge) for edge in edge_sets} + unmatched_edges = { + (u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets + } + + return { + v + for v in G + if v in targets + or _is_connected_by_alternating_path( + G, v, matched_edges, unmatched_edges, targets + ) + } + + +@nx._dispatchable +def to_vertex_cover(G, matching, top_nodes=None): + """Returns the minimum vertex cover corresponding to the given maximum + matching of the bipartite graph `G`. + + Parameters + ---------- + G : NetworkX graph + + Undirected bipartite graph + + matching : dictionary + + A dictionary whose keys are vertices in `G` and whose values are the + distinct neighbors comprising the maximum matching for `G`, as returned + by, for example, :func:`maximum_matching`. The dictionary *must* + represent the maximum matching. + + top_nodes : container + + Container with all nodes in one bipartite node set. If not supplied + it will be computed. But if more than one solution exists an exception + will be raised. + + Returns + ------- + vertex_cover : :class:`set` + + The minimum vertex cover in `G`. + + Raises + ------ + AmbiguousSolution + Raised if the input bipartite graph is disconnected and no container + with all nodes in one bipartite set is provided. When determining + the nodes in each bipartite set more than one valid solution is + possible if the input graph is disconnected. + + Notes + ----- + This function is implemented using the procedure guaranteed by `Konig's + theorem + `_, + which proves an equivalence between a maximum matching and a minimum vertex + cover in bipartite graphs. + + Since a minimum vertex cover is the complement of a maximum independent set + for any graph, one can compute the maximum independent set of a bipartite + graph this way: + + >>> G = nx.complete_bipartite_graph(2, 3) + >>> matching = nx.bipartite.maximum_matching(G) + >>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching) + >>> independent_set = set(G) - vertex_cover + >>> print(list(independent_set)) + [2, 3, 4] + + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + """ + # This is a Python implementation of the algorithm described at + # . + L, R = bipartite_sets(G, top_nodes) + # Let U be the set of unmatched vertices in the left vertex set. + unmatched_vertices = set(G) - set(matching) + U = unmatched_vertices & L + # Let Z be the set of vertices that are either in U or are connected to U + # by alternating paths. + Z = _connected_by_alternating_paths(G, matching, U) + # At this point, every edge either has a right endpoint in Z or a left + # endpoint not in Z. This gives us the vertex cover. + return (L - Z) | (R & Z) + + +#: Returns the maximum cardinality matching in the given bipartite graph. +#: +#: This function is simply an alias for :func:`hopcroft_karp_matching`. +maximum_matching = hopcroft_karp_matching + + +@nx._dispatchable(edge_attrs="weight") +def minimum_weight_full_matching(G, top_nodes=None, weight="weight"): + r"""Returns a minimum weight full matching of the bipartite graph `G`. + + Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights + :math:`w : E \to \mathbb{R}`. This function then produces a matching + :math:`M \subseteq E` with cardinality + + .. 
math:: + \lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert), + + which minimizes the sum of the weights of the edges included in the + matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such + matching exists. + + When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly + referred to as a perfect matching; here, since we allow + :math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we + follow Karp [1]_ and refer to the matching as *full*. + + Parameters + ---------- + G : NetworkX graph + + Undirected bipartite graph + + top_nodes : container + + Container with all nodes in one bipartite node set. If not supplied + it will be computed. + + weight : string, optional (default='weight') + + The edge data key used to provide each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + matches : dictionary + + The matching is returned as a dictionary, `matches`, such that + ``matches[v] == w`` if node `v` is matched to node `w`. Unmatched + nodes do not occur as a key in `matches`. + + Raises + ------ + ValueError + Raised if no full matching exists. + + ImportError + Raised if SciPy is not available. + + Notes + ----- + The problem of determining a minimum weight full matching is also known as + the rectangular linear assignment problem. This implementation defers the + calculation of the assignment to SciPy. + + References + ---------- + .. [1] Richard Manning Karp: + An algorithm to Solve the m x n Assignment Problem in Expected Time + O(mn log n). + Networks, 10(2):143–152, 1980. + + """ + import numpy as np + import scipy as sp + + left, right = nx.bipartite.sets(G, top_nodes) + U = list(left) + V = list(right) + # We explicitly create the biadjacency matrix having infinities + # where edges are missing (as opposed to zeros, which is what one would + # get by using toarray on the sparse matrix). + weights_sparse = biadjacency_matrix( + G, row_order=U, column_order=V, weight=weight, format="coo" + ) + weights = np.full(weights_sparse.shape, np.inf) + weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data + left_matches = sp.optimize.linear_sum_assignment(weights) + d = {U[u]: V[v] for u, v in zip(*left_matches)} + # d will contain the matching from edges in left to right; we need to + # add the ones from right to left as well. + d.update({v: u for u, v in d.items()}) + return d diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py new file mode 100644 index 0000000..c9143da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/matrix.py @@ -0,0 +1,168 @@ +""" +==================== +Biadjacency matrices +==================== +""" + +import itertools + +import networkx as nx +from networkx.convert_matrix import _generate_weighted_edges + +__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"] + + +@nx._dispatchable(edge_attrs="weight") +def biadjacency_matrix( + G, row_order, column_order=None, dtype=None, weight="weight", format="csr" +): + r"""Returns the biadjacency matrix of the bipartite graph G. + + Let `G = (U, V, E)` be a bipartite graph with node sets + `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency + matrix [1]_ is the `r` x `s` matrix `B` in which `b_{i,j} = 1` + if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is + not `None` and matches the name of an edge attribute, its value is + used instead of 1. 
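+
+    For example, for the path graph on nodes 0-1-2 with ``row_order=[0, 2]``,
+    `B` is the 2 x 1 matrix ``[[1], [1]]``.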
+ + Parameters + ---------- + G : graph + A NetworkX graph + + row_order : list of nodes + The rows of the matrix are ordered according to the list of nodes. + + column_order : list, optional + The columns of the matrix are ordered according to the list of nodes. + If column_order is None, then the ordering of columns is arbitrary. + + dtype : NumPy data-type, optional + A valid NumPy dtype used to initialize the array. If None, then the + NumPy default is used. + + weight : string or None, optional (default='weight') + The edge data key used to provide each value in the matrix. + If None, then each edge has weight 1. + + format : str in {'dense', 'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'} + The type of the matrix to be returned (default 'csr'). For + some algorithms different implementations of sparse matrices + can perform better. See [2]_ for details. + + Returns + ------- + M : SciPy sparse array + Biadjacency matrix representation of the bipartite graph G. + + Notes + ----- + No attempt is made to check that the input graph is bipartite. + + For directed bipartite graphs only successors are considered as neighbors. + To obtain an adjacency matrix with ones (or weight values) for both + predecessors and successors you have to generate two biadjacency matrices + where the rows of one of them are the columns of the other, and then add + one to the transpose of the other. + + See Also + -------- + adjacency_matrix + from_biadjacency_matrix + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph + .. [2] Scipy Dev. References, "Sparse Matrices", + https://docs.scipy.org/doc/scipy/reference/sparse.html + """ + import scipy as sp + + nlen = len(row_order) + if nlen == 0: + raise nx.NetworkXError("row_order is empty list") + if len(row_order) != len(set(row_order)): + msg = "Ambiguous ordering: `row_order` contained duplicates." + raise nx.NetworkXError(msg) + if column_order is None: + column_order = list(set(G) - set(row_order)) + mlen = len(column_order) + if len(column_order) != len(set(column_order)): + msg = "Ambiguous ordering: `column_order` contained duplicates." + raise nx.NetworkXError(msg) + + row_index = dict(zip(row_order, itertools.count())) + col_index = dict(zip(column_order, itertools.count())) + + if G.number_of_edges() == 0: + row, col, data = [], [], [] + else: + row, col, data = zip( + *( + (row_index[u], col_index[v], d.get(weight, 1)) + for u, v, d in G.edges(row_order, data=True) + if u in row_index and v in col_index + ) + ) + A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype) + try: + return A.asformat(format) + except ValueError as err: + raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"): + r"""Creates a new bipartite graph from a biadjacency matrix given as a + SciPy sparse array. + + Parameters + ---------- + A: scipy sparse array + A biadjacency matrix representation of a graph + + create_using: NetworkX graph + Use specified graph for result. The default is Graph() + + edge_attribute: string + Name of edge attribute to store matrix numeric value. The data will + have the same type as the matrix entry (int, float, (real,imag)). + + Notes + ----- + The nodes are labeled with the attribute `bipartite` set to an integer + 0 or 1 representing membership in part 0 or part 1 of the bipartite graph. 
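+    Rows of `A` become nodes ``0`` through ``n - 1`` and columns become nodes
+    ``n`` through ``n + m - 1``, where ``n, m = A.shape``.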
+
+    If `create_using` is an instance of :class:`networkx.MultiGraph` or
+    :class:`networkx.MultiDiGraph` and the entries of `A` are of
+    type :class:`int`, then this function returns a multigraph (of the same
+    type as `create_using`) with parallel edges. In this case, `edge_attribute`
+    will be ignored.
+
+    See Also
+    --------
+    biadjacency_matrix
+    from_numpy_array
+
+    References
+    ----------
+    [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
+    """
+    G = nx.empty_graph(0, create_using)
+    n, m = A.shape
+    # Make sure we get even the isolated nodes of the graph.
+    G.add_nodes_from(range(n), bipartite=0)
+    G.add_nodes_from(range(n, n + m), bipartite=1)
+    # Create an iterable over (u, v, w) triples and for each triple, add an
+    # edge from u to v with weight w.
+    triples = ((u, n + v, d) for (u, v, d) in _generate_weighted_edges(A))
+    # If the entries in the adjacency matrix are integers and the graph is a
+    # multigraph, then create parallel edges, each with weight 1, for each
+    # entry in the adjacency matrix. Otherwise, create one edge for each
+    # positive entry in the adjacency matrix and set the weight of that edge
+    # to be the entry in the matrix.
+    if A.dtype.kind in ("i", "u") and G.is_multigraph():
+        chain = itertools.chain.from_iterable
+        triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
+    G.add_weighted_edges_from(triples, weight=edge_attribute)
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py
new file mode 100644
index 0000000..7c2a26c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/projection.py
@@ -0,0 +1,526 @@
+"""One-mode (unipartite) projections of bipartite graphs."""
+
+import networkx as nx
+from networkx.exception import NetworkXAlgorithmError
+from networkx.utils import not_implemented_for
+
+__all__ = [
+    "projected_graph",
+    "weighted_projected_graph",
+    "collaboration_weighted_projected_graph",
+    "overlap_weighted_projected_graph",
+    "generic_weighted_projected_graph",
+]
+
+
+@nx._dispatchable(
+    graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True
+)
+def projected_graph(B, nodes, multigraph=False):
+    r"""Returns the projection of B onto one of its node sets.
+
+    Returns the graph G that is the projection of the bipartite graph B
+    onto the specified nodes. The nodes retain their attributes and are
+    connected in G if they have a common neighbor in B.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+        The input graph should be bipartite.
+
+    nodes : list or iterable
+        Nodes to project onto (the "bottom" nodes).
+
+    multigraph: bool (default=False)
+        If True return a multigraph where the multiple edges represent
+        multiple shared neighbors. The edge key in the multigraph is assigned
+        to the label of the shared neighbor.
+
+    Returns
+    -------
+    Graph : NetworkX graph or multigraph
+        A graph that is the projection onto the given nodes.
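+        Nodes in `nodes` that share no neighbor with any other node in
+        `nodes` appear as isolated nodes in the projection.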
+ + Examples + -------- + >>> from networkx.algorithms import bipartite + >>> B = nx.path_graph(4) + >>> G = bipartite.projected_graph(B, [1, 3]) + >>> list(G) + [1, 3] + >>> list(G.edges()) + [(1, 3)] + + If nodes `a`, and `b` are connected through both nodes 1 and 2 then + building a multigraph results in two edges in the projection onto + [`a`, `b`]: + + >>> B = nx.Graph() + >>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)]) + >>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True) + >>> print([sorted((u, v)) for u, v in G.edges()]) + [['a', 'b'], ['a', 'b']] + + Notes + ----- + No attempt is made to verify that the input graph B is bipartite. + Returns a simple graph that is the projection of the bipartite graph B + onto the set of nodes given in list nodes. If multigraph=True then + a multigraph is returned with an edge for every shared neighbor. + + Directed graphs are allowed as input. The output will also then + be a directed graph with edges if there is a directed path between + the nodes. + + The graph and node properties are (shallow) copied to the projected graph. + + See :mod:`bipartite documentation ` + for further details on how bipartite graphs are handled in NetworkX. + + See Also + -------- + is_bipartite, + is_bipartite_node_set, + sets, + weighted_projected_graph, + collaboration_weighted_projected_graph, + overlap_weighted_projected_graph, + generic_weighted_projected_graph + """ + if B.is_multigraph(): + raise nx.NetworkXError("not defined for multigraphs") + if B.is_directed(): + directed = True + if multigraph: + G = nx.MultiDiGraph() + else: + G = nx.DiGraph() + else: + directed = False + if multigraph: + G = nx.MultiGraph() + else: + G = nx.Graph() + G.graph.update(B.graph) + G.add_nodes_from((n, B.nodes[n]) for n in nodes) + for u in nodes: + nbrs2 = {v for nbr in B[u] for v in B[nbr] if v != u} + if multigraph: + for n in nbrs2: + if directed: + links = set(B[u]) & set(B.pred[n]) + else: + links = set(B[u]) & set(B[n]) + for l in links: + if not G.has_edge(u, n, l): + G.add_edge(u, n, key=l) + else: + G.add_edges_from((u, n) for n in nbrs2) + return G + + +@not_implemented_for("multigraph") +@nx._dispatchable(graphs="B", returns_graph=True) +def weighted_projected_graph(B, nodes, ratio=False): + r"""Returns a weighted projection of B onto one of its node sets. + + The weighted projected graph is the projection of the bipartite + network B onto the specified nodes with weights representing the + number of shared neighbors or the ratio between actual shared + neighbors and possible shared neighbors if ``ratio is True`` [1]_. + The nodes retain their attributes and are connected in the resulting + graph if they have an edge to a common node in the original graph. + + Parameters + ---------- + B : NetworkX graph + The input graph should be bipartite. + + nodes : list or iterable + Distinct nodes to project onto (the "bottom" nodes). + + ratio: Bool (default=False) + If True, edge weight is the ratio between actual shared neighbors + and maximum possible shared neighbors (i.e., the size of the other + node set). If False, edges weight is the number of shared neighbors. + + Returns + ------- + Graph : NetworkX graph + A graph that is the projection onto the given nodes. 
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> B = nx.path_graph(4)
+    >>> G = bipartite.weighted_projected_graph(B, [1, 3])
+    >>> list(G)
+    [1, 3]
+    >>> list(G.edges(data=True))
+    [(1, 3, {'weight': 1})]
+    >>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
+    >>> list(G.edges(data=True))
+    [(1, 3, {'weight': 0.5})]
+
+    Notes
+    -----
+    No attempt is made to verify that the input graph B is bipartite, or that
+    the input nodes are distinct. However, if the number of input nodes is
+    greater than or equal to the number of nodes in B, an exception is raised.
+    If the input nodes are not distinct but that exception is not triggered,
+    the output weights will be incorrect.
+    The graph and node properties are (shallow) copied to the projected graph.
+
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    is_bipartite,
+    is_bipartite_node_set,
+    sets,
+    collaboration_weighted_projected_graph,
+    overlap_weighted_projected_graph,
+    generic_weighted_projected_graph,
+    projected_graph
+
+    References
+    ----------
+    .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
+       Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
+       of Social Network Analysis. Sage Publications.
+    """
+    if B.is_directed():
+        pred = B.pred
+        G = nx.DiGraph()
+    else:
+        pred = B.adj
+        G = nx.Graph()
+    G.graph.update(B.graph)
+    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
+    n_top = len(B) - len(nodes)
+
+    if n_top < 1:
+        raise NetworkXAlgorithmError(
+            f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
+            "They are either not a valid bipartite partition or contain duplicates"
+        )
+
+    for u in nodes:
+        unbrs = set(B[u])
+        nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
+        for v in nbrs2:
+            vnbrs = set(pred[v])
+            common = unbrs & vnbrs
+            if not ratio:
+                weight = len(common)
+            else:
+                weight = len(common) / n_top
+            G.add_edge(u, v, weight=weight)
+    return G
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs="B", returns_graph=True)
+def collaboration_weighted_projected_graph(B, nodes):
+    r"""Newman's weighted projection of B onto one of its node sets.
+
+    The collaboration weighted projection is the projection of the
+    bipartite network B onto the specified nodes with weights assigned
+    using Newman's collaboration model [1]_:
+
+    .. math::
+
+        w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}
+
+    where `u` and `v` are nodes from the bottom bipartite node set,
+    and `k` is a node of the top node set.
+    The value `d_k` is the degree of node `k` in the bipartite
+    network and `\delta_{u}^{k}` is 1 if node `u` is
+    linked to node `k` in the original bipartite graph or 0 otherwise.
+
+    The nodes retain their attributes and are connected in the resulting
+    graph if they have an edge to a common node in the original bipartite
+    graph.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+        The input graph should be bipartite.
+
+    nodes : list or iterable
+        Nodes to project onto (the "bottom" nodes).
+
+    Returns
+    -------
+    Graph : NetworkX graph
+        A graph that is the projection onto the given nodes.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> B = nx.path_graph(5)
+    >>> B.add_edge(1, 5)
+    >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
+    >>> list(G)
+    [0, 2, 4, 5]
+    >>> for edge in sorted(G.edges(data=True)):
+    ...     print(edge)
+    (0, 2, {'weight': 0.5})
+    (0, 5, {'weight': 0.5})
+    (2, 4, {'weight': 1.0})
+    (2, 5, {'weight': 0.5})
+
+    Notes
+    -----
+    No attempt is made to verify that the input graph B is bipartite.
+    The graph and node properties are (shallow) copied to the projected graph.
+
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    is_bipartite,
+    is_bipartite_node_set,
+    sets,
+    weighted_projected_graph,
+    overlap_weighted_projected_graph,
+    generic_weighted_projected_graph,
+    projected_graph
+
+    References
+    ----------
+    .. [1] Scientific collaboration networks: II.
+       Shortest paths, weighted networks, and centrality,
+       M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
+    """
+    if B.is_directed():
+        pred = B.pred
+        G = nx.DiGraph()
+    else:
+        pred = B.adj
+        G = nx.Graph()
+    G.graph.update(B.graph)
+    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
+    for u in nodes:
+        unbrs = set(B[u])
+        nbrs2 = {n for nbr in unbrs for n in B[nbr] if n != u}
+        for v in nbrs2:
+            vnbrs = set(pred[v])
+            common_degree = (len(B[n]) for n in unbrs & vnbrs)
+            weight = sum(1.0 / (deg - 1) for deg in common_degree if deg > 1)
+            G.add_edge(u, v, weight=weight)
+    return G
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs="B", returns_graph=True)
+def overlap_weighted_projected_graph(B, nodes, jaccard=True):
+    r"""Overlap weighted projection of B onto one of its node sets.
+
+    The overlap weighted projection is the projection of the bipartite
+    network B onto the specified nodes with weights representing
+    the Jaccard index between the neighborhoods of the two nodes in the
+    original bipartite network [1]_:
+
+    .. math::
+
+        w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}
+
+    or, if the parameter `jaccard` is False, the number of common
+    neighbors divided by the minimum of the two nodes' degrees in the
+    original bipartite graph [1]_:
+
+    .. math::
+
+        w_{v, u} = \frac{|N(u) \cap N(v)|}{\min(|N(u)|, |N(v)|)}
+
+    The nodes retain their attributes and are connected in the resulting
+    graph if they have an edge to a common node in the original bipartite graph.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+        The input graph should be bipartite.
+
+    nodes : list or iterable
+        Nodes to project onto (the "bottom" nodes).
+
+    jaccard : bool (default=True)
+        If True, weight edges by the Jaccard index; otherwise weight them
+        by the overlap coefficient defined above.
+
+    Returns
+    -------
+    Graph : NetworkX graph
+        A graph that is the projection onto the given nodes.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> B = nx.path_graph(5)
+    >>> nodes = [0, 2, 4]
+    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
+    >>> list(G)
+    [0, 2, 4]
+    >>> list(G.edges(data=True))
+    [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
+    >>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
+    >>> list(G.edges(data=True))
+    [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]
+
+    Notes
+    -----
+    No attempt is made to verify that the input graph B is bipartite.
+    The graph and node properties are (shallow) copied to the projected graph.
+
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
+
+    See Also
+    --------
+    is_bipartite,
+    is_bipartite_node_set,
+    sets,
+    weighted_projected_graph,
+    collaboration_weighted_projected_graph,
+    generic_weighted_projected_graph,
+    projected_graph
+
+    References
+    ----------
+    .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
+       Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
+       of Social Network Analysis. Sage Publications.
+
+    """
+    if B.is_directed():
+        pred = B.pred
+        G = nx.DiGraph()
+    else:
+        pred = B.adj
+        G = nx.Graph()
+    G.graph.update(B.graph)
+    G.add_nodes_from((n, B.nodes[n]) for n in nodes)
+    for u in nodes:
+        unbrs = set(B[u])
+        nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
+        for v in nbrs2:
+            vnbrs = set(pred[v])
+            if jaccard:
+                wt = len(unbrs & vnbrs) / len(unbrs | vnbrs)
+            else:
+                wt = len(unbrs & vnbrs) / min(len(unbrs), len(vnbrs))
+            G.add_edge(u, v, weight=wt)
+    return G
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True)
+def generic_weighted_projected_graph(B, nodes, weight_function=None):
+    r"""Weighted projection of B with a user-specified weight function.
+
+    The bipartite network B is projected on to the specified nodes
+    with weights computed by a user-specified function. This function
+    must accept the input graph and two nodes as parameters and
+    return an integer or a float.
+
+    The nodes retain their attributes and are connected in the resulting graph
+    if they have an edge to a common node in the original graph.
+
+    Parameters
+    ----------
+    B : NetworkX graph
+        The input graph should be bipartite.
+
+    nodes : list or iterable
+        Nodes to project onto (the "bottom" nodes).
+
+    weight_function : function
+        This function must accept as parameters the same input graph that was
+        passed to this function, plus two nodes, and must return an integer
+        or a float. The default function computes the number of shared
+        neighbors.
+
+    Returns
+    -------
+    Graph : NetworkX graph
+        A graph that is the projection onto the given nodes.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> # Define some custom weight functions
+    >>> def jaccard(G, u, v):
+    ...     unbrs = set(G[u])
+    ...     vnbrs = set(G[v])
+    ...     return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
+    >>> def my_weight(G, u, v, weight="weight"):
+    ...     w = 0
+    ...     for nbr in set(G[u]) & set(G[v]):
+    ...         w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
+    ...     return w
+    >>> # A complete bipartite graph with 4 nodes and 4 edges
+    >>> B = nx.complete_bipartite_graph(2, 2)
+    >>> # Add some arbitrary weight to the edges
+    >>> for i, (u, v) in enumerate(B.edges()):
+    ...     B.edges[u, v]["weight"] = i + 1
+    >>> for edge in B.edges(data=True):
+    ...     print(edge)
+    (0, 2, {'weight': 1})
+    (0, 3, {'weight': 2})
+    (1, 2, {'weight': 3})
+    (1, 3, {'weight': 4})
+    >>> # By default, the weight is the number of shared neighbors
+    >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
+    >>> print(list(G.edges(data=True)))
+    [(0, 1, {'weight': 2})]
+    >>> # To specify a custom weight function use the weight_function parameter
+    >>> G = bipartite.generic_weighted_projected_graph(
+    ...     B, [0, 1], weight_function=jaccard
+    ... )
+    >>> print(list(G.edges(data=True)))
+    [(0, 1, {'weight': 1.0})]
+    >>> G = bipartite.generic_weighted_projected_graph(
+    ...     B, [0, 1], weight_function=my_weight
+    ... )
+    >>> print(list(G.edges(data=True)))
+    [(0, 1, {'weight': 10})]
+
+    Notes
+    -----
+    No attempt is made to verify that the input graph B is bipartite.
+    The graph and node properties are (shallow) copied to the projected graph.
+
+    See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
+    for further details on how bipartite graphs are handled in NetworkX.
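+
+    As an illustrative sketch of the directed case (using a small,
+    hypothetical graph that is not among the examples above), projected
+    edges follow directed paths through the top node set, and the default
+    weight function intersects the successors of `u` with the
+    predecessors of `v`::
+
+        >>> D = nx.DiGraph([(0, "a"), ("a", 1), (2, "a")])
+        >>> G = bipartite.generic_weighted_projected_graph(D, [0, 1, 2])
+        >>> sorted(G.edges(data=True))
+        [(0, 1, {'weight': 1}), (2, 1, {'weight': 1})]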
+ + See Also + -------- + is_bipartite, + is_bipartite_node_set, + sets, + weighted_projected_graph, + collaboration_weighted_projected_graph, + overlap_weighted_projected_graph, + projected_graph + + """ + if B.is_directed(): + pred = B.pred + G = nx.DiGraph() + else: + pred = B.adj + G = nx.Graph() + if weight_function is None: + + def weight_function(G, u, v): + # Notice that we use set(pred[v]) for handling the directed case. + return len(set(G[u]) & set(pred[v])) + + G.graph.update(B.graph) + G.add_nodes_from((n, B.nodes[n]) for n in nodes) + for u in nodes: + nbrs2 = {n for nbr in set(B[u]) for n in B[nbr]} - {u} + for v in nbrs2: + weight = weight_function(B, u, v) + G.add_edge(u, v, weight=weight) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py new file mode 100644 index 0000000..b622b97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/redundancy.py @@ -0,0 +1,112 @@ +"""Node redundancy for bipartite graphs.""" + +from itertools import combinations + +import networkx as nx +from networkx import NetworkXError + +__all__ = ["node_redundancy"] + + +@nx._dispatchable +def node_redundancy(G, nodes=None): + r"""Computes the node redundancy coefficients for the nodes in the bipartite + graph `G`. + + The redundancy coefficient of a node `v` is the fraction of pairs of + neighbors of `v` that are both linked to other nodes. In a one-mode + projection these nodes would be linked together even if `v` were + not there. + + More formally, for any vertex `v`, the *redundancy coefficient of `v`* is + defined by + + .. math:: + + rc(v) = \frac{|\{\{u, w\} \subseteq N(v), + \: \exists v' \neq v,\: (v',u) \in E\: + \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}}, + + where `N(v)` is the set of neighbors of `v` in `G`. + + Parameters + ---------- + G : graph + A bipartite graph + + nodes : list or iterable (optional) + Compute redundancy for these nodes. The default is all nodes in G. + + Returns + ------- + redundancy : dictionary + A dictionary keyed by node with the node redundancy value. + + Examples + -------- + Compute the redundancy coefficient of each node in a graph:: + + >>> from networkx.algorithms import bipartite + >>> G = nx.cycle_graph(4) + >>> rc = bipartite.node_redundancy(G) + >>> rc[0] + 1.0 + + Compute the average redundancy for the graph:: + + >>> from networkx.algorithms import bipartite + >>> G = nx.cycle_graph(4) + >>> rc = bipartite.node_redundancy(G) + >>> sum(rc.values()) / len(G) + 1.0 + + Compute the average redundancy for a set of nodes:: + + >>> from networkx.algorithms import bipartite + >>> G = nx.cycle_graph(4) + >>> rc = bipartite.node_redundancy(G) + >>> nodes = [0, 2] + >>> sum(rc[n] for n in nodes) / len(nodes) + 1.0 + + Raises + ------ + NetworkXError + If any of the nodes in the graph (or in `nodes`, if specified) has + (out-)degree less than two (which would result in division by zero, + according to the definition of the redundancy coefficient). + + References + ---------- + .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). + Basic notions for the analysis of large two-mode networks. + Social Networks 30(1), 31--48. + + """ + if nodes is None: + nodes = G + if any(len(G[v]) < 2 for v in nodes): + raise NetworkXError( + "Cannot compute redundancy coefficient for a node" + " that has fewer than two neighbors." 
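+            # A node with fewer than two neighbors would make the
+            # denominator n * (n - 1) in _node_redundancy() zero.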
+        )
+    # TODO This can be trivially parallelized.
+    return {v: _node_redundancy(G, v) for v in nodes}
+
+
+def _node_redundancy(G, v):
+    """Returns the redundancy of the node `v` in the bipartite graph `G`.
+
+    If `v` has `n` neighbors in `G`, the redundancy of `v` is the ratio of
+    the "overlap" of `v` to the maximum possible overlap of `v` according
+    to its degree, `n(n - 1) / 2`. The overlap of `v` is the number of
+    pairs of its neighbors that have a mutual neighbor other than `v`.
+
+    `v` must have at least two neighbors in `G`.
+
+    """
+    n = len(G[v])
+    overlap = sum(
+        1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
+    )
+    return (2 * overlap) / (n * (n - 1))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py
new file mode 100644
index 0000000..cb9388f
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/spectral.py
@@ -0,0 +1,69 @@
+"""
+Spectral bipartivity measure.
+"""
+
+import networkx as nx
+
+__all__ = ["spectral_bipartivity"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def spectral_bipartivity(G, nodes=None, weight="weight"):
+    """Returns the spectral bipartivity.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nodes : list or container, optional (default is all nodes)
+        Nodes for which to return the spectral bipartivity contribution.
+
+    weight : string or None, optional (default='weight')
+        Edge data key to use for edge weights. If None, weights set to 1.
+
+    Returns
+    -------
+    sb : float or dict
+        A single number if the keyword nodes is not specified, or
+        a dictionary keyed by node with the spectral bipartivity contribution
+        of that node as the value.
+
+    Examples
+    --------
+    >>> from networkx.algorithms import bipartite
+    >>> G = nx.path_graph(4)
+    >>> bipartite.spectral_bipartivity(G)
+    1.0
+
+    Notes
+    -----
+    This implementation uses dense NumPy matrices, which are not efficient
+    for storing large sparse graphs.
+
+    See Also
+    --------
+    color
+
+    References
+    ----------
+    .. [1] E. Estrada and J. A.
Rodríguez-Velázquez, "Spectral measures of + bipartivity in complex networks", PhysRev E 72, 046105 (2005) + """ + import scipy as sp + + nodelist = list(G) # ordering of nodes in matrix + A = nx.to_numpy_array(G, nodelist, weight=weight) + expA = sp.linalg.expm(A) + expmA = sp.linalg.expm(-A) + coshA = 0.5 * (expA + expmA) + if nodes is None: + # return single number for entire graph + return float(coshA.diagonal().sum() / expA.diagonal().sum()) + else: + # contribution for individual nodes + index = dict(zip(nodelist, range(len(nodelist)))) + sb = {} + for n in nodes: + i = index[n] + sb[n] = coshA.item(i, i) / expA.item(i, i) + return sb diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1eada55 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-312.pyc new file mode 100644 index 0000000..8f66226 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_basic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-312.pyc new file mode 100644 index 0000000..9ed5225 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-312.pyc new file mode 100644 index 0000000..5edfdfc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_cluster.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-312.pyc new file mode 100644 index 0000000..c786d98 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_covering.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-312.pyc new file mode 100644 index 0000000..128226d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_edgelist.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-312.pyc new file mode 100644 index 0000000..fc99923 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_extendability.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-312.pyc new file mode 100644 index 0000000..5bf4809 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_generators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_link_analysis.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_link_analysis.cpython-312.pyc new file mode 100644 index 0000000..4bf9472 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_link_analysis.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-312.pyc new file mode 100644 index 0000000..32497d5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-312.pyc new file mode 100644 index 0000000..21c350a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_matrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-312.pyc new file mode 100644 index 0000000..1f027e9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_project.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-312.pyc new file mode 100644 index 0000000..458f917 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_redundancy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-312.pyc new file mode 100644 index 0000000..3b303fd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/__pycache__/test_spectral_bipartivity.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py new file mode 100644 index 0000000..655506b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_basic.py @@ -0,0 +1,125 @@ +import pytest + +import networkx as nx +from networkx.algorithms import bipartite + + +class TestBipartiteBasic: + def test_is_bipartite(self): + assert bipartite.is_bipartite(nx.path_graph(4)) + assert bipartite.is_bipartite(nx.DiGraph([(1, 0)])) + assert not bipartite.is_bipartite(nx.complete_graph(3)) + + def test_bipartite_color(self): + G = nx.path_graph(4) + c = bipartite.color(G) + assert c == {0: 1, 1: 0, 2: 1, 3: 0} + + def test_not_bipartite_color(self): + with pytest.raises(nx.NetworkXError): + c = bipartite.color(nx.complete_graph(4)) + + def test_bipartite_directed(self): + G = bipartite.random_graph(10, 10, 0.1, directed=True) + assert bipartite.is_bipartite(G) + + def test_bipartite_sets(self): + G = nx.path_graph(4) + X, Y = bipartite.sets(G) + assert X == {0, 2} + assert Y == {1, 3} + + def test_bipartite_sets_directed(self): + G = nx.path_graph(4) + D = G.to_directed() + X, Y = bipartite.sets(D) + assert X == {0, 2} + assert Y == {1, 3} + + def test_bipartite_sets_given_top_nodes(self): + G = nx.path_graph(4) + top_nodes = [0, 2] + X, Y = bipartite.sets(G, top_nodes) + assert X == {0, 2} + assert Y == {1, 3} + + def test_bipartite_sets_disconnected(self): + with pytest.raises(nx.AmbiguousSolution): + G = nx.path_graph(4) + G.add_edges_from([(5, 6), (6, 7)]) + X, Y = bipartite.sets(G) + + def test_is_bipartite_node_set(self): + G = nx.path_graph(4) + + with pytest.raises(nx.AmbiguousSolution): + bipartite.is_bipartite_node_set(G, [1, 1, 2, 3]) + + assert bipartite.is_bipartite_node_set(G, [0, 2]) + assert bipartite.is_bipartite_node_set(G, [1, 3]) + assert not bipartite.is_bipartite_node_set(G, [1, 2]) + G.add_edge(10, 20) + assert bipartite.is_bipartite_node_set(G, [0, 2, 10]) + assert bipartite.is_bipartite_node_set(G, [0, 2, 20]) + assert bipartite.is_bipartite_node_set(G, [1, 3, 10]) + assert bipartite.is_bipartite_node_set(G, [1, 3, 20]) + + def test_bipartite_density(self): + G = nx.path_graph(5) + X, Y = bipartite.sets(G) + density = len(list(G.edges())) / (len(X) * len(Y)) + assert bipartite.density(G, X) == density + D = nx.DiGraph(G.edges()) + assert bipartite.density(D, X) == density / 2.0 + assert bipartite.density(nx.Graph(), {}) == 0.0 + + def test_bipartite_degrees(self): + G = nx.path_graph(5) + X = {1, 3} + Y = {0, 2, 4} + u, d = bipartite.degrees(G, Y) + assert dict(u) == {1: 2, 3: 2} + assert dict(d) == {0: 1, 2: 2, 4: 1} + + def test_bipartite_weighted_degrees(self): + G = nx.path_graph(5) + G.add_edge(0, 1, weight=0.1, other=0.2) + X = {1, 3} + Y = {0, 2, 4} + u, d = bipartite.degrees(G, Y, weight="weight") + assert dict(u) == {1: 1.1, 3: 2} + assert dict(d) == {0: 0.1, 2: 2, 4: 1} + u, d = bipartite.degrees(G, Y, weight="other") + assert dict(u) == {1: 1.2, 3: 2} + assert dict(d) == {0: 0.2, 2: 2, 4: 1} + + def test_biadjacency_matrix_weight(self): + pytest.importorskip("scipy") + G = nx.path_graph(5) + G.add_edge(0, 1, weight=2, other=4) + X = [1, 3] + Y = [0, 2, 4] + M = bipartite.biadjacency_matrix(G, X, weight="weight") + assert M[0, 0] == 2 + M = bipartite.biadjacency_matrix(G, X, weight="other") + assert M[0, 0] == 4 + + def test_biadjacency_matrix(self): + pytest.importorskip("scipy") + tops = [2, 5, 10] + 
bots = [5, 10, 15] + for i in range(len(tops)): + G = bipartite.random_graph(tops[i], bots[i], 0.2) + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] + M = bipartite.biadjacency_matrix(G, top) + assert M.shape[0] == tops[i] + assert M.shape[1] == bots[i] + + def test_biadjacency_matrix_order(self): + pytest.importorskip("scipy") + G = nx.path_graph(5) + G.add_edge(0, 1, weight=2) + X = [3, 1] + Y = [4, 2, 0] + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") + assert M[1, 2] == 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py new file mode 100644 index 0000000..19fb5d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_centrality.py @@ -0,0 +1,192 @@ +import pytest + +import networkx as nx +from networkx.algorithms import bipartite + + +class TestBipartiteCentrality: + @classmethod + def setup_class(cls): + cls.P4 = nx.path_graph(4) + cls.K3 = nx.complete_bipartite_graph(3, 3) + cls.C4 = nx.cycle_graph(4) + cls.davis = nx.davis_southern_women_graph() + cls.top_nodes = [ + n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0 + ] + + def test_degree_centrality(self): + d = bipartite.degree_centrality(self.P4, [1, 3]) + answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} + assert d == answer + d = bipartite.degree_centrality(self.K3, [0, 1, 2]) + answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} + assert d == answer + d = bipartite.degree_centrality(self.C4, [0, 2]) + answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} + assert d == answer + + def test_betweenness_centrality(self): + c = bipartite.betweenness_centrality(self.P4, [1, 3]) + answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0} + assert c == answer + c = bipartite.betweenness_centrality(self.K3, [0, 1, 2]) + answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125} + assert c == answer + c = bipartite.betweenness_centrality(self.C4, [0, 2]) + answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} + assert c == answer + + def test_closeness_centrality(self): + c = bipartite.closeness_centrality(self.P4, [1, 3]) + answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3} + assert c == answer + c = bipartite.closeness_centrality(self.K3, [0, 1, 2]) + answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} + assert c == answer + c = bipartite.closeness_centrality(self.C4, [0, 2]) + answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} + assert c == answer + G = nx.Graph() + G.add_node(0) + G.add_node(1) + c = bipartite.closeness_centrality(G, [0]) + assert c == {0: 0.0, 1: 0.0} + c = bipartite.closeness_centrality(G, [1]) + assert c == {0: 0.0, 1: 0.0} + + def test_bipartite_closeness_centrality_unconnected(self): + G = nx.complete_bipartite_graph(3, 3) + G.add_edge(6, 7) + c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False) + answer = { + 0: 10.0 / 7, + 2: 10.0 / 7, + 4: 10.0 / 7, + 6: 10.0, + 1: 10.0 / 7, + 3: 10.0 / 7, + 5: 10.0 / 7, + 7: 10.0, + } + assert c == answer + + def test_davis_degree_centrality(self): + G = self.davis + deg = bipartite.degree_centrality(G, self.top_nodes) + answer = { + "E8": 0.78, + "E9": 0.67, + "E7": 0.56, + "Nora Fayette": 0.57, + "Evelyn Jefferson": 0.57, + "Theresa Anderson": 0.57, + "E6": 0.44, + "Sylvia Avondale": 0.50, + "Laura Mandeville": 0.50, + "Brenda Rogers": 0.50, + "Katherina Rogers": 0.43, + "E5": 0.44, + "Helen Lloyd": 0.36, + "E3": 0.33, + "Ruth DeSand": 0.29, + "Verne 
Sanderson": 0.29, + "E12": 0.33, + "Myra Liddel": 0.29, + "E11": 0.22, + "Eleanor Nye": 0.29, + "Frances Anderson": 0.29, + "Pearl Oglethorpe": 0.21, + "E4": 0.22, + "Charlotte McDowd": 0.29, + "E10": 0.28, + "Olivia Carleton": 0.14, + "Flora Price": 0.14, + "E2": 0.17, + "E1": 0.17, + "Dorothy Murchison": 0.14, + "E13": 0.17, + "E14": 0.17, + } + for node, value in answer.items(): + assert value == pytest.approx(deg[node], abs=1e-2) + + def test_davis_betweenness_centrality(self): + G = self.davis + bet = bipartite.betweenness_centrality(G, self.top_nodes) + answer = { + "E8": 0.24, + "E9": 0.23, + "E7": 0.13, + "Nora Fayette": 0.11, + "Evelyn Jefferson": 0.10, + "Theresa Anderson": 0.09, + "E6": 0.07, + "Sylvia Avondale": 0.07, + "Laura Mandeville": 0.05, + "Brenda Rogers": 0.05, + "Katherina Rogers": 0.05, + "E5": 0.04, + "Helen Lloyd": 0.04, + "E3": 0.02, + "Ruth DeSand": 0.02, + "Verne Sanderson": 0.02, + "E12": 0.02, + "Myra Liddel": 0.02, + "E11": 0.02, + "Eleanor Nye": 0.01, + "Frances Anderson": 0.01, + "Pearl Oglethorpe": 0.01, + "E4": 0.01, + "Charlotte McDowd": 0.01, + "E10": 0.01, + "Olivia Carleton": 0.01, + "Flora Price": 0.01, + "E2": 0.00, + "E1": 0.00, + "Dorothy Murchison": 0.00, + "E13": 0.00, + "E14": 0.00, + } + for node, value in answer.items(): + assert value == pytest.approx(bet[node], abs=1e-2) + + def test_davis_closeness_centrality(self): + G = self.davis + clos = bipartite.closeness_centrality(G, self.top_nodes) + answer = { + "E8": 0.85, + "E9": 0.79, + "E7": 0.73, + "Nora Fayette": 0.80, + "Evelyn Jefferson": 0.80, + "Theresa Anderson": 0.80, + "E6": 0.69, + "Sylvia Avondale": 0.77, + "Laura Mandeville": 0.73, + "Brenda Rogers": 0.73, + "Katherina Rogers": 0.73, + "E5": 0.59, + "Helen Lloyd": 0.73, + "E3": 0.56, + "Ruth DeSand": 0.71, + "Verne Sanderson": 0.71, + "E12": 0.56, + "Myra Liddel": 0.69, + "E11": 0.54, + "Eleanor Nye": 0.67, + "Frances Anderson": 0.67, + "Pearl Oglethorpe": 0.67, + "E4": 0.54, + "Charlotte McDowd": 0.60, + "E10": 0.55, + "Olivia Carleton": 0.59, + "Flora Price": 0.59, + "E2": 0.52, + "E1": 0.52, + "Dorothy Murchison": 0.65, + "E13": 0.52, + "E14": 0.52, + } + for node, value in answer.items(): + assert value == pytest.approx(clos[node], abs=1e-2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py new file mode 100644 index 0000000..72e2dba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_cluster.py @@ -0,0 +1,84 @@ +import pytest + +import networkx as nx +from networkx.algorithms import bipartite +from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min + + +def test_pairwise_bipartite_cc_functions(): + # Test functions for different kinds of bipartite clustering coefficients + # between pairs of nodes using 3 example graphs from figure 5 p. 
40 + # Latapy et al (2008) + G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)]) + G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)]) + G3 = nx.Graph( + [(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)] + ) + result = { + 0: [1 / 3.0, 2 / 3.0, 2 / 5.0], + 1: [1 / 2.0, 2 / 3.0, 2 / 3.0], + 2: [2 / 8.0, 2 / 5.0, 2 / 5.0], + } + for i, G in enumerate([G1, G2, G3]): + assert bipartite.is_bipartite(G) + assert cc_dot(set(G[0]), set(G[1])) == result[i][0] + assert cc_min(set(G[0]), set(G[1])) == result[i][1] + assert cc_max(set(G[0]), set(G[1])) == result[i][2] + + +def test_star_graph(): + G = nx.star_graph(3) + # all modes are the same + answer = {0: 0, 1: 1, 2: 1, 3: 1} + assert bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="min") == answer + assert bipartite.clustering(G, mode="max") == answer + + +def test_not_bipartite(): + with pytest.raises(nx.NetworkXError): + bipartite.clustering(nx.complete_graph(4)) + + +def test_bad_mode(): + with pytest.raises(nx.NetworkXError): + bipartite.clustering(nx.path_graph(4), mode="foo") + + +def test_path_graph(): + G = nx.path_graph(4) + answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5} + assert bipartite.clustering(G, mode="dot") == answer + assert bipartite.clustering(G, mode="max") == answer + answer = {0: 1, 1: 1, 2: 1, 3: 1} + assert bipartite.clustering(G, mode="min") == answer + + +def test_average_path_graph(): + G = nx.path_graph(4) + assert bipartite.average_clustering(G, mode="dot") == 0.5 + assert bipartite.average_clustering(G, mode="max") == 0.5 + assert bipartite.average_clustering(G, mode="min") == 1 + + +def test_ra_clustering_davis(): + G = nx.davis_southern_women_graph() + cc4 = round(bipartite.robins_alexander_clustering(G), 3) + assert cc4 == 0.468 + + +def test_ra_clustering_square(): + G = nx.path_graph(4) + G.add_edge(0, 3) + assert bipartite.robins_alexander_clustering(G) == 1.0 + + +def test_ra_clustering_zero(): + G = nx.Graph() + assert bipartite.robins_alexander_clustering(G) == 0 + G.add_nodes_from(range(4)) + assert bipartite.robins_alexander_clustering(G) == 0 + G.add_edges_from([(0, 1), (2, 3), (3, 4)]) + assert bipartite.robins_alexander_clustering(G) == 0 + G.add_edge(1, 2) + assert bipartite.robins_alexander_clustering(G) == 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py new file mode 100644 index 0000000..9507e13 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_covering.py @@ -0,0 +1,33 @@ +import networkx as nx +from networkx.algorithms import bipartite + + +class TestMinEdgeCover: + """Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`""" + + def test_empty_graph(self): + G = nx.Graph() + assert bipartite.min_edge_cover(G) == set() + + def test_graph_single_edge(self): + G = nx.Graph() + G.add_edge(0, 1) + assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)} + + def test_bipartite_default(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4], bipartite=0) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = bipartite.min_edge_cover(G) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 + + def test_bipartite_explicit(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4], bipartite=0) + 
G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py new file mode 100644 index 0000000..66be8a2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_edgelist.py @@ -0,0 +1,240 @@ +""" +Unit tests for bipartite edgelists. +""" + +import io + +import pytest + +import networkx as nx +from networkx.algorithms import bipartite +from networkx.utils import edges_equal, graphs_equal, nodes_equal + + +class TestEdgelist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_nodes_from(["a", "c", "e"], bipartite=0) + cls.G.add_nodes_from(["b", "d", "f"], bipartite=1) + cls.G.add_node("g", bipartite=0) + cls.DG = nx.DiGraph(cls.G) + cls.MG = nx.MultiGraph() + cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)]) + cls.MG.add_node(1, bipartite=0) + cls.MG.add_node(2, bipartite=1) + + def test_read_edgelist_1(self): + s = b"""\ +# comment line +1 2 +# comment line +2 3 +""" + bytesIO = io.BytesIO(s) + G = bipartite.read_edgelist(bytesIO, nodetype=int) + assert edges_equal(G.edges(), [(1, 2), (2, 3)]) + + def test_read_edgelist_3(self): + s = b"""\ +# comment line +1 2 {'weight':2.0} +# comment line +2 3 {'weight':3.0} +""" + bytesIO = io.BytesIO(s) + G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False) + assert edges_equal(G.edges(), [(1, 2), (2, 3)]) + + bytesIO = io.BytesIO(s) + G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True) + assert edges_equal( + G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] + ) + + def test_write_edgelist_1(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + G.add_node(1, bipartite=0) + G.add_node(2, bipartite=1) + G.add_node(3, bipartite=0) + bipartite.write_edgelist(G, fh, data=False) + fh.seek(0) + assert fh.read() == b"1 2\n3 2\n" + + def test_write_edgelist_2(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + G.add_node(1, bipartite=0) + G.add_node(2, bipartite=1) + G.add_node(3, bipartite=0) + bipartite.write_edgelist(G, fh, data=True) + fh.seek(0) + assert fh.read() == b"1 2 {}\n3 2 {}\n" + + def test_write_edgelist_3(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edge(1, 2, weight=2.0) + G.add_edge(2, 3, weight=3.0) + G.add_node(1, bipartite=0) + G.add_node(2, bipartite=1) + G.add_node(3, bipartite=0) + bipartite.write_edgelist(G, fh, data=True) + fh.seek(0) + assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n" + + def test_write_edgelist_4(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edge(1, 2, weight=2.0) + G.add_edge(2, 3, weight=3.0) + G.add_node(1, bipartite=0) + G.add_node(2, bipartite=1) + G.add_node(3, bipartite=0) + bipartite.write_edgelist(G, fh, data=[("weight")]) + fh.seek(0) + assert fh.read() == b"1 2 2.0\n3 2 3.0\n" + + def test_unicode(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + G.add_node(name1, bipartite=0) + 
G.add_node("Radiohead", bipartite=1) + + fname = tmp_path / "edgelist.txt" + bipartite.write_edgelist(G, fname) + H = bipartite.read_edgelist(fname) + assert graphs_equal(G, H) + + def test_latin1_issue(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + G.add_node(name1, bipartite=0) + G.add_node("Radiohead", bipartite=1) + + fname = tmp_path / "edgelist.txt" + with pytest.raises(UnicodeEncodeError): + bipartite.write_edgelist(G, fname, encoding="latin-1") + + def test_latin1(self, tmp_path): + G = nx.Graph() + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) + G.add_node(name1, bipartite=0) + G.add_node("Radiohead", bipartite=1) + + fname = tmp_path / "edgelist.txt" + bipartite.write_edgelist(G, fname, encoding="latin-1") + H = bipartite.read_edgelist(fname, encoding="latin-1") + assert graphs_equal(G, H) + + def test_edgelist_graph(self, tmp_path): + G = self.G + fname = tmp_path / "edgelist.txt" + bipartite.write_edgelist(G, fname) + H = bipartite.read_edgelist(fname) + H2 = bipartite.read_edgelist(fname) + assert H is not H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_integers(self, tmp_path): + G = nx.convert_node_labels_to_integers(self.G) + fname = tmp_path / "edgelist.txt" + bipartite.write_edgelist(G, fname) + H = bipartite.read_edgelist(fname, nodetype=int) + # isolated nodes are not written in edgelist + G.remove_nodes_from(list(nx.isolates(G))) + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_multigraph(self, tmp_path): + G = self.MG + fname = tmp_path / "edgelist.txt" + bipartite.write_edgelist(G, fname) + H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_empty_digraph(self): + with pytest.raises(nx.NetworkXNotImplemented): + bytesIO = io.BytesIO() + bipartite.write_edgelist(nx.DiGraph(), bytesIO) + + def test_raise_attribute(self): + with pytest.raises(AttributeError): + G = nx.path_graph(4) + bytesIO = io.BytesIO() + bipartite.write_edgelist(G, bytesIO) + + def test_parse_edgelist(self): + """Tests for conditions specific to + parse_edge_list method""" + + # ignore strings of length less than 2 + lines = ["1 2", "2 3", "3 1", "4", " "] + G = bipartite.parse_edgelist(lines, nodetype=int) + assert list(G.nodes) == [1, 2, 3] + + # Exception raised when node is not convertible + # to specified data type + with pytest.raises(TypeError, match=".*Failed to convert nodes"): + lines = ["a b", "b c", "c a"] + G = bipartite.parse_edgelist(lines, nodetype=int) + + # Exception raised when format of data is not + # convertible to dictionary object + with pytest.raises(TypeError, match=".*Failed to convert edge data"): + lines = ["1 2 3", "2 3 4", "3 1 2"] + G = bipartite.parse_edgelist(lines, nodetype=int) + + # Exception raised when edge data and data + # keys are not of same length + with pytest.raises(IndexError): + lines = ["1 2 3 4", "2 3 4"] + G = bipartite.parse_edgelist( + lines, nodetype=int, 
data=[("weight", int), ("key", int)] + ) + + # Exception raised when edge data is not + # convertible to specified data type + with pytest.raises(TypeError, match=".*Failed to convert key data"): + lines = ["1 2 3 a", "2 3 4 b"] + G = bipartite.parse_edgelist( + lines, nodetype=int, data=[("weight", int), ("key", int)] + ) + + +def test_bipartite_edgelist_consistent_strip_handling(): + """See gh-7462 + + Input when printed looks like: + + A B interaction 2 + B C interaction 4 + C A interaction + + Note the trailing \\t in the last line, which indicates the existence of + an empty data field. + """ + lines = io.StringIO( + "A\tB\tinteraction\t2\nB\tC\tinteraction\t4\nC\tA\tinteraction\t" + ) + descr = [("type", str), ("weight", str)] + # Should not raise + G = nx.bipartite.parse_edgelist(lines, delimiter="\t", data=descr) + expected = [("A", "B", "2"), ("A", "C", ""), ("B", "C", "4")] + assert sorted(G.edges(data="weight")) == expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py new file mode 100644 index 0000000..17b7124 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_extendability.py @@ -0,0 +1,334 @@ +import pytest + +import networkx as nx + + +def test_selfloops_raises(): + G = nx.ladder_graph(3) + G.add_edge(0, 0) + with pytest.raises(nx.NetworkXError, match=".*not bipartite"): + nx.bipartite.maximal_extendability(G) + + +def test_disconnected_raises(): + G = nx.ladder_graph(3) + G.add_node("a") + with pytest.raises(nx.NetworkXError, match=".*not connected"): + nx.bipartite.maximal_extendability(G) + + +def test_not_bipartite_raises(): + G = nx.complete_graph(5) + with pytest.raises(nx.NetworkXError, match=".*not bipartite"): + nx.bipartite.maximal_extendability(G) + + +def test_no_perfect_matching_raises(): + G = nx.Graph([(0, 1), (0, 2)]) + with pytest.raises(nx.NetworkXError, match=".*not contain a perfect matching"): + nx.bipartite.maximal_extendability(G) + + +def test_residual_graph_not_strongly_connected_raises(): + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + with pytest.raises( + nx.NetworkXError, match="The residual graph of G is not strongly connected" + ): + nx.bipartite.maximal_extendability(G) + + +def test_ladder_graph_is_1(): + G = nx.ladder_graph(3) + assert nx.bipartite.maximal_extendability(G) == 1 + + +def test_cubical_graph_is_2(): + G = nx.cubical_graph() + assert nx.bipartite.maximal_extendability(G) == 2 + + +def test_k_is_3(): + G = nx.Graph( + [ + (1, 6), + (1, 7), + (1, 8), + (1, 9), + (2, 6), + (2, 7), + (2, 8), + (2, 10), + (3, 6), + (3, 8), + (3, 9), + (3, 10), + (4, 7), + (4, 8), + (4, 9), + (4, 10), + (5, 6), + (5, 7), + (5, 9), + (5, 10), + ] + ) + assert nx.bipartite.maximal_extendability(G) == 3 + + +def test_k_is_4(): + G = nx.Graph( + [ + (8, 1), + (8, 2), + (8, 3), + (8, 4), + (8, 5), + (9, 1), + (9, 2), + (9, 3), + (9, 4), + (9, 7), + (10, 1), + (10, 2), + (10, 3), + (10, 4), + (10, 6), + (11, 1), + (11, 2), + (11, 5), + (11, 6), + (11, 7), + (12, 1), + (12, 3), + (12, 5), + (12, 6), + (12, 7), + (13, 2), + (13, 4), + (13, 5), + (13, 6), + (13, 7), + (14, 3), + (14, 4), + (14, 5), + (14, 6), + (14, 7), + ] + ) + assert nx.bipartite.maximal_extendability(G) == 4 + + +def test_k_is_5(): + G = nx.Graph( + [ + (8, 1), + (8, 2), + (8, 3), + (8, 4), + (8, 5), + (8, 6), + (9, 1), + (9, 2), + (9, 3), + (9, 4), + (9, 5), + (9, 7), + (10, 1), + (10, 2), + (10, 
3), + (10, 4), + (10, 6), + (10, 7), + (11, 1), + (11, 2), + (11, 3), + (11, 5), + (11, 6), + (11, 7), + (12, 1), + (12, 2), + (12, 4), + (12, 5), + (12, 6), + (12, 7), + (13, 1), + (13, 3), + (13, 4), + (13, 5), + (13, 6), + (13, 7), + (14, 2), + (14, 3), + (14, 4), + (14, 5), + (14, 6), + (14, 7), + ] + ) + assert nx.bipartite.maximal_extendability(G) == 5 + + +def test_k_is_6(): + G = nx.Graph( + [ + (9, 1), + (9, 2), + (9, 3), + (9, 4), + (9, 5), + (9, 6), + (9, 7), + (10, 1), + (10, 2), + (10, 3), + (10, 4), + (10, 5), + (10, 6), + (10, 8), + (11, 1), + (11, 2), + (11, 3), + (11, 4), + (11, 5), + (11, 7), + (11, 8), + (12, 1), + (12, 2), + (12, 3), + (12, 4), + (12, 6), + (12, 7), + (12, 8), + (13, 1), + (13, 2), + (13, 3), + (13, 5), + (13, 6), + (13, 7), + (13, 8), + (14, 1), + (14, 2), + (14, 4), + (14, 5), + (14, 6), + (14, 7), + (14, 8), + (15, 1), + (15, 3), + (15, 4), + (15, 5), + (15, 6), + (15, 7), + (15, 8), + (16, 2), + (16, 3), + (16, 4), + (16, 5), + (16, 6), + (16, 7), + (16, 8), + ] + ) + assert nx.bipartite.maximal_extendability(G) == 6 + + +def test_k_is_7(): + G = nx.Graph( + [ + (1, 11), + (1, 12), + (1, 13), + (1, 14), + (1, 15), + (1, 16), + (1, 17), + (1, 18), + (2, 11), + (2, 12), + (2, 13), + (2, 14), + (2, 15), + (2, 16), + (2, 17), + (2, 19), + (3, 11), + (3, 12), + (3, 13), + (3, 14), + (3, 15), + (3, 16), + (3, 17), + (3, 20), + (4, 11), + (4, 12), + (4, 13), + (4, 14), + (4, 15), + (4, 16), + (4, 17), + (4, 18), + (4, 19), + (4, 20), + (5, 11), + (5, 12), + (5, 13), + (5, 14), + (5, 15), + (5, 16), + (5, 17), + (5, 18), + (5, 19), + (5, 20), + (6, 11), + (6, 12), + (6, 13), + (6, 14), + (6, 15), + (6, 16), + (6, 17), + (6, 18), + (6, 19), + (6, 20), + (7, 11), + (7, 12), + (7, 13), + (7, 14), + (7, 15), + (7, 16), + (7, 17), + (7, 18), + (7, 19), + (7, 20), + (8, 11), + (8, 12), + (8, 13), + (8, 14), + (8, 15), + (8, 16), + (8, 17), + (8, 18), + (8, 19), + (8, 20), + (9, 11), + (9, 12), + (9, 13), + (9, 14), + (9, 15), + (9, 16), + (9, 17), + (9, 18), + (9, 19), + (9, 20), + (10, 11), + (10, 12), + (10, 13), + (10, 14), + (10, 15), + (10, 16), + (10, 17), + (10, 18), + (10, 19), + (10, 20), + ] + ) + assert nx.bipartite.maximal_extendability(G) == 7 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py new file mode 100644 index 0000000..3c394db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_generators.py @@ -0,0 +1,407 @@ +import numbers + +import pytest + +import networkx as nx + +from ..generators import ( + alternating_havel_hakimi_graph, + complete_bipartite_graph, + configuration_model, + gnmk_random_graph, + havel_hakimi_graph, + preferential_attachment_graph, + random_graph, + reverse_havel_hakimi_graph, +) + +""" +Generators - Bipartite +---------------------- +""" + + +class TestGeneratorsBipartite: + def test_complete_bipartite_graph(self): + G = complete_bipartite_graph(0, 0) + assert nx.is_isomorphic(G, nx.null_graph()) + + for i in [1, 5]: + G = complete_bipartite_graph(i, 0) + assert nx.is_isomorphic(G, nx.empty_graph(i)) + G = complete_bipartite_graph(0, i) + assert nx.is_isomorphic(G, nx.empty_graph(i)) + + G = complete_bipartite_graph(2, 2) + assert nx.is_isomorphic(G, nx.cycle_graph(4)) + + G = complete_bipartite_graph(1, 5) + assert nx.is_isomorphic(G, nx.star_graph(5)) + + G = complete_bipartite_graph(5, 1) + assert nx.is_isomorphic(G, 
nx.star_graph(5)) + + # complete_bipartite_graph(m1,m2) is a connected graph with + # m1+m2 nodes and m1*m2 edges + for m1, m2 in [(5, 11), (7, 3)]: + G = complete_bipartite_graph(m1, m2) + assert nx.number_of_nodes(G) == m1 + m2 + assert nx.number_of_edges(G) == m1 * m2 + + with pytest.raises(nx.NetworkXError): + complete_bipartite_graph(7, 3, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError): + complete_bipartite_graph(7, 3, create_using=nx.MultiDiGraph) + + mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph) + assert mG.is_multigraph() + assert sorted(mG.edges()) == sorted(G.edges()) + + mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph) + assert mG.is_multigraph() + assert sorted(mG.edges()) == sorted(G.edges()) + + mG = complete_bipartite_graph(7, 3) # default to Graph + assert sorted(mG.edges()) == sorted(G.edges()) + assert not mG.is_multigraph() + assert not mG.is_directed() + + # specify nodes rather than number of nodes + for n1, n2 in [([1, 2], "ab"), (3, 2), (3, "ab"), ("ab", 3)]: + G = complete_bipartite_graph(n1, n2) + if isinstance(n1, numbers.Integral): + if isinstance(n2, numbers.Integral): + n2 = range(n1, n1 + n2) + n1 = range(n1) + elif isinstance(n2, numbers.Integral): + n2 = range(n2) + edges = {(u, v) for u in n1 for v in n2} + assert edges == set(G.edges) + assert G.size() == len(edges) + + # raise when node sets are not distinct + for n1, n2 in [([1, 2], 3), (3, [1, 2]), ("abc", "bcd")]: + pytest.raises(nx.NetworkXError, complete_bipartite_graph, n1, n2) + + def test_configuration_model(self): + aseq = [] + bseq = [] + G = configuration_model(aseq, bseq) + assert len(G) == 0 + + aseq = [0, 0] + bseq = [0, 0] + G = configuration_model(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 + + aseq = [3, 3, 3, 3] + bseq = [2, 2, 2, 2, 2] + pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq) + + aseq = [3, 3, 3, 3] + bseq = [2, 2, 2, 2, 2, 2] + G = configuration_model(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 2, 2, 2] + bseq = [3, 3, 3, 3] + G = configuration_model(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 1, 1, 1] + bseq = [3, 3, 3] + G = configuration_model(aseq, bseq) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = nx.projected_graph(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph() + ) + pytest.raises( + nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + configuration_model, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) + + def test_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 + + aseq = [0, 0] + bseq = [0, 0] + G = havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 + + aseq = [3, 3, 3, 3] + bseq = [2, 2, 2, 2, 2] + pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq) + + bseq = [2, 2, 2, 2, 2, 2] + G = havel_hakimi_graph(aseq, bseq) + assert 
sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 2, 2, 2] + bseq = [3, 3, 3, 3] + G = havel_hakimi_graph(aseq, bseq) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + GU = nx.projected_graph(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 4 + + G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph + ) + pytest.raises( + nx.NetworkXError, + havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) + + def test_reverse_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 + + aseq = [0, 0] + bseq = [0, 0] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 + + aseq = [3, 3, 3, 3] + bseq = [2, 2, 2, 2, 2] + pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq) + + bseq = [2, 2, 2, 2, 2, 2] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 2, 2, 2] + bseq = [3, 3, 3, 3] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 1, 1, 1] + bseq = [3, 3, 3] + G = reverse_havel_hakimi_graph(aseq, bseq) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = nx.projected_graph(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + reverse_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) + + def test_alternating_havel_hakimi_graph(self): + aseq = [] + bseq = [] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert len(G) == 0 + + aseq = [0, 0] + bseq = [0, 0] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert len(G) == 4 + assert G.number_of_edges() == 0 + + aseq = [3, 3, 3, 3] + bseq = [2, 2, 2, 2, 2] + pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq) + + bseq = [2, 2, 2, 2, 2, 2] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 2, 2, 2] + bseq = [3, 3, 3, 3] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3] + + aseq = [2, 2, 2, 1, 1, 1] + bseq = [3, 3, 3] + G = alternating_havel_hakimi_graph(aseq, bseq) + assert G.is_multigraph() + assert not G.is_directed() + assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3] + + GU = 
nx.projected_graph(nx.Graph(G), range(len(aseq))) + assert GU.number_of_nodes() == 6 + + GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq))) + assert GD.number_of_nodes() == 3 + + G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.DiGraph, + ) + pytest.raises( + nx.NetworkXError, + alternating_havel_hakimi_graph, + aseq, + bseq, + create_using=nx.MultiDiGraph, + ) + + def test_preferential_attachment(self): + aseq = [3, 2, 1, 1] + G = preferential_attachment_graph(aseq, 0.5) + assert G.is_multigraph() + assert not G.is_directed() + + G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph) + assert not G.is_multigraph() + assert not G.is_directed() + + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + pytest.raises( + nx.NetworkXError, + preferential_attachment_graph, + aseq, + 0.5, + create_using=nx.DiGraph(), + ) + + def test_random_graph(self): + n = 10 + m = 20 + G = random_graph(n, m, 0.9) + assert len(G) == 30 + assert nx.is_bipartite(G) + X, Y = nx.algorithms.bipartite.sets(G) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + + def test_random_digraph(self): + n = 10 + m = 20 + G = random_graph(n, m, 0.9, directed=True) + assert len(G) == 30 + assert nx.is_bipartite(G) + X, Y = nx.algorithms.bipartite.sets(G) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + + def test_gnmk_random_graph(self): + n = 10 + m = 20 + edges = 100 + # set seed because sometimes it is not connected + # which raises an error in bipartite.sets(G) below. + G = gnmk_random_graph(n, m, edges, seed=1234) + assert len(G) == n + m + assert nx.is_bipartite(G) + X, Y = nx.algorithms.bipartite.sets(G) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + assert edges == len(list(G.edges())) + + def test_gnmk_random_graph_complete(self): + n = 10 + m = 20 + edges = 200 + G = gnmk_random_graph(n, m, edges) + assert len(G) == n + m + assert nx.is_bipartite(G) + X, Y = nx.algorithms.bipartite.sets(G) + assert set(range(n)) == X + assert set(range(n, n + m)) == Y + assert edges == len(list(G.edges())) + + @pytest.mark.parametrize("n", (4, range(4), {0, 1, 2, 3})) + @pytest.mark.parametrize("m", (range(4, 7), {4, 5, 6})) + def test_complete_bipartite_graph_str(self, n, m): + """Ensure G.name is consistent for all inputs accepted by nodes_or_number. 
+ See gh-7396""" + G = nx.complete_bipartite_graph(n, m) + ans = "Graph named 'complete_bipartite_graph(4, 3)' with 7 nodes and 12 edges" + assert str(G) == ans diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_link_analysis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_link_analysis.py new file mode 100644 index 0000000..6e46056 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_link_analysis.py @@ -0,0 +1,218 @@ +import itertools + +import pytest + +import networkx as nx +from networkx.algorithms import bipartite + +pytest.importorskip("scipy") + + +class TestBipartiteLinkAnalysis: + @classmethod + def setup_class(cls): + cls.davis_southern_women_graph = nx.davis_southern_women_graph() + cls.women_bipartite_set = { + node + for node, bipartite in cls.davis_southern_women_graph.nodes( + data="bipartite" + ) + if bipartite == 0 + } + cls.gnmk_random_graph = nx.bipartite.generators.gnmk_random_graph( + 5 * 10**2, 10**2, 5 * 10**2, seed=27 + ) + cls.gnmk_random_graph_top_nodes = { + node + for node, bipartite in cls.gnmk_random_graph.nodes(data="bipartite") + if bipartite == 0 + } + + def test_collaborative_filtering_birank(self): + elist = [ + ("u1", "p1", 5), + ("u2", "p1", 5), + ("u2", "p2", 4), + ("u3", "p1", 3), + ("u3", "p3", 2), + ] + item_recommendation_graph = nx.DiGraph() + item_recommendation_graph.add_weighted_edges_from(elist, weight="rating") + product_nodes = ("p1", "p2", "p3") + u1_query = { + product: rating + for _, product, rating in item_recommendation_graph.edges( + nbunch="u1", data="rating" + ) + } + u1_birank_results = bipartite.birank( + item_recommendation_graph, + product_nodes, + alpha=0.8, + beta=1.0, + top_personalization=u1_query, + weight="rating", + ) + + assert u1_birank_results["p2"] > u1_birank_results["p3"] + + u1_birank_results_unweighted = bipartite.birank( + item_recommendation_graph, + product_nodes, + alpha=0.8, + beta=1.0, + top_personalization=u1_query, + weight=None, + ) + + assert u1_birank_results_unweighted["p2"] == pytest.approx( + u1_birank_results_unweighted["p3"], rel=2e-6 + ) + + def test_davis_birank(self): + scores = bipartite.birank( + self.davis_southern_women_graph, self.women_bipartite_set + ) + answer = { + "Laura Mandeville": 0.07, + "Olivia Carleton": 0.04, + "Frances Anderson": 0.05, + "Pearl Oglethorpe": 0.04, + "Katherina Rogers": 0.06, + "Flora Price": 0.04, + "Dorothy Murchison": 0.04, + "Helen Lloyd": 0.06, + "Theresa Anderson": 0.07, + "Eleanor Nye": 0.05, + "Evelyn Jefferson": 0.07, + "Sylvia Avondale": 0.07, + "Charlotte McDowd": 0.05, + "Verne Sanderson": 0.05, + "Myra Liddel": 0.05, + "Brenda Rogers": 0.07, + "Ruth DeSand": 0.05, + "Nora Fayette": 0.07, + "E8": 0.11, + "E7": 0.09, + "E10": 0.07, + "E9": 0.1, + "E13": 0.05, + "E3": 0.07, + "E12": 0.07, + "E11": 0.06, + "E2": 0.05, + "E5": 0.08, + "E6": 0.08, + "E14": 0.05, + "E4": 0.06, + "E1": 0.05, + } + + for node, value in answer.items(): + assert scores[node] == pytest.approx(value, abs=1e-2) + + def test_davis_birank_with_personalization(self): + women_personalization = {"Laura Mandeville": 1} + scores = bipartite.birank( + self.davis_southern_women_graph, + self.women_bipartite_set, + top_personalization=women_personalization, + ) + answer = { + "Laura Mandeville": 0.29, + "Olivia Carleton": 0.02, + "Frances Anderson": 0.06, + "Pearl Oglethorpe": 0.04, + "Katherina Rogers": 0.04, + "Flora Price": 0.02, + "Dorothy Murchison": 0.03, + "Helen Lloyd": 
0.04, + "Theresa Anderson": 0.08, + "Eleanor Nye": 0.05, + "Evelyn Jefferson": 0.09, + "Sylvia Avondale": 0.05, + "Charlotte McDowd": 0.06, + "Verne Sanderson": 0.04, + "Myra Liddel": 0.03, + "Brenda Rogers": 0.08, + "Ruth DeSand": 0.05, + "Nora Fayette": 0.05, + "E8": 0.11, + "E7": 0.1, + "E10": 0.04, + "E9": 0.07, + "E13": 0.03, + "E3": 0.11, + "E12": 0.04, + "E11": 0.03, + "E2": 0.1, + "E5": 0.11, + "E6": 0.1, + "E14": 0.03, + "E4": 0.06, + "E1": 0.1, + } + + for node, value in answer.items(): + assert scores[node] == pytest.approx(value, abs=1e-2) + + def test_birank_empty_bipartite_set(self): + G = nx.Graph() + all_nodes = [1, 2, 3] + G.add_nodes_from(all_nodes) + + # Test with empty bipartite set + with pytest.raises(nx.NetworkXAlgorithmError): + bipartite.birank(G, all_nodes) + + @pytest.mark.parametrize( + "damping_factor,value", itertools.product(["alpha", "beta"], [-0.1, 1.1]) + ) + def test_birank_invalid_alpha_beta(self, damping_factor, value): + kwargs = {damping_factor: value} + with pytest.raises(nx.NetworkXAlgorithmError): + bipartite.birank( + self.davis_southern_women_graph, self.women_bipartite_set, **kwargs + ) + + def test_birank_power_iteration_failed_convergence(self): + with pytest.raises(nx.PowerIterationFailedConvergence): + bipartite.birank( + self.davis_southern_women_graph, self.women_bipartite_set, max_iter=1 + ) + + @pytest.mark.parametrize( + "personalization,alpha,beta", + itertools.product( + [ + # Concentrated case + lambda x: 1000 if x == 0 else 0, + # Uniform case + lambda x: 5, + # Zero case + lambda x: 0, + ], + [i / 2 for i in range(3)], + [i / 2 for i in range(3)], + ), + ) + def test_gnmk_convergence_birank(self, personalization, alpha, beta): + top_personalization_dict = { + node: personalization(node) for node in self.gnmk_random_graph_top_nodes + } + bipartite.birank( + self.gnmk_random_graph, + self.gnmk_random_graph_top_nodes, + top_personalization=top_personalization_dict, + alpha=alpha, + beta=beta, + ) + + def test_negative_personalization(self): + top_personalization_dict = {0: -1} + with pytest.raises(nx.NetworkXAlgorithmError): + bipartite.birank( + self.gnmk_random_graph, + self.gnmk_random_graph_top_nodes, + top_personalization=top_personalization_dict, + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py new file mode 100644 index 0000000..c24659e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matching.py @@ -0,0 +1,327 @@ +"""Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module.""" + +import itertools + +import pytest + +import networkx as nx +from networkx.algorithms.bipartite.matching import ( + eppstein_matching, + hopcroft_karp_matching, + maximum_matching, + minimum_weight_full_matching, + to_vertex_cover, +) + + +class TestMatching: + """Tests for bipartite matching algorithms.""" + + def setup_method(self): + """Creates a bipartite graph for use in testing matching algorithms. + + The bipartite graph has a maximum cardinality matching that leaves + vertex 1 and vertex 10 unmatched. The first six numbers are the left + vertices and the next six numbers are the right vertices. 
+ + """ + self.simple_graph = nx.complete_bipartite_graph(2, 3) + self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1} + + edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)] + self.top_nodes = set(range(6)) + self.graph = nx.Graph() + self.graph.add_nodes_from(range(12)) + self.graph.add_edges_from(edges) + + # Example bipartite graph from issue 2127 + G = nx.Graph() + G.add_nodes_from( + [ + (1, "C"), + (1, "B"), + (0, "G"), + (1, "F"), + (1, "E"), + (0, "C"), + (1, "D"), + (1, "I"), + (0, "A"), + (0, "D"), + (0, "F"), + (0, "E"), + (0, "H"), + (1, "G"), + (1, "A"), + (0, "I"), + (0, "B"), + (1, "H"), + ] + ) + G.add_edge((1, "C"), (0, "A")) + G.add_edge((1, "B"), (0, "A")) + G.add_edge((0, "G"), (1, "I")) + G.add_edge((0, "G"), (1, "H")) + G.add_edge((1, "F"), (0, "A")) + G.add_edge((1, "F"), (0, "C")) + G.add_edge((1, "F"), (0, "E")) + G.add_edge((1, "E"), (0, "A")) + G.add_edge((1, "E"), (0, "C")) + G.add_edge((0, "C"), (1, "D")) + G.add_edge((0, "C"), (1, "I")) + G.add_edge((0, "C"), (1, "G")) + G.add_edge((0, "C"), (1, "H")) + G.add_edge((1, "D"), (0, "A")) + G.add_edge((1, "I"), (0, "A")) + G.add_edge((1, "I"), (0, "E")) + G.add_edge((0, "A"), (1, "G")) + G.add_edge((0, "A"), (1, "H")) + G.add_edge((0, "E"), (1, "G")) + G.add_edge((0, "E"), (1, "H")) + self.disconnected_graph = G + + def check_match(self, matching): + """Asserts that the matching is what we expect from the bipartite graph + constructed in the :meth:`setup` fixture. + + """ + # For the sake of brevity, rename `matching` to `M`. + M = matching + matched_vertices = frozenset(itertools.chain(*M.items())) + # Assert that the maximum number of vertices (10) is matched. + assert matched_vertices == frozenset(range(12)) - {1, 10} + # Assert that no vertex appears in two edges, or in other words, that + # the matching (u, v) and (v, u) both appear in the matching + # dictionary. + assert all(u == M[M[u]] for u in range(12) if u in M) + + def check_vertex_cover(self, vertices): + """Asserts that the given set of vertices is the vertex cover we + expected from the bipartite graph constructed in the :meth:`setup` + fixture. + + """ + # By Konig's theorem, the number of edges in a maximum matching equals + # the number of vertices in a minimum vertex cover. + assert len(vertices) == 5 + # Assert that the set is truly a vertex cover. + for u, v in self.graph.edges(): + assert u in vertices or v in vertices + # TODO Assert that the vertices are the correct ones. + + def test_eppstein_matching(self): + """Tests that David Eppstein's implementation of the Hopcroft--Karp + algorithm produces a maximum cardinality matching. + + """ + self.check_match(eppstein_matching(self.graph, self.top_nodes)) + + def test_hopcroft_karp_matching(self): + """Tests that the Hopcroft--Karp algorithm produces a maximum + cardinality matching in a bipartite graph. 
+ + """ + self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes)) + + def test_to_vertex_cover(self): + """Test for converting a maximum matching to a minimum vertex cover.""" + matching = maximum_matching(self.graph, self.top_nodes) + vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes) + self.check_vertex_cover(vertex_cover) + + def test_eppstein_matching_simple(self): + match = eppstein_matching(self.simple_graph) + assert match == self.simple_solution + + def test_hopcroft_karp_matching_simple(self): + match = hopcroft_karp_matching(self.simple_graph) + assert match == self.simple_solution + + def test_eppstein_matching_disconnected(self): + with pytest.raises(nx.AmbiguousSolution): + match = eppstein_matching(self.disconnected_graph) + + def test_hopcroft_karp_matching_disconnected(self): + with pytest.raises(nx.AmbiguousSolution): + match = hopcroft_karp_matching(self.disconnected_graph) + + def test_issue_2127(self): + """Test from issue 2127""" + # Build the example DAG + G = nx.DiGraph() + G.add_edge("A", "C") + G.add_edge("A", "B") + G.add_edge("C", "E") + G.add_edge("C", "D") + G.add_edge("E", "G") + G.add_edge("E", "F") + G.add_edge("G", "I") + G.add_edge("G", "H") + + tc = nx.transitive_closure(G) + btc = nx.Graph() + + # Create a bipartite graph based on the transitive closure of G + for v in tc.nodes(): + btc.add_node((0, v)) + btc.add_node((1, v)) + + for u, v in tc.edges(): + btc.add_edge((0, u), (1, v)) + + top_nodes = {n for n in btc if n[0] == 0} + matching = hopcroft_karp_matching(btc, top_nodes) + vertex_cover = to_vertex_cover(btc, matching, top_nodes) + independent_set = set(G) - {v for _, v in vertex_cover} + assert {"B", "D", "F", "I", "H"} == independent_set + + def test_vertex_cover_issue_2384(self): + G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)]) + matching = maximum_matching(G) + vertex_cover = to_vertex_cover(G, matching) + for u, v in G.edges(): + assert u in vertex_cover or v in vertex_cover + + def test_vertex_cover_issue_3306(self): + G = nx.Graph() + edges = [(0, 2), (1, 0), (1, 1), (1, 2), (2, 2)] + G.add_edges_from([((i, "L"), (j, "R")) for i, j in edges]) + + matching = maximum_matching(G) + vertex_cover = to_vertex_cover(G, matching) + for u, v in G.edges(): + assert u in vertex_cover or v in vertex_cover + + def test_unorderable_nodes(self): + a = object() + b = object() + c = object() + d = object() + e = object() + G = nx.Graph([(a, d), (b, d), (b, e), (c, d)]) + matching = maximum_matching(G) + vertex_cover = to_vertex_cover(G, matching) + for u, v in G.edges(): + assert u in vertex_cover or v in vertex_cover + + +def test_eppstein_matching(): + """Test in accordance to issue #1927""" + G = nx.Graph() + G.add_nodes_from(["a", 2, 3, 4], bipartite=0) + G.add_nodes_from([1, "b", "c"], bipartite=1) + G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)]) + matching = eppstein_matching(G) + assert len(matching) == len(maximum_matching(G)) + assert all(x in set(matching.keys()) for x in set(matching.values())) + + +class TestMinimumWeightFullMatching: + @classmethod + def setup_class(cls): + pytest.importorskip("scipy") + + def test_minimum_weight_full_matching_incomplete_graph(self): + B = nx.Graph() + B.add_nodes_from([1, 2], bipartite=0) + B.add_nodes_from([3, 4], bipartite=1) + B.add_edge(1, 4, weight=100) + B.add_edge(2, 3, weight=100) + B.add_edge(2, 4, weight=50) + matching = minimum_weight_full_matching(B) + assert matching == {1: 4, 2: 3, 4: 1, 3: 2} + + def 
test_minimum_weight_full_matching_with_no_full_matching(self): + B = nx.Graph() + B.add_nodes_from([1, 2, 3], bipartite=0) + B.add_nodes_from([4, 5, 6], bipartite=1) + B.add_edge(1, 4, weight=100) + B.add_edge(2, 4, weight=100) + B.add_edge(3, 4, weight=50) + B.add_edge(3, 5, weight=50) + B.add_edge(3, 6, weight=50) + with pytest.raises(ValueError): + minimum_weight_full_matching(B) + + def test_minimum_weight_full_matching_square(self): + G = nx.complete_bipartite_graph(3, 3) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=300) + matching = minimum_weight_full_matching(G) + assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2} + + def test_minimum_weight_full_matching_smaller_left(self): + G = nx.complete_bipartite_graph(3, 4) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=1) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(1, 6, weight=2) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=290) + G.add_edge(2, 6, weight=3) + matching = minimum_weight_full_matching(G) + assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1} + + def test_minimum_weight_full_matching_smaller_top_nodes_right(self): + G = nx.complete_bipartite_graph(3, 4) + G.add_edge(0, 3, weight=400) + G.add_edge(0, 4, weight=150) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=1) + G.add_edge(1, 3, weight=400) + G.add_edge(1, 4, weight=450) + G.add_edge(1, 5, weight=600) + G.add_edge(1, 6, weight=2) + G.add_edge(2, 3, weight=300) + G.add_edge(2, 4, weight=225) + G.add_edge(2, 5, weight=290) + G.add_edge(2, 6, weight=3) + matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6]) + assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1} + + def test_minimum_weight_full_matching_smaller_right(self): + G = nx.complete_bipartite_graph(4, 3) + G.add_edge(0, 4, weight=400) + G.add_edge(0, 5, weight=400) + G.add_edge(0, 6, weight=300) + G.add_edge(1, 4, weight=150) + G.add_edge(1, 5, weight=450) + G.add_edge(1, 6, weight=225) + G.add_edge(2, 4, weight=400) + G.add_edge(2, 5, weight=600) + G.add_edge(2, 6, weight=290) + G.add_edge(3, 4, weight=1) + G.add_edge(3, 5, weight=2) + G.add_edge(3, 6, weight=3) + matching = minimum_weight_full_matching(G) + assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2} + + def test_minimum_weight_full_matching_negative_weights(self): + G = nx.complete_bipartite_graph(2, 2) + G.add_edge(0, 2, weight=-2) + G.add_edge(0, 3, weight=0.2) + G.add_edge(1, 2, weight=-2) + G.add_edge(1, 3, weight=0.3) + matching = minimum_weight_full_matching(G) + assert matching == {0: 3, 1: 2, 2: 1, 3: 0} + + def test_minimum_weight_full_matching_different_weight_key(self): + G = nx.complete_bipartite_graph(2, 2) + G.add_edge(0, 2, mass=2) + G.add_edge(0, 3, mass=0.2) + G.add_edge(1, 2, mass=1) + G.add_edge(1, 3, mass=2) + matching = minimum_weight_full_matching(G, weight="mass") + assert matching == {0: 3, 1: 2, 2: 1, 3: 0} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py new file mode 100644 index 0000000..fc8dbc7 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_matrix.py @@ -0,0 +1,82 @@ +import pytest + +import networkx as nx +from networkx.algorithms import bipartite +from networkx.utils import edges_equal + +np = pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") + + +class TestBiadjacencyMatrix: + def test_biadjacency_matrix_weight(self): + G = nx.path_graph(5) + G.add_edge(0, 1, weight=2, other=4) + X = [1, 3] + Y = [0, 2, 4] + M = bipartite.biadjacency_matrix(G, X, weight="weight") + assert M[0, 0] == 2 + M = bipartite.biadjacency_matrix(G, X, weight="other") + assert M[0, 0] == 4 + + def test_biadjacency_matrix(self): + tops = [2, 5, 10] + bots = [5, 10, 15] + for i in range(len(tops)): + G = bipartite.random_graph(tops[i], bots[i], 0.2) + top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0] + M = bipartite.biadjacency_matrix(G, top) + assert M.shape[0] == tops[i] + assert M.shape[1] == bots[i] + + def test_biadjacency_matrix_order(self): + G = nx.path_graph(5) + G.add_edge(0, 1, weight=2) + X = [3, 1] + Y = [4, 2, 0] + M = bipartite.biadjacency_matrix(G, X, Y, weight="weight") + assert M[1, 2] == 2 + + def test_biadjacency_matrix_empty_graph(self): + G = nx.empty_graph(2) + M = nx.bipartite.biadjacency_matrix(G, [0]) + assert np.array_equal(M.toarray(), np.array([[0]])) + + def test_null_graph(self): + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph(), []) + + def test_empty_graph(self): + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), []) + + def test_duplicate_row(self): + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1]) + + def test_duplicate_col(self): + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1]) + + def test_format_keyword(self): + with pytest.raises(nx.NetworkXError): + bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo") + + def test_from_biadjacency_roundtrip(self): + B1 = nx.path_graph(5) + M = bipartite.biadjacency_matrix(B1, [0, 2, 4]) + B2 = bipartite.from_biadjacency_matrix(M) + assert nx.is_isomorphic(B1, B2) + + def test_from_biadjacency_weight(self): + M = sp.sparse.csc_array([[1, 2], [0, 3]]) + B = bipartite.from_biadjacency_matrix(M) + assert edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)]) + B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight") + e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})] + assert edges_equal(B.edges(data=True), e) + + def test_from_biadjacency_multigraph(self): + M = sp.sparse.csc_array([[1, 2], [0, 3]]) + B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph()) + assert edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py new file mode 100644 index 0000000..076bb42 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_project.py @@ -0,0 +1,407 @@ +import pytest + +import networkx as nx +from networkx.algorithms import bipartite +from networkx.utils import edges_equal, nodes_equal + + +class TestBipartiteProject: + def test_path_projected_graph(self): + G = nx.path_graph(4) + P = bipartite.projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + P = 
bipartite.projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + G = nx.MultiGraph([(0, 1)]) + with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"): + bipartite.projected_graph(G, [0]) + + def test_path_projected_properties_graph(self): + G = nx.path_graph(4) + G.add_node(1, name="one") + G.add_node(2, name="two") + P = bipartite.projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + assert P.nodes[1]["name"] == G.nodes[1]["name"] + P = bipartite.projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + assert P.nodes[2]["name"] == G.nodes[2]["name"] + + def test_path_collaboration_projected_graph(self): + G = nx.path_graph(4) + P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + P[1][3]["weight"] = 1 + P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + P[0][2]["weight"] = 1 + + def test_directed_path_collaboration_projected_graph(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + P = bipartite.collaboration_weighted_projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + P[1][3]["weight"] = 1 + P = bipartite.collaboration_weighted_projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + P[0][2]["weight"] = 1 + + def test_path_weighted_projected_graph(self): + G = nx.path_graph(4) + + with pytest.raises(nx.NetworkXAlgorithmError): + bipartite.weighted_projected_graph(G, [1, 2, 3, 3]) + + P = bipartite.weighted_projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + P[1][3]["weight"] = 1 + P = bipartite.weighted_projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + P[0][2]["weight"] = 1 + + def test_digraph_weighted_projection(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)]) + P = bipartite.overlap_weighted_projected_graph(G, [1, 3]) + assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0} + assert len(P) == 2 + + def test_path_weighted_projected_directed_graph(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + P = bipartite.weighted_projected_graph(G, [1, 3]) + assert nodes_equal(list(P), [1, 3]) + assert edges_equal(list(P.edges()), [(1, 3)]) + P[1][3]["weight"] = 1 + P = bipartite.weighted_projected_graph(G, [0, 2]) + assert nodes_equal(list(P), [0, 2]) + assert edges_equal(list(P.edges()), [(0, 2)]) + P[0][2]["weight"] = 1 + + def test_star_projected_graph(self): + G = nx.star_graph(3) + P = bipartite.projected_graph(G, [1, 2, 3]) + assert nodes_equal(list(P), [1, 2, 3]) + assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)]) + P = bipartite.weighted_projected_graph(G, [1, 2, 3]) + assert nodes_equal(list(P), [1, 2, 3]) + assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)]) + + P = bipartite.projected_graph(G, [0]) + assert nodes_equal(list(P), [0]) + assert edges_equal(list(P.edges()), []) + + def test_project_multigraph(self): + G = nx.Graph() + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("a", 2) + G.add_edge("b", 2) + P = bipartite.projected_graph(G, "ab") + assert edges_equal(list(P.edges()), [("a", "b")]) + P = 
bipartite.weighted_projected_graph(G, "ab") + assert edges_equal(list(P.edges()), [("a", "b")]) + P = bipartite.projected_graph(G, "ab", multigraph=True) + assert edges_equal(list(P.edges()), [("a", "b"), ("a", "b")]) + + def test_project_collaboration(self): + G = nx.Graph() + G.add_edge("a", 1) + G.add_edge("b", 1) + G.add_edge("b", 2) + G.add_edge("c", 2) + G.add_edge("c", 3) + G.add_edge("c", 4) + G.add_edge("b", 4) + P = bipartite.collaboration_weighted_projected_graph(G, "abc") + assert P["a"]["b"]["weight"] == 1 + assert P["b"]["c"]["weight"] == 2 + + def test_directed_projection(self): + G = nx.DiGraph() + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge("B", 2) + P = bipartite.projected_graph(G, "AB") + assert edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 1 + + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert edges_equal(list(P.edges()), [("A", "B")]) + + G = nx.DiGraph() + G.add_edge("A", 1) + G.add_edge(1, "B") + G.add_edge("A", 2) + G.add_edge(2, "B") + P = bipartite.projected_graph(G, "AB") + assert edges_equal(list(P.edges()), [("A", "B")]) + P = bipartite.weighted_projected_graph(G, "AB") + assert edges_equal(list(P.edges()), [("A", "B")]) + assert P["A"]["B"]["weight"] == 2 + + P = bipartite.projected_graph(G, "AB", multigraph=True) + assert edges_equal(list(P.edges()), [("A", "B"), ("A", "B")]) + + +class TestBipartiteWeightedProjection: + @classmethod + def setup_class(cls): + # Tore Opsahl's example + # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/ + cls.G = nx.Graph() + cls.G.add_edge("A", 1) + cls.G.add_edge("A", 2) + cls.G.add_edge("B", 1) + cls.G.add_edge("B", 2) + cls.G.add_edge("B", 3) + cls.G.add_edge("B", 4) + cls.G.add_edge("B", 5) + cls.G.add_edge("C", 1) + cls.G.add_edge("D", 3) + cls.G.add_edge("E", 4) + cls.G.add_edge("E", 5) + cls.G.add_edge("E", 6) + cls.G.add_edge("F", 6) + # Graph based on figure 6 from Newman (2001) + cls.N = nx.Graph() + cls.N.add_edge("A", 1) + cls.N.add_edge("A", 2) + cls.N.add_edge("A", 3) + cls.N.add_edge("B", 1) + cls.N.add_edge("B", 2) + cls.N.add_edge("B", 3) + cls.N.add_edge("C", 1) + cls.N.add_edge("D", 1) + cls.N.add_edge("E", 3) + + def test_project_weighted_shared(self): + edges = [ + ("A", "B", 2), + ("A", "C", 1), + ("B", "C", 1), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.weighted_projected_graph(self.G, "ABCDEF") + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3), + ("A", "E", 1), + ("A", "C", 1), + ("A", "D", 1), + ("B", "E", 1), + ("B", "C", 1), + ("B", "D", 1), + ("C", "D", 1), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.weighted_projected_graph(self.N, "ABCDE") + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + def test_project_weighted_newman(self): + edges = [ + ("A", "B", 1.5), + ("A", "C", 0.5), + ("B", "C", 0.5), + ("B", "D", 1), + ("B", "E", 2), + ("E", "F", 1), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF") + assert edges_equal(list(P.edges()), Panswer.edges()) 
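+ # Newman's collaboration weighting: each event k shared by a pair of + # nodes contributes 1 / (deg(k) - 1) to their weight, where deg(k) is + # the number of participants of event k. For example, "A" and "B" share + # event 1 (three participants) and event 2 (two), so 1/2 + 1/1 = 1.5.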
+ for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 11 / 6.0), + ("A", "E", 1 / 2.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 2.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE") + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + def test_project_weighted_ratio(self): + edges = [ + ("A", "B", 2 / 6.0), + ("A", "C", 1 / 6.0), + ("B", "C", 1 / 6.0), + ("B", "D", 1 / 6.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 6.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True) + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 3.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True) + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + def test_project_weighted_overlap(self): + edges = [ + ("A", "B", 2 / 2.0), + ("A", "C", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("B", "E", 2 / 3.0), + ("E", "F", 1 / 1.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False) + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 1.0), + ("A", "C", 1 / 1.0), + ("A", "D", 1 / 1.0), + ("B", "E", 1 / 1.0), + ("B", "C", 1 / 1.0), + ("B", "D", 1 / 1.0), + ("C", "D", 1 / 1.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False) + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + def test_project_weighted_jaccard(self): + edges = [ + ("A", "B", 2 / 5.0), + ("A", "C", 1 / 2.0), + ("B", "C", 1 / 5.0), + ("B", "D", 1 / 5.0), + ("B", "E", 2 / 6.0), + ("E", "F", 1 / 3.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF") + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in list(P.edges()): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + edges = [ + ("A", "B", 3 / 3.0), + ("A", "E", 1 / 3.0), + ("A", "C", 1 / 3.0), + ("A", "D", 1 / 3.0), + ("B", "E", 1 / 3.0), + ("B", "C", 1 / 3.0), + ("B", "D", 1 / 3.0), + ("C", "D", 1 / 1.0), + ] + Panswer = nx.Graph() + Panswer.add_weighted_edges_from(edges) + P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE") + assert edges_equal(list(P.edges()), Panswer.edges()) + for u, v in P.edges(): + assert P[u][v]["weight"] == Panswer[u][v]["weight"] + + def test_generic_weighted_projected_graph_simple(self): + def shared(G, u, v): + return len(set(G[u]) & set(G[v])) + + B = 
nx.path_graph(5) + G = bipartite.generic_weighted_projected_graph( + B, [0, 2, 4], weight_function=shared + ) + assert nodes_equal(list(G), [0, 2, 4]) + assert edges_equal( + list(G.edges(data=True)), + [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})], + ) + + G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) + assert nodes_equal(list(G), [0, 2, 4]) + assert edges_equal( + list(G.edges(data=True)), + [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})], + ) + B = nx.DiGraph() + nx.add_path(B, range(5)) + G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) + assert nodes_equal(list(G), [0, 2, 4]) + assert edges_equal( + list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})] + ) + + def test_generic_weighted_projected_graph_custom(self): + def jaccard(G, u, v): + unbrs = set(G[u]) + vnbrs = set(G[v]) + return len(unbrs & vnbrs) / len(unbrs | vnbrs) + + def my_weight(G, u, v, weight="weight"): + w = 0 + for nbr in set(G[u]) & set(G[v]): + w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1) + return w + + B = nx.bipartite.complete_bipartite_graph(2, 2) + for i, (u, v) in enumerate(B.edges()): + B.edges[u, v]["weight"] = i + 1 + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=jaccard + ) + assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})]) + G = bipartite.generic_weighted_projected_graph( + B, [0, 1], weight_function=my_weight + ) + assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})]) + G = bipartite.generic_weighted_projected_graph(B, [0, 1]) + assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py new file mode 100644 index 0000000..8d979db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_redundancy.py @@ -0,0 +1,35 @@ +"""Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module.""" + +import pytest + +from networkx import NetworkXError, cycle_graph +from networkx.algorithms.bipartite import complete_bipartite_graph, node_redundancy + + +def test_no_redundant_nodes(): + G = complete_bipartite_graph(2, 2) + + # when nodes is None + rc = node_redundancy(G) + assert all(redundancy == 1 for redundancy in rc.values()) + + # when set of nodes is specified + rc = node_redundancy(G, (2, 3)) + assert rc == {2: 1.0, 3: 1.0} + + +def test_redundant_nodes(): + G = cycle_graph(6) + edge = {0, 3} + G.add_edge(*edge) + redundancy = node_redundancy(G) + for v in edge: + assert redundancy[v] == 2 / 3 + for v in set(G) - edge: + assert redundancy[v] == 1 + + +def test_not_enough_neighbors(): + with pytest.raises(NetworkXError): + G = complete_bipartite_graph(1, 2) + node_redundancy(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py new file mode 100644 index 0000000..b940649 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py @@ -0,0 +1,80 @@ +import pytest + +pytest.importorskip("scipy") + +import networkx as nx +from networkx.algorithms.bipartite import spectral_bipartivity as sb + +# Examples from Figure 1 +# E. Estrada and J. A. 
Rodríguez-Velázquez, "Spectral measures of +# bipartivity in complex networks", PhysRev E 72, 046105 (2005) + + +class TestSpectralBipartivity: + def test_star_like(self): + # star-like + + G = nx.star_graph(2) + G.add_edge(1, 2) + assert sb(G) == pytest.approx(0.843, abs=1e-3) + + G = nx.star_graph(3) + G.add_edge(1, 2) + assert sb(G) == pytest.approx(0.871, abs=1e-3) + + G = nx.star_graph(4) + G.add_edge(1, 2) + assert sb(G) == pytest.approx(0.890, abs=1e-3) + + def test_k23_like(self): + # K2,3-like + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(0, 1) + assert sb(G) == pytest.approx(0.769, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + assert sb(G) == pytest.approx(0.829, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + G.add_edge(3, 4) + assert sb(G) == pytest.approx(0.731, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(0, 1) + G.add_edge(2, 4) + assert sb(G) == pytest.approx(0.692, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + G.add_edge(3, 4) + G.add_edge(0, 1) + assert sb(G) == pytest.approx(0.645, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + G.add_edge(3, 4) + G.add_edge(2, 3) + assert sb(G) == pytest.approx(0.645, abs=1e-3) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + G.add_edge(3, 4) + G.add_edge(2, 3) + G.add_edge(0, 1) + assert sb(G) == pytest.approx(0.597, abs=1e-3) + + def test_single_nodes(self): + # single nodes + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(2, 4) + sbn = sb(G, nodes=[1, 2]) + assert sbn[1] == pytest.approx(0.85, abs=1e-2) + assert sbn[2] == pytest.approx(0.77, abs=1e-2) + + G = nx.complete_bipartite_graph(2, 3) + G.add_edge(0, 1) + sbn = sb(G, nodes=[1, 2]) + assert sbn[1] == pytest.approx(0.73, abs=1e-2) + assert sbn[2] == pytest.approx(0.82, abs=1e-2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py new file mode 100644 index 0000000..ba05d80 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/boundary.py @@ -0,0 +1,168 @@ +"""Routines to find the boundary of a set of nodes. + +An edge boundary is a set of edges, each of which has exactly one +endpoint in a given set of nodes (or, in the case of directed graphs, +the set of edges whose source node is in the set). + +A node boundary of a set *S* of nodes is the set of (out-)neighbors of +nodes in *S* that are outside *S*. + +""" + +from itertools import chain + +import networkx as nx + +__all__ = ["edge_boundary", "node_boundary"] + + +@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data") +def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None): + """Returns the edge boundary of `nbunch1`. + + The *edge boundary* of a set *S* with respect to a set *T* is the + set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*. + If *T* is not specified, it is assumed to be the set of all nodes + not in *S*. + + Parameters + ---------- + G : NetworkX graph + + nbunch1 : iterable + Iterable of nodes in the graph representing the set of nodes + whose edge boundary will be returned. (This is the set *S* from + the definition above.) + + nbunch2 : iterable + Iterable of nodes representing the target (or "exterior") set of + nodes. (This is the set *T* from the definition above.) If not + specified, this is assumed to be the set of all nodes in `G` + not in `nbunch1`. 
+ + keys : bool + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + data : bool or object + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + default : object + This parameter has the same meaning as in + :meth:`MultiGraph.edges`. + + Returns + ------- + iterator + An iterator over the edges in the boundary of `nbunch1` with + respect to `nbunch2`. If `keys`, `data`, or `default` + are specified and `G` is a multigraph, then edges are returned + with keys and/or data, as in :meth:`MultiGraph.edges`. + + Examples + -------- + >>> G = nx.wheel_graph(6) + + When nbunch2=None: + + >>> list(nx.edge_boundary(G, (1, 3))) + [(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)] + + When nbunch2 is given: + + >>> list(nx.edge_boundary(G, (1, 3), (2, 0))) + [(1, 0), (1, 2), (3, 0), (3, 2)] + + Notes + ----- + Any element of `nbunch` that is not in the graph `G` will be + ignored. + + `nbunch1` and `nbunch2` are usually meant to be disjoint, but in + the interest of speed and generality, that is not required here. + + """ + nset1 = {n for n in nbunch1 if n in G} + # Here we create an iterator over edges incident to nodes in the set + # `nset1`. The `Graph.edges()` method does not provide a guarantee + # on the orientation of the edges, so our algorithm below must + # handle the case in which exactly one orientation, either (u, v) or + # (v, u), appears in this iterable. + if G.is_multigraph(): + edges = G.edges(nset1, data=data, keys=keys, default=default) + else: + edges = G.edges(nset1, data=data, default=default) + # If `nbunch2` is not provided, then it is assumed to be the set + # complement of `nbunch1`. For the sake of efficiency, this is + # implemented by using the `not in` operator, instead of by creating + # an additional set and using the `in` operator. + if nbunch2 is None: + return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1)) + nset2 = set(nbunch2) + return ( + e + for e in edges + if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2) + ) + + +@nx._dispatchable +def node_boundary(G, nbunch1, nbunch2=None): + """Returns the node boundary of `nbunch1`. + + The *node boundary* of a set *S* with respect to a set *T* is the + set of nodes *v* in *T* such that for some *u* in *S*, there is an + edge joining *u* to *v*. If *T* is not specified, it is assumed to + be the set of all nodes not in *S*. + + Parameters + ---------- + G : NetworkX graph + + nbunch1 : iterable + Iterable of nodes in the graph representing the set of nodes + whose node boundary will be returned. (This is the set *S* from + the definition above.) + + nbunch2 : iterable + Iterable of nodes representing the target (or "exterior") set of + nodes. (This is the set *T* from the definition above.) If not + specified, this is assumed to be the set of all nodes in `G` + not in `nbunch1`. + + Returns + ------- + set + The node boundary of `nbunch1` with respect to `nbunch2`. + + Examples + -------- + >>> G = nx.wheel_graph(6) + + When nbunch2=None: + + >>> list(nx.node_boundary(G, (3, 4))) + [0, 2, 5] + + When nbunch2 is given: + + >>> list(nx.node_boundary(G, (3, 4), (0, 1, 5))) + [0, 5] + + Notes + ----- + Any element of `nbunch` that is not in the graph `G` will be + ignored. + + `nbunch1` and `nbunch2` are usually meant to be disjoint, but in + the interest of speed and generality, that is not required here. 
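+ + The boundary is computed in a single pass over the (out-)neighborhoods + of the nodes in `nbunch1`, so the running time is linear in the sum of + their degrees.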
+ + """ + nset1 = {n for n in nbunch1 if n in G} + bdy = set(chain.from_iterable(G[v] for v in nset1)) - nset1 + # If `nbunch2` is not specified, it is assumed to be the set + # complement of `nbunch1`. + if nbunch2 is not None: + bdy &= set(nbunch2) + return bdy diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py new file mode 100644 index 0000000..eaa6fd3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/bridges.py @@ -0,0 +1,205 @@ +"""Bridge-finding algorithms.""" + +from itertools import chain + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["bridges", "has_bridges", "local_bridges"] + + +@not_implemented_for("directed") +@nx._dispatchable +def bridges(G, root=None): + """Generate all bridges in a graph. + + A *bridge* in a graph is an edge whose removal causes the number of + connected components of the graph to increase. Equivalently, a bridge is an + edge that does not belong to any cycle. Bridges are also known as cut-edges, + isthmuses, or cut arcs. + + Parameters + ---------- + G : undirected graph + + root : node (optional) + A node in the graph `G`. If specified, only the bridges in the + connected component containing this node will be returned. + + Yields + ------ + e : edge + An edge in the graph whose removal disconnects the graph (or + causes the number of connected components to increase). + + Raises + ------ + NodeNotFound + If `root` is not in the graph `G`. + + NetworkXNotImplemented + If `G` is a directed graph. + + Examples + -------- + The barbell graph with parameter zero has a single bridge: + + >>> G = nx.barbell_graph(10, 0) + >>> list(nx.bridges(G)) + [(9, 10)] + + Notes + ----- + This is an implementation of the algorithm described in [1]_. An edge is a + bridge if and only if it is not contained in any chain. Chains are found + using the :func:`networkx.chain_decomposition` function. + + The algorithm described in [1]_ requires a simple graph. If the provided + graph is a multigraph, we convert it to a simple graph and verify that any + bridges discovered by the chain decomposition algorithm are not multi-edges. + + Ignoring polylogarithmic factors, the worst-case time complexity is the + same as the :func:`networkx.chain_decomposition` function, + $O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is + the number of edges. + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions + """ + multigraph = G.is_multigraph() + H = nx.Graph(G) if multigraph else G + chains = nx.chain_decomposition(H, root=root) + chain_edges = set(chain.from_iterable(chains)) + if root is not None: + H = H.subgraph(nx.node_connected_component(H, root)).copy() + for u, v in H.edges(): + if (u, v) not in chain_edges and (v, u) not in chain_edges: + if multigraph and len(G[u][v]) > 1: + continue + yield u, v + + +@not_implemented_for("directed") +@nx._dispatchable +def has_bridges(G, root=None): + """Decide whether a graph has any bridges. + + A *bridge* in a graph is an edge whose removal causes the number of + connected components of the graph to increase. + + Parameters + ---------- + G : undirected graph + + root : node (optional) + A node in the graph `G`. If specified, only the bridges in the + connected component containing this node will be considered. 
+ + Returns + ------- + bool + Whether the graph (or the connected component containing `root`) + has any bridges. + + Raises + ------ + NodeNotFound + If `root` is not in the graph `G`. + + NetworkXNotImplemented + If `G` is a directed graph. + + Examples + -------- + The barbell graph with parameter zero has a single bridge:: + + >>> G = nx.barbell_graph(10, 0) + >>> nx.has_bridges(G) + True + + On the other hand, the cycle graph has no bridges:: + + >>> G = nx.cycle_graph(5) + >>> nx.has_bridges(G) + False + + Notes + ----- + This implementation uses the :func:`networkx.bridges` function, so + it shares its worst-case time complexity, $O(m + n)$, ignoring + polylogarithmic factors, where $n$ is the number of nodes in the + graph and $m$ is the number of edges. + + """ + try: + next(bridges(G, root=root)) + except StopIteration: + return False + else: + return True + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def local_bridges(G, with_span=True, weight=None): + """Iterate over local bridges of `G` optionally computing the span + + A *local bridge* is an edge whose endpoints have no common neighbors. + That is, the edge is not part of a triangle in the graph. + + The *span* of a *local bridge* is the shortest path length between + the endpoints if the local bridge is removed. + + Parameters + ---------- + G : undirected graph + + with_span : bool + If True, yield a 3-tuple `(u, v, span)` + + weight : function, string or None (default: None) + If function, used to compute edge weights for the span. + If string, the edge data attribute used in calculating span. + If None, all edges have weight 1. + + Yields + ------ + e : edge + The local bridges as an edge 2-tuple of nodes `(u, v)` or + as a 3-tuple `(u, v, span)` when `with_span is True`. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph or multigraph. + + Examples + -------- + A cycle graph has every edge a local bridge with span N-1. + + >>> G = nx.cycle_graph(9) + >>> (0, 8, 8) in set(nx.local_bridges(G)) + True + """ + if with_span is not True: + for u, v in G.edges: + if not (set(G[u]) & set(G[v])): + yield u, v + else: + wt = nx.weighted._weight_function(G, weight) + for u, v in G.edges: + if not (set(G[u]) & set(G[v])): + enodes = {u, v} + + def hide_edge(n, nbr, d): + if n not in enodes or nbr not in enodes: + return wt(n, nbr, d) + return None + + try: + span = nx.shortest_path_length(G, u, v, weight=hide_edge) + yield u, v, span + except nx.NetworkXNoPath: + yield u, v, float("inf") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py new file mode 100644 index 0000000..db8553b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/broadcasting.py @@ -0,0 +1,155 @@ +"""Routines to calculate the broadcast time of certain graphs. + +Broadcasting is an information dissemination problem in which a node in a graph, +called the originator, must distribute a message to all other nodes by placing +a series of calls along the edges of the graph. Once informed, other nodes aid +the originator in distributing the message. + +The broadcasting must be completed as quickly as possible subject to the +following constraints: +- Each call requires one unit of time. +- A node can only participate in one call per unit of time. +- Each call only involves two adjacent nodes: a sender and a receiver. 
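+ + For example, broadcasting from the center of a star graph with three + leaves takes three time units, since the center can call only one leaf + per time unit: + + >>> import networkx as nx + >>> nx.tree_broadcast_time(nx.star_graph(3), 0) + 3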
+""" + +import networkx as nx +from networkx import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = [ + "tree_broadcast_center", + "tree_broadcast_time", +] + + +def _get_max_broadcast_value(G, U, v, values): + adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True) + return max(values[u] + i for i, u in enumerate(adj, start=1)) + + +def _get_broadcast_centers(G, v, values, target): + adj = sorted(G.neighbors(v), key=values.get, reverse=True) + j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target) + return set([v] + adj[:j]) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def tree_broadcast_center(G): + """Return the Broadcast Center of the tree `G`. + + The broadcast center of a graph G denotes the set of nodes having + minimum broadcast time [1]_. This is a linear algorithm for determining + the broadcast center of a tree with ``N`` nodes, as a by-product it also + determines the broadcast time from the broadcast center. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + + Returns + ------- + BC : (int, set) tuple + minimum broadcast number of the tree, set of broadcast centers + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T, + Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981) + """ + # Assert that the graph G is a tree + if not nx.is_tree(G): + NetworkXError("Input graph is not a tree") + # step 0 + if G.number_of_nodes() == 2: + return 1, set(G.nodes()) + if G.number_of_nodes() == 1: + return 0, set(G.nodes()) + + # step 1 + U = {node for node, deg in G.degree if deg == 1} + values = dict.fromkeys(U, 0) + T = G.copy() + T.remove_nodes_from(U) + + # step 2 + W = {node for node, deg in T.degree if deg == 1} + values.update((w, G.degree[w] - 1) for w in W) + + # step 3 + while T.number_of_nodes() >= 2: + # step 4 + w = min(W, key=lambda n: values[n]) + v = next(T.neighbors(w)) + + # step 5 + U.add(w) + W.remove(w) + T.remove_node(w) + + # step 6 + if T.degree(v) == 1: + # update t(v) + values.update({v: _get_max_broadcast_value(G, U, v, values)}) + W.add(v) + + # step 7 + v = nx.utils.arbitrary_element(T) + b_T = _get_max_broadcast_value(G, U, v, values) + return b_T, _get_broadcast_centers(G, v, values, b_T) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def tree_broadcast_time(G, node=None): + """Return the Broadcast Time of the tree `G`. + + The minimum broadcast time of a node is defined as the minimum amount + of time required to complete broadcasting starting from the + originator. The broadcast time of a graph is the maximum over + all nodes of the minimum broadcast time from that node [1]_. + This function returns the minimum broadcast time of `node`. + If `node` is None the broadcast time for the graph is returned. + + Parameters + ---------- + G : undirected graph + The graph should be an undirected tree + node: int, optional + index of starting node. If `None`, the algorithm returns the broadcast + time of the tree. + + Returns + ------- + BT : int + Broadcast Time of a node in a tree + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + References + ---------- + .. [1] Harutyunyan, H. A. and Li, Z. + "A Simple Construction of Broadcast Graphs." + In Computing and Combinatorics. COCOON 2019 + (Ed. 
D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019. + """ + b_T, b_C = tree_broadcast_center(G) + if node is not None: + return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C) + dist_from_center = dict.fromkeys(G, len(G)) + for u in b_C: + for v, dist in nx.shortest_path_length(G, u).items(): + if dist < dist_from_center[v]: + dist_from_center[v] = dist + return b_T + max(dist_from_center.values()) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py new file mode 100644 index 0000000..c91a904 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__init__.py @@ -0,0 +1,20 @@ +from .betweenness import * +from .betweenness_subset import * +from .closeness import * +from .current_flow_betweenness import * +from .current_flow_betweenness_subset import * +from .current_flow_closeness import * +from .degree_alg import * +from .dispersion import * +from .eigenvector import * +from .group import * +from .harmonic import * +from .katz import * +from .load import * +from .percolation import * +from .reaching import * +from .second_order import * +from .subgraph_alg import * +from .trophic import * +from .voterank_alg import * +from .laplacian import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1cffea1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-312.pyc new file mode 100644 index 0000000..e366a71 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-312.pyc new file mode 100644 index 0000000..53326ad Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/betweenness_subset.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-312.pyc new file mode 100644 index 0000000..74e31e5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/closeness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-312.pyc new file mode 100644 index 0000000..0f5d33b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-312.pyc new file mode 100644 index 0000000..8665d30 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_betweenness_subset.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-312.pyc new file mode 100644 index 0000000..af8691f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/current_flow_closeness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-312.pyc new file mode 100644 index 0000000..d88df27 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/degree_alg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-312.pyc new file mode 100644 index 0000000..24d876e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/dispersion.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-312.pyc new file mode 100644 index 0000000..aff74c6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/eigenvector.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-312.pyc new file mode 100644 index 0000000..90ea07e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/flow_matrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-312.pyc new file mode 100644 index 0000000..c83a821 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/group.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-312.pyc new file mode 100644 index 0000000..dc9ba66 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/harmonic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-312.pyc new file mode 100644 index 0000000..98c57b2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/katz.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-312.pyc new file mode 100644 index 0000000..47c112a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/laplacian.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-312.pyc new file mode 100644 index 0000000..3f9cca9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/load.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-312.pyc new file mode 100644 index 0000000..979d20f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/percolation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-312.pyc new file mode 100644 index 0000000..669353c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/reaching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-312.pyc new file mode 100644 index 0000000..a031846 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/second_order.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-312.pyc new file mode 100644 index 0000000..1e81b6a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/subgraph_alg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-312.pyc new file mode 100644 index 0000000..8998922 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/trophic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-312.pyc new file mode 100644 index 0000000..09ce940 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/__pycache__/voterank_alg.cpython-312.pyc differ diff 
--git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py new file mode 100644 index 0000000..d945a55 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness.py @@ -0,0 +1,469 @@ +"""Betweenness centrality measures.""" + +import math +from collections import deque +from heapq import heappop, heappush +from itertools import count + +import networkx as nx +from networkx.algorithms.shortest_paths.weighted import _weight_function +from networkx.utils import py_random_state +from networkx.utils.decorators import not_implemented_for + +__all__ = ["betweenness_centrality", "edge_betweenness_centrality"] + + +@py_random_state(5) +@nx._dispatchable(edge_attrs="weight") +def betweenness_centrality( + G, k=None, normalized=True, weight=None, endpoints=False, seed=None +): + r"""Compute the shortest-path betweenness centrality for nodes. + + Betweenness centrality of a node $v$ is the sum of the + fraction of all-pairs shortest paths that pass through $v$ + + .. math:: + + c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)} + + where $V$ is the set of nodes, $\sigma(s, t)$ is the number of + shortest $(s, t)$-paths, and $\sigma(s, t|v)$ is the number of + those paths passing through some node $v$ other than $s, t$. + If $s = t$, $\sigma(s, t) = 1$, and if $v \in {s, t}$, + $\sigma(s, t|v) = 0$ [2]_. + + Parameters + ---------- + G : graph + A NetworkX graph. + + k : int, optional (default=None) + If k is not None use k node samples to estimate betweenness. + The value of k <= n where n is the number of nodes in the graph. + Higher values give better approximation. + + normalized : bool, optional + If True the betweenness values are normalized by `2/((n-1)(n-2))` + for graphs, and `1/((n-1)(n-2))` for directed graphs where `n` + is the number of nodes in G. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + Weights are used to calculate weighted shortest paths, so they are + interpreted as distances. + + endpoints : bool, optional + If True include the endpoints in the shortest path counts. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Note that this is only used if k is not None. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with betweenness centrality as the value. + + See Also + -------- + edge_betweenness_centrality + load_centrality + + Notes + ----- + The algorithm is from Ulrik Brandes [1]_. + See [4]_ for the original first published version and [2]_ for details on + algorithms for variations and related metrics. + + For approximate betweenness calculations set k=#samples to use + k nodes ("pivots") to estimate the betweenness values. For an estimate + of the number of pivots needed see [3]_. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + are easy to count. Undirected paths are tricky: should a path + from "u" to "v" count as 1 undirected path or as 2 directed paths? + + For betweenness_centrality we report the number of undirected + paths when G is undirected. 
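+
+    For example (an editorial sketch, not part of the upstream docstring): on
+    an undirected path graph the middle node lies on the most shortest paths.
+
+    >>> G = nx.path_graph(5)
+    >>> {n: round(c, 3) for n, c in nx.betweenness_centrality(G).items()}
+    {0: 0.0, 1: 0.5, 2: 0.667, 3: 0.5, 4: 0.0}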
+ + For betweenness_centrality_subset the reporting is different. + If the source and target subsets are the same, then we want + to count undirected paths. But if the source and target subsets + differ -- for example, if sources is {0} and targets is {1}, + then we are only counting the paths in one direction. They are + undirected paths but we are counting them in a directed way. + To count them as undirected paths, each should count as half a path. + + This algorithm is not guaranteed to be correct if edge weights + are floating point numbers. As a workaround you can use integer + numbers by multiplying the relevant edge attributes by a convenient + constant factor (eg 100) and converting to integers. + + References + ---------- + .. [1] Ulrik Brandes: + A Faster Algorithm for Betweenness Centrality. + Journal of Mathematical Sociology 25(2):163-177, 2001. + https://doi.org/10.1080/0022250X.2001.9990249 + .. [2] Ulrik Brandes: + On Variants of Shortest-Path Betweenness + Centrality and their Generic Computation. + Social Networks 30(2):136-145, 2008. + https://doi.org/10.1016/j.socnet.2007.11.001 + .. [3] Ulrik Brandes and Christian Pich: + Centrality Estimation in Large Networks. + International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007. + https://dx.doi.org/10.1142/S0218127407018403 + .. [4] Linton C. Freeman: + A set of measures of centrality based on betweenness. + Sociometry 40: 35–41, 1977 + https://doi.org/10.2307/3033543 + """ + betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G + if k == len(G): + # This is done for performance; the result is the same regardless. + k = None + if k is None: + nodes = G + else: + nodes = seed.sample(list(G.nodes()), k) + for s in nodes: + # single source shortest paths + if weight is None: # use BFS + S, P, sigma, _ = _single_source_shortest_path_basic(G, s) + else: # use Dijkstra's algorithm + S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight) + # accumulation + if endpoints: + betweenness, _ = _accumulate_endpoints(betweenness, S, P, sigma, s) + else: + betweenness, _ = _accumulate_basic(betweenness, S, P, sigma, s) + # rescaling + betweenness = _rescale( + betweenness, + len(G), + normalized=normalized, + directed=G.is_directed(), + k=k, + endpoints=endpoints, + sampled_nodes=nodes, + ) + return betweenness + + +@py_random_state(4) +@nx._dispatchable(edge_attrs="weight") +def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None): + r"""Compute betweenness centrality for edges. + + Betweenness centrality of an edge $e$ is the sum of the + fraction of all-pairs shortest paths that pass through $e$ + + .. math:: + + c_B(e) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)} + + where $V$ is the set of nodes, $\sigma(s, t)$ is the number of + shortest $(s, t)$-paths, and $\sigma(s, t|e)$ is the number of + those paths passing through edge $e$ [2]_. + + Parameters + ---------- + G : graph + A NetworkX graph. + + k : int, optional (default=None) + If k is not None use k node samples to estimate betweenness. + The value of k <= n where n is the number of nodes in the graph. + Higher values give better approximation. + + normalized : bool, optional + If True the betweenness values are normalized by $2/(n(n-1))$ + for graphs, and $1/(n(n-1))$ for directed graphs where $n$ + is the number of nodes in G. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. 
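+        # (editorial note) Brandes' Dijkstra variant: besides distances D,
+        # sigma[v] counts the number of shortest s-v paths and P[v] records
+        # the shortest-path predecessors used in the accumulation phase.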
+ Weights are used to calculate weighted shortest paths, so they are + interpreted as distances. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Note that this is only used if k is not None. + + Returns + ------- + edges : dictionary + Dictionary of edges with betweenness centrality as the value. + + See Also + -------- + betweenness_centrality + edge_load + + Notes + ----- + The algorithm is from Ulrik Brandes [1]_. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + References + ---------- + .. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes, + Journal of Mathematical Sociology 25(2):163-177, 2001. + https://doi.org/10.1080/0022250X.2001.9990249 + .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness + Centrality and their Generic Computation. + Social Networks 30(2):136-145, 2008. + https://doi.org/10.1016/j.socnet.2007.11.001 + """ + betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G + # b[e]=0 for e in G.edges() + betweenness.update(dict.fromkeys(G.edges(), 0.0)) + if k is None: + nodes = G + else: + nodes = seed.sample(list(G.nodes()), k) + for s in nodes: + # single source shortest paths + if weight is None: # use BFS + S, P, sigma, _ = _single_source_shortest_path_basic(G, s) + else: # use Dijkstra's algorithm + S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight) + # accumulation + betweenness = _accumulate_edges(betweenness, S, P, sigma, s) + # rescaling + for n in G: # remove nodes to only return edges + del betweenness[n] + betweenness = _rescale_e( + betweenness, len(G), normalized=normalized, directed=G.is_directed() + ) + if G.is_multigraph(): + betweenness = _add_edge_keys(G, betweenness, weight=weight) + return betweenness + + +# helpers for betweenness centrality + + +def _single_source_shortest_path_basic(G, s): + S = [] + P = {} + for v in G: + P[v] = [] + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + D = {} + sigma[s] = 1.0 + D[s] = 0 + Q = deque([s]) + while Q: # use BFS to find shortest paths + v = Q.popleft() + S.append(v) + Dv = D[v] + sigmav = sigma[v] + for w in G[v]: + if w not in D: + Q.append(w) + D[w] = Dv + 1 + if D[w] == Dv + 1: # this is a shortest path, count paths + sigma[w] += sigmav + P[w].append(v) # predecessors + return S, P, sigma, D + + +def _single_source_dijkstra_path_basic(G, s, weight): + weight = _weight_function(G, weight) + # modified from Eppstein + S = [] + P = {} + for v in G: + P[v] = [] + sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G + D = {} + sigma[s] = 1.0 + seen = {s: 0} + c = count() + Q = [] # use Q as heap with (distance,node id) tuples + heappush(Q, (0, next(c), s, s)) + while Q: + (dist, _, pred, v) = heappop(Q) + if v in D: + continue # already searched this node. 
+ sigma[v] += sigma[pred] # count paths + S.append(v) + D[v] = dist + for w, edgedata in G[v].items(): + vw_dist = dist + weight(v, w, edgedata) + if w not in D and (w not in seen or vw_dist < seen[w]): + seen[w] = vw_dist + heappush(Q, (vw_dist, next(c), v, w)) + sigma[w] = 0.0 + P[w] = [v] + elif vw_dist == seen[w]: # handle equal paths + sigma[w] += sigma[v] + P[w].append(v) + return S, P, sigma, D + + +def _accumulate_basic(betweenness, S, P, sigma, s): + delta = dict.fromkeys(S, 0) + while S: + w = S.pop() + coeff = (1 + delta[w]) / sigma[w] + for v in P[w]: + delta[v] += sigma[v] * coeff + if w != s: + betweenness[w] += delta[w] + return betweenness, delta + + +def _accumulate_endpoints(betweenness, S, P, sigma, s): + betweenness[s] += len(S) - 1 + delta = dict.fromkeys(S, 0) + while S: + w = S.pop() + coeff = (1 + delta[w]) / sigma[w] + for v in P[w]: + delta[v] += sigma[v] * coeff + if w != s: + betweenness[w] += delta[w] + 1 + return betweenness, delta + + +def _accumulate_edges(betweenness, S, P, sigma, s): + delta = dict.fromkeys(S, 0) + while S: + w = S.pop() + coeff = (1 + delta[w]) / sigma[w] + for v in P[w]: + c = sigma[v] * coeff + if (v, w) not in betweenness: + betweenness[(w, v)] += c + else: + betweenness[(v, w)] += c + delta[v] += c + if w != s: + betweenness[w] += delta[w] + return betweenness + + +def _rescale(betweenness, n, *, normalized, directed, k, endpoints, sampled_nodes): + # N is used to count the number of valid (s, t) pairs where s != t that + # could have a path pass through v. If endpoints is False, then v must + # not be the target t, hence why we subtract by 1. + N = n if endpoints else n - 1 + if N < 2: + # No rescaling necessary: b=0 for all nodes + return betweenness + + K_source = N if k is None else k + + if k is None or endpoints: + # No sampling adjustment needed + if normalized: + # Divide by the number of valid (s, t) node pairs that could have + # a path through v where s != t. + scale = 1 / (K_source * (N - 1)) + else: + # Scale to the full BC + if not directed: + # The non-normalized BC values are computed the same way for + # directed and undirected graphs: shortest paths are computed and + # counted for each *ordered* (s, t) pair. Undirected graphs should + # only count valid *unordered* node pairs {s, t}; that is, (s, t) + # and (t, s) should be counted only once. We correct for this here. + correction = 2 + else: + correction = 1 + scale = N / (K_source * correction) + + if scale != 1: + for v in betweenness: + betweenness[v] *= scale + return betweenness + + # Sampling adjustment needed when excluding endpoints when using k. In this + # case, we need to handle source nodes differently from non-source nodes, + # because source nodes can't include themselves since endpoints are excluded. + # Without this, k == n would be a special case that would violate the + # assumption that node `v` is not one of the (s, t) node pairs. 
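+    # (editorial example) With n=10 nodes sampled at k=2, endpoints=False and
+    # normalized=True: N = 9 and K_source = 2, so a sampled source is scaled
+    # by 1/((2-1)*(9-1)) = 1/8 and every other node by 1/(2*(9-1)) = 1/16.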
+ if normalized: + # NaN for undefined 0/0; there is no data for source node when k=1 + scale_source = 1 / ((K_source - 1) * (N - 1)) if K_source > 1 else math.nan + scale_nonsource = 1 / (K_source * (N - 1)) + else: + correction = 1 if directed else 2 + scale_source = N / ((K_source - 1) * correction) if K_source > 1 else math.nan + scale_nonsource = N / (K_source * correction) + + sampled_nodes = set(sampled_nodes) + for v in betweenness: + betweenness[v] *= scale_source if v in sampled_nodes else scale_nonsource + return betweenness + + +def _rescale_e(betweenness, n, normalized, directed=False, k=None): + if normalized: + if n <= 1: + scale = None # no normalization b=0 for all nodes + else: + scale = 1 / (n * (n - 1)) + else: # rescale by 2 for undirected graphs + if not directed: + scale = 0.5 + else: + scale = None + if scale is not None: + if k is not None: + scale = scale * n / k + for v in betweenness: + betweenness[v] *= scale + return betweenness + + +@not_implemented_for("graph") +def _add_edge_keys(G, betweenness, weight=None): + r"""Adds the corrected betweenness centrality (BC) values for multigraphs. + + Parameters + ---------- + G : NetworkX graph. + + betweenness : dictionary + Dictionary mapping adjacent node tuples to betweenness centrality values. + + weight : string or function + See `_weight_function` for details. Defaults to `None`. + + Returns + ------- + edges : dictionary + The parameter `betweenness` including edges with keys and their + betweenness centrality values. + + The BC value is divided among edges of equal weight. + """ + _weight = _weight_function(G, weight) + + edge_bc = dict.fromkeys(G.edges, 0.0) + for u, v in betweenness: + d = G[u][v] + wt = _weight(u, v, d) + keys = [k for k in d if _weight(u, v, {k: d[k]}) == wt] + bc = betweenness[(u, v)] / len(keys) + for k in keys: + edge_bc[(u, v, k)] = bc + + return edge_bc diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py new file mode 100644 index 0000000..b9e9936 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/betweenness_subset.py @@ -0,0 +1,275 @@ +"""Betweenness centrality measures for subsets of nodes.""" + +import networkx as nx +from networkx.algorithms.centrality.betweenness import ( + _add_edge_keys, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_dijkstra_path_basic as dijkstra, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_shortest_path_basic as shortest_path, +) + +__all__ = [ + "betweenness_centrality_subset", + "edge_betweenness_centrality_subset", +] + + +@nx._dispatchable(edge_attrs="weight") +def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None): + r"""Compute betweenness centrality for a subset of nodes. + + .. math:: + + c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)} + + where $S$ is the set of sources, $T$ is the set of targets, + $\sigma(s, t)$ is the number of shortest $(s, t)$-paths, + and $\sigma(s, t|v)$ is the number of those paths + passing through some node $v$ other than $s, t$. + If $s = t$, $\sigma(s, t) = 1$, + and if $v \in {s, t}$, $\sigma(s, t|v) = 0$ [2]_. + + + Parameters + ---------- + G : graph + A NetworkX graph. 
+ + sources: list of nodes + Nodes to use as sources for shortest paths in betweenness + + targets: list of nodes + Nodes to use as targets for shortest paths in betweenness + + normalized : bool, optional + If True the betweenness values are normalized by $2/((n-1)(n-2))$ + for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$ + is the number of nodes in G. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + Weights are used to calculate weighted shortest paths, so they are + interpreted as distances. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with betweenness centrality as the value. + + See Also + -------- + edge_betweenness_centrality + load_centrality + + Notes + ----- + The basic algorithm is from [1]_. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + The normalization might seem a little strange but it is + designed to make betweenness_centrality(G) be the same as + betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()). + + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + are easy to count. Undirected paths are tricky: should a path + from "u" to "v" count as 1 undirected path or as 2 directed paths? + + For betweenness_centrality we report the number of undirected + paths when G is undirected. + + For betweenness_centrality_subset the reporting is different. + If the source and target subsets are the same, then we want + to count undirected paths. But if the source and target subsets + differ -- for example, if sources is {0} and targets is {1}, + then we are only counting the paths in one direction. They are + undirected paths but we are counting them in a directed way. + To count them as undirected paths, each should count as half a path. + + References + ---------- + .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality. + Journal of Mathematical Sociology 25(2):163-177, 2001. + https://doi.org/10.1080/0022250X.2001.9990249 + .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness + Centrality and their Generic Computation. + Social Networks 30(2):136-145, 2008. + https://doi.org/10.1016/j.socnet.2007.11.001 + """ + b = dict.fromkeys(G, 0.0) # b[v]=0 for v in G + for s in sources: + # single source shortest paths + if weight is None: # use BFS + S, P, sigma, _ = shortest_path(G, s) + else: # use Dijkstra's algorithm + S, P, sigma, _ = dijkstra(G, s, weight) + b = _accumulate_subset(b, S, P, sigma, s, targets) + b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed()) + return b + + +@nx._dispatchable(edge_attrs="weight") +def edge_betweenness_centrality_subset( + G, sources, targets, normalized=False, weight=None +): + r"""Compute betweenness centrality for edges for a subset of nodes. + + .. math:: + + c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)} + + where $S$ is the set of sources, $T$ is the set of targets, + $\sigma(s, t)$ is the number of shortest $(s, t)$-paths, + and $\sigma(s, t|e)$ is the number of those paths + passing through edge $e$ [2]_. + + Parameters + ---------- + G : graph + A networkx graph. 
+ + sources: list of nodes + Nodes to use as sources for shortest paths in betweenness + + targets: list of nodes + Nodes to use as targets for shortest paths in betweenness + + normalized : bool, optional + If True the betweenness values are normalized by `2/(n(n-1))` + for graphs, and `1/(n(n-1))` for directed graphs where `n` + is the number of nodes in G. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + Weights are used to calculate weighted shortest paths, so they are + interpreted as distances. + + Returns + ------- + edges : dictionary + Dictionary of edges with Betweenness centrality as the value. + + See Also + -------- + betweenness_centrality + edge_load + + Notes + ----- + The basic algorithm is from [1]_. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + The normalization might seem a little strange but it is the same + as in edge_betweenness_centrality() and is designed to make + edge_betweenness_centrality(G) be the same as + edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()). + + References + ---------- + .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality. + Journal of Mathematical Sociology 25(2):163-177, 2001. + https://doi.org/10.1080/0022250X.2001.9990249 + .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness + Centrality and their Generic Computation. + Social Networks 30(2):136-145, 2008. + https://doi.org/10.1016/j.socnet.2007.11.001 + """ + b = dict.fromkeys(G, 0.0) # b[v]=0 for v in G + b.update(dict.fromkeys(G.edges(), 0.0)) # b[e] for e in G.edges() + for s in sources: + # single source shortest paths + if weight is None: # use BFS + S, P, sigma, _ = shortest_path(G, s) + else: # use Dijkstra's algorithm + S, P, sigma, _ = dijkstra(G, s, weight) + b = _accumulate_edges_subset(b, S, P, sigma, s, targets) + for n in G: # remove nodes to only return edges + del b[n] + b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed()) + if G.is_multigraph(): + b = _add_edge_keys(G, b, weight=weight) + return b + + +def _accumulate_subset(betweenness, S, P, sigma, s, targets): + delta = dict.fromkeys(S, 0.0) + target_set = set(targets) - {s} + while S: + w = S.pop() + if w in target_set: + coeff = (delta[w] + 1.0) / sigma[w] + else: + coeff = delta[w] / sigma[w] + for v in P[w]: + delta[v] += sigma[v] * coeff + if w != s: + betweenness[w] += delta[w] + return betweenness + + +def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets): + """edge_betweenness_centrality_subset helper.""" + delta = dict.fromkeys(S, 0) + target_set = set(targets) + while S: + w = S.pop() + for v in P[w]: + if w in target_set: + c = (sigma[v] / sigma[w]) * (1.0 + delta[w]) + else: + c = delta[w] / len(P[w]) + if (v, w) not in betweenness: + betweenness[(w, v)] += c + else: + betweenness[(v, w)] += c + delta[v] += c + if w != s: + betweenness[w] += delta[w] + return betweenness + + +def _rescale(betweenness, n, normalized, directed=False): + """betweenness_centrality_subset helper.""" + if normalized: + if n <= 2: + scale = None # no normalization b=0 for all nodes + else: + scale = 1.0 / ((n - 1) * (n - 2)) + else: # rescale by 2 for undirected graphs + if not directed: + scale = 0.5 + else: + scale = None + if scale is not None: + for v in betweenness: + betweenness[v] *= scale + return 
betweenness + + +def _rescale_e(betweenness, n, normalized, directed=False): + """edge_betweenness_centrality_subset helper.""" + if normalized: + if n <= 1: + scale = None # no normalization b=0 for all nodes + else: + scale = 1.0 / (n * (n - 1)) + else: # rescale by 2 for undirected graphs + if not directed: + scale = 0.5 + else: + scale = None + if scale is not None: + for v in betweenness: + betweenness[v] *= scale + return betweenness diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py new file mode 100644 index 0000000..1cc2f95 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/closeness.py @@ -0,0 +1,282 @@ +""" +Closeness centrality measures. +""" + +import functools + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils.decorators import not_implemented_for + +__all__ = ["closeness_centrality", "incremental_closeness_centrality"] + + +@nx._dispatchable(edge_attrs="distance") +def closeness_centrality(G, u=None, distance=None, wf_improved=True): + r"""Compute closeness centrality for nodes. + + Closeness centrality [1]_ of a node `u` is the reciprocal of the + average shortest path distance to `u` over all `n-1` reachable nodes. + + .. math:: + + C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)}, + + where `d(v, u)` is the shortest-path distance between `v` and `u`, + and `n-1` is the number of nodes reachable from `u`. Notice that the + closeness distance function computes the incoming distance to `u` + for directed graphs. To use outward distance, act on `G.reverse()`. + + Notice that higher values of closeness indicate higher centrality. + + Wasserman and Faust propose an improved formula for graphs with + more than one connected component. The result is "a ratio of the + fraction of actors in the group who are reachable, to the average + distance" from the reachable actors [2]_. You might think this + scale factor is inverted but it is not. As is, nodes from small + components receive a smaller closeness value. Letting `N` denote + the number of nodes in the graph, + + .. math:: + + C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)}, + + Parameters + ---------- + G : graph + A NetworkX graph + + u : node, optional + Return only the value for node u + + distance : edge attribute key, optional (default=None) + Use the specified edge attribute as the edge distance in shortest + path calculations. If `None` (the default) all edges have a distance of 1. + Absent edge attributes are assigned a distance of 1. Note that no check + is performed to ensure that edges have the provided attribute. + + wf_improved : bool, optional (default=True) + If True, scale by the fraction of nodes reachable. This gives the + Wasserman and Faust improved formula. For single component graphs + it is the same as the original formula. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with closeness centrality as the value. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.closeness_centrality(G) + {0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75} + + See Also + -------- + betweenness_centrality, load_centrality, eigenvector_centrality, + degree_centrality, incremental_closeness_centrality + + Notes + ----- + The closeness centrality is normalized to `(n-1)/(|G|-1)` where + `n` is the number of nodes in the connected part of graph + containing the node. 
If the graph is not completely connected,
+    this algorithm computes the closeness centrality for each
+    connected part separately scaled by that part's size.
+
+    If the 'distance' keyword is set to an edge attribute key then the
+    shortest-path length will be computed using Dijkstra's algorithm with
+    that edge attribute as the edge weight.
+
+    The closeness centrality uses *inward* distance to a node, not outward.
+    If you want to use outward distances, apply the function to `G.reverse()`.
+
+    In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the
+    outward distance rather than the inward distance. If you use a 'distance'
+    keyword and a DiGraph, your results will change between v2.2 and v2.3.
+
+    References
+    ----------
+    .. [1] Linton C. Freeman: Centrality in networks: I.
+       Conceptual clarification. Social Networks 1:215-239, 1979.
+       https://doi.org/10.1016/0378-8733(78)90021-7
+    .. [2] pg. 201 of Wasserman, S. and Faust, K.,
+       Social Network Analysis: Methods and Applications, 1994,
+       Cambridge University Press.
+    """
+    if G.is_directed():
+        G = G.reverse()  # create a reversed graph view
+
+    if distance is not None:
+        # use Dijkstra's algorithm with specified attribute as edge weight
+        path_length = functools.partial(
+            nx.single_source_dijkstra_path_length, weight=distance
+        )
+    else:
+        path_length = nx.single_source_shortest_path_length
+
+    if u is None:
+        nodes = G.nodes
+    else:
+        nodes = [u]
+    closeness_dict = {}
+    for n in nodes:
+        sp = path_length(G, n)
+        totsp = sum(sp.values())
+        len_G = len(G)
+        _closeness_centrality = 0.0
+        if totsp > 0.0 and len_G > 1:
+            _closeness_centrality = (len(sp) - 1.0) / totsp
+            # normalize to number of nodes-1 in connected part
+            if wf_improved:
+                s = (len(sp) - 1.0) / (len_G - 1)
+                _closeness_centrality *= s
+        closeness_dict[n] = _closeness_centrality
+    if u is not None:
+        return closeness_dict[u]
+    return closeness_dict
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(mutates_input=True)
+def incremental_closeness_centrality(
+    G, edge, prev_cc=None, insertion=True, wf_improved=True
+):
+    r"""Incremental closeness centrality for nodes.
+
+    Compute closeness centrality for nodes using level-based work filtering
+    as described in Incremental Algorithms for Closeness Centrality by
+    Sariyuce et al.
+
+    Level-based work filtering detects unnecessary updates to the closeness
+    centrality and filters them out.
+
+    ---
+    From "Incremental Algorithms for Closeness Centrality":
+
+    Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices
+    in V such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`.
+    Then :math:`cc[s] = cc'[s]` if and only if
+    :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.
+
+    Where :math:`dG(u, v)` denotes the length of the shortest path between
+    two vertices u, v in a graph G, cc[s] is the closeness centrality for a
+    vertex s in V, and cc'[s] is the closeness centrality for a
+    vertex s in V, with the (u, v) edge added.
+    ---
+
+    We use Theorem 1 to filter out updates when adding or removing an edge.
+    When adding an edge (u, v), we compute the shortest path lengths from all
+    other nodes to u and to v before the edge is added. When removing an edge,
+    we compute the shortest path lengths after the edge is removed. Then we
+    apply Theorem 1 to use previously computed closeness centrality for nodes
+    where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
+    undirected, unweighted graphs; the distance argument is not supported.
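+
+    For example (an editorial sketch): insert a chord into a 4-cycle and
+    update; nodes 1 and 3 satisfy Theorem 1, so their previous values are
+    reused.
+
+    >>> G = nx.cycle_graph(4)
+    >>> prev = nx.closeness_centrality(G)
+    >>> nx.incremental_closeness_centrality(G, (0, 2), prev_cc=prev)
+    {0: 1.0, 1: 0.75, 2: 1.0, 3: 0.75}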
+ + Closeness centrality [1]_ of a node `u` is the reciprocal of the + sum of the shortest path distances from `u` to all `n-1` other nodes. + Since the sum of distances depends on the number of nodes in the + graph, closeness is normalized by the sum of minimum possible + distances `n-1`. + + .. math:: + + C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)}, + + where `d(v, u)` is the shortest-path distance between `v` and `u`, + and `n` is the number of nodes in the graph. + + Notice that higher values of closeness indicate higher centrality. + + Parameters + ---------- + G : graph + A NetworkX graph + + edge : tuple + The modified edge (u, v) in the graph. + + prev_cc : dictionary + The previous closeness centrality for all nodes in the graph. + + insertion : bool, optional + If True (default) the edge was inserted, otherwise it was deleted from the graph. + + wf_improved : bool, optional (default=True) + If True, scale by the fraction of nodes reachable. This gives the + Wasserman and Faust improved formula. For single component graphs + it is the same as the original formula. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with closeness centrality as the value. + + See Also + -------- + betweenness_centrality, load_centrality, eigenvector_centrality, + degree_centrality, closeness_centrality + + Notes + ----- + The closeness centrality is normalized to `(n-1)/(|G|-1)` where + `n` is the number of nodes in the connected part of graph + containing the node. If the graph is not completely connected, + this algorithm computes the closeness centrality for each + connected part separately. + + References + ---------- + .. [1] Freeman, L.C., 1979. Centrality in networks: I. + Conceptual clarification. Social Networks 1, 215--239. + https://doi.org/10.1016/0378-8733(78)90021-7 + .. [2] Sariyuce, A.E. ; Kaya, K. ; Saule, E. ; Catalyiirek, U.V. Incremental + Algorithms for Closeness Centrality. 
2013 IEEE International Conference on Big Data + http://sariyuce.com/papers/bigdata13.pdf + """ + if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()): + raise NetworkXError("prev_cc and G do not have the same nodes") + + # Unpack edge + (u, v) = edge + path_length = nx.single_source_shortest_path_length + + if insertion: + # For edge insertion, we want shortest paths before the edge is inserted + du = path_length(G, u) + dv = path_length(G, v) + + G.add_edge(u, v) + else: + G.remove_edge(u, v) + + # For edge removal, we want shortest paths after the edge is removed + du = path_length(G, u) + dv = path_length(G, v) + + if prev_cc is None: + return nx.closeness_centrality(G) + + nodes = G.nodes() + closeness_dict = {} + for n in nodes: + if n in du and n in dv and abs(du[n] - dv[n]) <= 1: + closeness_dict[n] = prev_cc[n] + else: + sp = path_length(G, n) + totsp = sum(sp.values()) + len_G = len(G) + _closeness_centrality = 0.0 + if totsp > 0.0 and len_G > 1: + _closeness_centrality = (len(sp) - 1.0) / totsp + # normalize to number of nodes-1 in connected part + if wf_improved: + s = (len(sp) - 1.0) / (len_G - 1) + _closeness_centrality *= s + closeness_dict[n] = _closeness_centrality + + # Leave the graph as we found it + if insertion: + G.remove_edge(u, v) + else: + G.add_edge(u, v) + + return closeness_dict diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py new file mode 100644 index 0000000..bfde279 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness.py @@ -0,0 +1,342 @@ +"""Current-flow betweenness centrality measures.""" + +import networkx as nx +from networkx.algorithms.centrality.flow_matrix import ( + CGInverseLaplacian, + FullInverseLaplacian, + SuperLUInverseLaplacian, + flow_matrix_row, +) +from networkx.utils import ( + not_implemented_for, + py_random_state, + reverse_cuthill_mckee_ordering, +) + +__all__ = [ + "current_flow_betweenness_centrality", + "approximate_current_flow_betweenness_centrality", + "edge_current_flow_betweenness_centrality", +] + + +@not_implemented_for("directed") +@py_random_state(7) +@nx._dispatchable(edge_attrs="weight") +def approximate_current_flow_betweenness_centrality( + G, + normalized=True, + weight=None, + dtype=float, + solver="full", + epsilon=0.5, + kmax=10000, + seed=None, +): + r"""Compute the approximate current-flow betweenness centrality for nodes. + + Approximates the current-flow betweenness centrality within absolute + error of epsilon with high probability [1]_. + + + Parameters + ---------- + G : graph + A NetworkX graph + + normalized : bool, optional (default=True) + If True the betweenness values are normalized by 2/[(n-1)(n-2)] where + n is the number of nodes in G. + + weight : string or None, optional (default=None) + Key for edge data used as the edge weight. + If None, then use 1 as each edge weight. + The weight reflects the capacity or the strength of the + edge. + + dtype : data type (float) + Default data type for internal matrices. + Set to np.float32 for lower memory consumption. + + solver : string (default='full') + Type of linear solver to use for computing the flow matrix. + Options are "full" (uses most memory), "lu" (recommended), and + "cg" (uses least memory). + + epsilon: float + Absolute error tolerance. + + kmax: int + Maximum number of sample node pairs to use for approximation. 
+ + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with betweenness centrality as the value. + + See Also + -------- + current_flow_betweenness_centrality + + Notes + ----- + The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$ + and the space required is $O(m)$ for $n$ nodes and $m$ edges. + + If the edges have a 'weight' attribute they will be used as + weights in this algorithm. Unspecified weights are set to 1. + + References + ---------- + .. [1] Ulrik Brandes and Daniel Fleischer: + Centrality Measures Based on Current Flow. + Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). + LNCS 3404, pp. 533-544. Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31856-9_44 + """ + import numpy as np + + if not nx.is_connected(G): + raise nx.NetworkXError("Graph not connected.") + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } + n = G.number_of_nodes() + ordering = list(reverse_cuthill_mckee_ordering(G)) + # make a copy with integer labels according to rcm ordering + # this could be done without a copy if we really wanted to + H = nx.relabel_nodes(G, dict(zip(ordering, range(n)))) + L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc") + L = L.astype(dtype) + C = solvername[solver](L, dtype=dtype) # initialize solver + betweenness = dict.fromkeys(H, 0.0) + nb = (n - 1.0) * (n - 2.0) # normalization factor + cstar = n * (n - 1) / nb + l = 1 # parameter in approximation, adjustable + k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n))) + if k > kmax: + msg = f"Number random pairs k>kmax ({k}>{kmax}) " + raise nx.NetworkXError(msg, "Increase kmax or epsilon") + cstar2k = cstar / (2 * k) + for _ in range(k): + s, t = pair = seed.sample(range(n), 2) + b = np.zeros(n, dtype=dtype) + b[s] = 1 + b[t] = -1 + p = C.solve(b) + for v in H: + if v in pair: + continue + for nbr in H[v]: + w = H[v][nbr].get(weight, 1.0) + betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k) + if normalized: + factor = 1.0 + else: + factor = nb / 2.0 + # remap to original node names and "unnormalize" if required + return {ordering[k]: v * factor for k, v in betweenness.items()} + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): + r"""Compute current-flow betweenness centrality for nodes. + + Current-flow betweenness centrality uses an electrical current + model for information spreading in contrast to betweenness + centrality which uses shortest paths. + + Current-flow betweenness centrality is also known as + random-walk betweenness centrality [2]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + normalized : bool, optional (default=True) + If True the betweenness values are normalized by 2/[(n-1)(n-2)] where + n is the number of nodes in G. + + weight : string or None, optional (default=None) + Key for edge data used as the edge weight. + If None, then use 1 as each edge weight. + The weight reflects the capacity or the strength of the + edge. + + dtype : data type (float) + Default data type for internal matrices. + Set to np.float32 for lower memory consumption. + + solver : string (default='full') + Type of linear solver to use for computing the flow matrix. 
+ Options are "full" (uses most memory), "lu" (recommended), and + "cg" (uses least memory). + + Returns + ------- + nodes : dictionary + Dictionary of nodes with betweenness centrality as the value. + + See Also + -------- + approximate_current_flow_betweenness_centrality + betweenness_centrality + edge_betweenness_centrality + edge_current_flow_betweenness_centrality + + Notes + ----- + Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$ + time [1]_, where $I(n-1)$ is the time needed to compute the + inverse Laplacian. For a full matrix this is $O(n^3)$ but using + sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the + Laplacian matrix condition number. + + The space required is $O(nw)$ where $w$ is the width of the sparse + Laplacian matrix. Worse case is $w=n$ for $O(n^2)$. + + If the edges have a 'weight' attribute they will be used as + weights in this algorithm. Unspecified weights are set to 1. + + References + ---------- + .. [1] Centrality Measures Based on Current Flow. + Ulrik Brandes and Daniel Fleischer, + Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). + LNCS 3404, pp. 533-544. Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31856-9_44 + + .. [2] A measure of betweenness centrality based on random walks, + M. E. J. Newman, Social Networks 27, 39-54 (2005). + """ + if not nx.is_connected(G): + raise nx.NetworkXError("Graph not connected.") + N = G.number_of_nodes() + ordering = list(reverse_cuthill_mckee_ordering(G)) + # make a copy with integer labels according to rcm ordering + # this could be done without a copy if we really wanted to + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): + pos = dict(zip(row.argsort()[::-1], range(N))) + for i in range(N): + betweenness[s] += (i - pos[i]) * row.item(i) + betweenness[t] += (N - i - 1 - pos[i]) * row.item(i) + if normalized: + nb = (N - 1.0) * (N - 2.0) # normalization factor + else: + nb = 2.0 + return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()} + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def edge_current_flow_betweenness_centrality( + G, normalized=True, weight=None, dtype=float, solver="full" +): + r"""Compute current-flow betweenness centrality for edges. + + Current-flow betweenness centrality uses an electrical current + model for information spreading in contrast to betweenness + centrality which uses shortest paths. + + Current-flow betweenness centrality is also known as + random-walk betweenness centrality [2]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + normalized : bool, optional (default=True) + If True the betweenness values are normalized by 2/[(n-1)(n-2)] where + n is the number of nodes in G. + + weight : string or None, optional (default=None) + Key for edge data used as the edge weight. + If None, then use 1 as each edge weight. + The weight reflects the capacity or the strength of the + edge. + + dtype : data type (default=float) + Default data type for internal matrices. + Set to np.float32 for lower memory consumption. + + solver : string (default='full') + Type of linear solver to use for computing the flow matrix. + Options are "full" (uses most memory), "lu" (recommended), and + "cg" (uses least memory). + + Returns + ------- + nodes : dictionary + Dictionary of edge tuples with betweenness centrality as the value. 
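+
+    For example (an editorial sketch; requires SciPy for the linear solvers):
+
+    >>> G = nx.path_graph(4)
+    >>> flow = nx.edge_current_flow_betweenness_centrality(G)  # doctest: +SKIP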
+ + Raises + ------ + NetworkXError + The algorithm does not support DiGraphs. + If the input graph is an instance of DiGraph class, NetworkXError + is raised. + + See Also + -------- + betweenness_centrality + edge_betweenness_centrality + current_flow_betweenness_centrality + + Notes + ----- + Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$ + time [1]_, where $I(n-1)$ is the time needed to compute the + inverse Laplacian. For a full matrix this is $O(n^3)$ but using + sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the + Laplacian matrix condition number. + + The space required is $O(nw)$ where $w$ is the width of the sparse + Laplacian matrix. Worse case is $w=n$ for $O(n^2)$. + + If the edges have a 'weight' attribute they will be used as + weights in this algorithm. Unspecified weights are set to 1. + + References + ---------- + .. [1] Centrality Measures Based on Current Flow. + Ulrik Brandes and Daniel Fleischer, + Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). + LNCS 3404, pp. 533-544. Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31856-9_44 + + .. [2] A measure of betweenness centrality based on random walks, + M. E. J. Newman, Social Networks 27, 39-54 (2005). + """ + if not nx.is_connected(G): + raise nx.NetworkXError("Graph not connected.") + N = G.number_of_nodes() + ordering = list(reverse_cuthill_mckee_ordering(G)) + # make a copy with integer labels according to rcm ordering + # this could be done without a copy if we really wanted to + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + edges = (tuple(sorted((u, v))) for u, v in H.edges()) + betweenness = dict.fromkeys(edges, 0.0) + if normalized: + nb = (N - 1.0) * (N - 2.0) # normalization factor + else: + nb = 2.0 + for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): + pos = dict(zip(row.argsort()[::-1], range(1, N + 1))) + for i in range(N): + betweenness[e] += (i + 1 - pos[i]) * row.item(i) + betweenness[e] += (N - i - pos[i]) * row.item(i) + betweenness[e] /= nb + return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py new file mode 100644 index 0000000..911718c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_betweenness_subset.py @@ -0,0 +1,227 @@ +"""Current-flow betweenness centrality measures for subsets of nodes.""" + +import networkx as nx +from networkx.algorithms.centrality.flow_matrix import flow_matrix_row +from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering + +__all__ = [ + "current_flow_betweenness_centrality_subset", + "edge_current_flow_betweenness_centrality_subset", +] + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): + r"""Compute current-flow betweenness centrality for subsets of nodes. + + Current-flow betweenness centrality uses an electrical current + model for information spreading in contrast to betweenness + centrality which uses shortest paths. + + Current-flow betweenness centrality is also known as + random-walk betweenness centrality [2]_. 
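+
+    For example (an editorial sketch; requires SciPy): restrict the current to
+    flow from node 0 into node 3 only.
+
+    >>> G = nx.cycle_graph(5)
+    >>> cfb = nx.current_flow_betweenness_centrality_subset(
+    ...     G, sources=[0], targets=[3]
+    ... )  # doctest: +SKIP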
+ + Parameters + ---------- + G : graph + A NetworkX graph + + sources: list of nodes + Nodes to use as sources for current + + targets: list of nodes + Nodes to use as sinks for current + + normalized : bool, optional (default=True) + If True the betweenness values are normalized by b=b/(n-1)(n-2) where + n is the number of nodes in G. + + weight : string or None, optional (default=None) + Key for edge data used as the edge weight. + If None, then use 1 as each edge weight. + The weight reflects the capacity or the strength of the + edge. + + dtype: data type (float) + Default data type for internal matrices. + Set to np.float32 for lower memory consumption. + + solver: string (default='lu') + Type of linear solver to use for computing the flow matrix. + Options are "full" (uses most memory), "lu" (recommended), and + "cg" (uses least memory). + + Returns + ------- + nodes : dictionary + Dictionary of nodes with betweenness centrality as the value. + + See Also + -------- + approximate_current_flow_betweenness_centrality + betweenness_centrality + edge_betweenness_centrality + edge_current_flow_betweenness_centrality + + Notes + ----- + Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$ + time [1]_, where $I(n-1)$ is the time needed to compute the + inverse Laplacian. For a full matrix this is $O(n^3)$ but using + sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the + Laplacian matrix condition number. + + The space required is $O(nw)$ where $w$ is the width of the sparse + Laplacian matrix. Worse case is $w=n$ for $O(n^2)$. + + If the edges have a 'weight' attribute they will be used as + weights in this algorithm. Unspecified weights are set to 1. + + References + ---------- + .. [1] Centrality Measures Based on Current Flow. + Ulrik Brandes and Daniel Fleischer, + Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). + LNCS 3404, pp. 533-544. Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31856-9_44 + + .. [2] A measure of betweenness centrality based on random walks, + M. E. J. Newman, Social Networks 27, 39-54 (2005). + """ + import numpy as np + + from networkx.utils import reverse_cuthill_mckee_ordering + + if not nx.is_connected(G): + raise nx.NetworkXError("Graph not connected.") + N = G.number_of_nodes() + ordering = list(reverse_cuthill_mckee_ordering(G)) + # make a copy with integer labels according to rcm ordering + # this could be done without a copy if we really wanted to + mapping = dict(zip(ordering, range(N))) + H = nx.relabel_nodes(G, mapping) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H + for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): + for ss in sources: + i = mapping[ss] + for tt in targets: + j = mapping[tt] + betweenness[s] += 0.5 * abs(row.item(i) - row.item(j)) + betweenness[t] += 0.5 * abs(row.item(i) - row.item(j)) + if normalized: + nb = (N - 1.0) * (N - 2.0) # normalization factor + else: + nb = 2.0 + for node in H: + betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N) + return {ordering[node]: value for node, value in betweenness.items()} + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def edge_current_flow_betweenness_centrality_subset( + G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu" +): + r"""Compute current-flow betweenness centrality for edges using subsets + of nodes. 
+ + Current-flow betweenness centrality uses an electrical current + model for information spreading in contrast to betweenness + centrality which uses shortest paths. + + Current-flow betweenness centrality is also known as + random-walk betweenness centrality [2]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + sources: list of nodes + Nodes to use as sources for current + + targets: list of nodes + Nodes to use as sinks for current + + normalized : bool, optional (default=True) + If True the betweenness values are normalized by b=b/(n-1)(n-2) where + n is the number of nodes in G. + + weight : string or None, optional (default=None) + Key for edge data used as the edge weight. + If None, then use 1 as each edge weight. + The weight reflects the capacity or the strength of the + edge. + + dtype: data type (float) + Default data type for internal matrices. + Set to np.float32 for lower memory consumption. + + solver: string (default='lu') + Type of linear solver to use for computing the flow matrix. + Options are "full" (uses most memory), "lu" (recommended), and + "cg" (uses least memory). + + Returns + ------- + nodes : dict + Dictionary of edge tuples with betweenness centrality as the value. + + See Also + -------- + betweenness_centrality + edge_betweenness_centrality + current_flow_betweenness_centrality + + Notes + ----- + Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$ + time [1]_, where $I(n-1)$ is the time needed to compute the + inverse Laplacian. For a full matrix this is $O(n^3)$ but using + sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the + Laplacian matrix condition number. + + The space required is $O(nw)$ where $w$ is the width of the sparse + Laplacian matrix. Worse case is $w=n$ for $O(n^2)$. + + If the edges have a 'weight' attribute they will be used as + weights in this algorithm. Unspecified weights are set to 1. + + References + ---------- + .. [1] Centrality Measures Based on Current Flow. + Ulrik Brandes and Daniel Fleischer, + Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). + LNCS 3404, pp. 533-544. Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31856-9_44 + + .. [2] A measure of betweenness centrality based on random walks, + M. E. J. Newman, Social Networks 27, 39-54 (2005). 
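+
+    For example (an editorial sketch; requires SciPy): on a 4-cycle, unit
+    current injected at node 0 and removed at node 2 splits evenly over the
+    two two-edge routes, so each edge should carry about half the current.
+
+    >>> G = nx.cycle_graph(4)
+    >>> ecf = nx.edge_current_flow_betweenness_centrality_subset(
+    ...     G, sources=[0], targets=[2]
+    ... )  # doctest: +SKIP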
+    """
+    import numpy as np
+
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Graph not connected.")
+    N = G.number_of_nodes()
+    ordering = list(reverse_cuthill_mckee_ordering(G))
+    # make a copy with integer labels according to rcm ordering
+    # this could be done without a copy if we really wanted to
+    mapping = dict(zip(ordering, range(N)))
+    H = nx.relabel_nodes(G, mapping)
+    edges = (tuple(sorted((u, v))) for u, v in H.edges())
+    betweenness = dict.fromkeys(edges, 0.0)
+    if normalized:
+        nb = (N - 1.0) * (N - 2.0)  # normalization factor
+    else:
+        nb = 2.0
+    for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
+        for ss in sources:
+            i = mapping[ss]
+            for tt in targets:
+                j = mapping[tt]
+                betweenness[e] += 0.5 * abs(row.item(i) - row.item(j))
+        betweenness[e] /= nb
+    return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()}
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
new file mode 100644
index 0000000..67f8639
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/current_flow_closeness.py
@@ -0,0 +1,96 @@
+"""Current-flow closeness centrality measures."""
+
+import networkx as nx
+from networkx.algorithms.centrality.flow_matrix import (
+    CGInverseLaplacian,
+    FullInverseLaplacian,
+    SuperLUInverseLaplacian,
+)
+from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
+
+__all__ = ["current_flow_closeness_centrality", "information_centrality"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
+    """Compute current-flow closeness centrality for nodes.
+
+    Current-flow closeness centrality is a variant of closeness
+    centrality based on effective resistance between nodes in
+    a network. This metric is also known as information centrality.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.
+
+    weight : None or string, optional (default=None)
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+      The weight reflects the capacity or the strength of the
+      edge.
+
+    dtype : data type (default=float)
+      Default data type for internal matrices.
+      Set to np.float32 for lower memory consumption.
+
+    solver : string (default='lu')
+      Type of linear solver to use for computing the flow matrix.
+      Options are "full" (uses most memory), "lu" (recommended), and
+      "cg" (uses least memory).
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with current flow closeness centrality as the value.
+
+    See Also
+    --------
+    closeness_centrality
+
+    Notes
+    -----
+    The algorithm is from Brandes [1]_.
+
+    See also [2]_ for the original definition of information centrality.
+
+    References
+    ----------
+    .. [1] Ulrik Brandes and Daniel Fleischer,
+       Centrality Measures Based on Current Flow.
+       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
+       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
+       https://doi.org/10.1007/978-3-540-31856-9_44
+
+    .. [2] Karen Stephenson and Marvin Zelen:
+       Rethinking centrality: Methods and examples.
+       Social Networks 11(1):1-37, 1989.
+ https://doi.org/10.1016/0378-8733(89)90016-6 + """ + if not nx.is_connected(G): + raise nx.NetworkXError("Graph not connected.") + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } + N = G.number_of_nodes() + ordering = list(reverse_cuthill_mckee_ordering(G)) + # make a copy with integer labels according to rcm ordering + # this could be done without a copy if we really wanted to + H = nx.relabel_nodes(G, dict(zip(ordering, range(N)))) + betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H + N = H.number_of_nodes() + L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc") + L = L.astype(dtype) + C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver + for v in H: + col = C2.get_row(v) + for w in H: + betweenness[v] += col.item(v) - 2 * col.item(w) + betweenness[w] += col.item(v) + return {ordering[node]: 1 / value for node, value in betweenness.items()} + + +information_centrality = current_flow_closeness_centrality diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py new file mode 100644 index 0000000..395f1ac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/degree_alg.py @@ -0,0 +1,150 @@ +"""Degree centrality measures.""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"] + + +@nx._dispatchable +def degree_centrality(G): + """Compute the degree centrality for nodes. + + The degree centrality for a node v is the fraction of nodes it + is connected to. + + Parameters + ---------- + G : graph + A networkx graph + + Returns + ------- + nodes : dictionary + Dictionary of nodes with degree centrality as the value. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.degree_centrality(G) + {0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666} + + See Also + -------- + betweenness_centrality, load_centrality, eigenvector_centrality + + Notes + ----- + The degree centrality values are normalized by dividing by the maximum + possible degree in a simple graph n-1 where n is the number of nodes in G. + + For multigraphs or graphs with self loops the maximum degree might + be higher than n-1 and values of degree centrality greater than 1 + are possible. + """ + if len(G) <= 1: + return dict.fromkeys(G, 1) + + s = 1.0 / (len(G) - 1.0) + centrality = {n: d * s for n, d in G.degree()} + return centrality + + +@not_implemented_for("undirected") +@nx._dispatchable +def in_degree_centrality(G): + """Compute the in-degree centrality for nodes. + + The in-degree centrality for a node v is the fraction of nodes its + incoming edges are connected to. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + nodes : dictionary + Dictionary of nodes with in-degree centrality as values. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. 
+ + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.in_degree_centrality(G) + {0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666} + + See Also + -------- + degree_centrality, out_degree_centrality + + Notes + ----- + The degree centrality values are normalized by dividing by the maximum + possible degree in a simple graph n-1 where n is the number of nodes in G. + + For multigraphs or graphs with self loops the maximum degree might + be higher than n-1 and values of degree centrality greater than 1 + are possible. + """ + if len(G) <= 1: + return dict.fromkeys(G, 1) + + s = 1.0 / (len(G) - 1.0) + centrality = {n: d * s for n, d in G.in_degree()} + return centrality + + +@not_implemented_for("undirected") +@nx._dispatchable +def out_degree_centrality(G): + """Compute the out-degree centrality for nodes. + + The out-degree centrality for a node v is the fraction of nodes its + outgoing edges are connected to. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + nodes : dictionary + Dictionary of nodes with out-degree centrality as values. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.out_degree_centrality(G) + {0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0} + + See Also + -------- + degree_centrality, in_degree_centrality + + Notes + ----- + The degree centrality values are normalized by dividing by the maximum + possible degree in a simple graph n-1 where n is the number of nodes in G. + + For multigraphs or graphs with self loops the maximum degree might + be higher than n-1 and values of degree centrality greater than 1 + are possible. + """ + if len(G) <= 1: + return dict.fromkeys(G, 1) + + s = 1.0 / (len(G) - 1.0) + centrality = {n: d * s for n, d in G.out_degree()} + return centrality diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py new file mode 100644 index 0000000..a3fa685 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/dispersion.py @@ -0,0 +1,107 @@ +from itertools import combinations + +import networkx as nx + +__all__ = ["dispersion"] + + +@nx._dispatchable +def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0): + r"""Calculate dispersion between `u` and `v` in `G`. + + A link between two actors (`u` and `v`) has a high dispersion when their + mutual ties (`s` and `t`) are not well connected with each other. + + Parameters + ---------- + G : graph + A NetworkX graph. + u : node, optional + The source for the dispersion score (e.g. ego node of the network). + v : node, optional + The target of the dispersion score if specified. + normalized : bool + If True (default) normalize by the embeddedness of the nodes (u and v). + alpha, b, c : float + Parameters for the normalization procedure. When `normalized` is True, + the dispersion value is normalized by:: + + result = ((dispersion + b) ** alpha) / (embeddedness + c) + + as long as the denominator is nonzero. + + Returns + ------- + nodes : dictionary + If u (v) is specified, returns a dictionary of nodes with dispersion + score for all "target" ("source") nodes. If neither u nor v is + specified, returns a dictionary of dictionaries for all nodes 'u' in the + graph with a dispersion score for each node 'v'. 
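+
+    Examples
+    --------
+    As a small sanity check: in a 4-cycle, the two mutual neighbors of a
+    pair of opposite nodes are neither adjacent nor linked through a third
+    common tie, giving one "tick" against an embeddedness of 2, so the
+    normalized dispersion is 1/2:
+
+    >>> G = nx.cycle_graph(4)
+    >>> nx.dispersion(G, 0, 2)
+    0.5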
+
+    Notes
+    -----
+    This implementation follows Lars Backstrom and Jon Kleinberg [1]_. Typical
+    usage would be to run dispersion on the ego network $G_u$ if $u$ were
+    specified. Running :func:`dispersion` with neither $u$ nor $v$ specified
+    can take some time to complete.
+
+    References
+    ----------
+    .. [1] Romantic Partnerships and the Dispersion of Social Ties:
+        A Network Analysis of Relationship Status on Facebook.
+        Lars Backstrom, Jon Kleinberg.
+        https://arxiv.org/pdf/1310.6753v1.pdf
+
+    """
+
+    def _dispersion(G_u, u, v):
+        """dispersion for all nodes 'v' in an ego network G_u of node 'u'"""
+        u_nbrs = set(G_u[u])
+        ST = {n for n in G_u[v] if n in u_nbrs}
+        set_uv = {u, v}
+        # all possible ties of connections that u and v share
+        possib = combinations(ST, 2)
+        total = 0
+        for s, t in possib:
+            # neighbors of s that are in G_u, not including u and v
+            nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
+            # s and t are not directly connected
+            if t not in nbrs_s:
+                # s and t do not share a connection
+                if nbrs_s.isdisjoint(G_u[t]):
+                    # tick for disp(u, v)
+                    total += 1
+        # neighbors that u and v share
+        embeddedness = len(ST)
+
+        dispersion_val = total
+        if normalized:
+            dispersion_val = (total + b) ** alpha
+            if embeddedness + c != 0:
+                dispersion_val /= embeddedness + c
+
+        return dispersion_val
+
+    if u is None:
+        # v and u are not specified
+        if v is None:
+            results = {n: {} for n in G}
+            for u in G:
+                for v in G[u]:
+                    results[u][v] = _dispersion(G, u, v)
+        # u is not specified, but v is
+        else:
+            results = dict.fromkeys(G[v], {})
+            for u in G[v]:
+                results[u] = _dispersion(G, v, u)
+    else:
+        # u is specified with no target v
+        if v is None:
+            results = dict.fromkeys(G[u], {})
+            for v in G[u]:
+                results[v] = _dispersion(G, u, v)
+        # both u and v are specified
+        else:
+            results = _dispersion(G, u, v)
+
+    return results
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py
new file mode 100644
index 0000000..0bfb974
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/eigenvector.py
@@ -0,0 +1,357 @@
+"""Functions for computing eigenvector centrality."""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
+    r"""Compute the eigenvector centrality for the graph G.
+
+    Eigenvector centrality computes the centrality for a node by adding
+    the centrality of its predecessors. The centrality for node $i$ is the
+    $i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
+    of maximum modulus that is positive. Such an eigenvector $x$ is
+    defined up to a multiplicative constant by the equation
+
+    .. math::
+
+         \lambda x^T = x^T A,
+
+    where $A$ is the adjacency matrix of the graph G. By definition of
+    row-column product, the equation above is equivalent to
+
+    .. math::
+
+        \lambda x_i = \sum_{j\to i}x_j.
+
+    That is, adding the eigenvector centralities of the predecessors of
+    $i$ one obtains the eigenvector centrality of $i$ multiplied by
+    $\lambda$. In the case of undirected graphs, $x$ also solves the familiar
+    right-eigenvector equation $Ax = \lambda x$.
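+    (This is because $A$ is symmetric for undirected graphs, so left and
+    right eigenvectors coincide.)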
+ + By virtue of the Perron–Frobenius theorem [1]_, if G is strongly + connected there is a unique eigenvector $x$, and all its entries + are strictly positive. + + If G is not strongly connected there might be several left + eigenvectors associated with $\lambda$, and some of their elements + might be zero. + + Parameters + ---------- + G : graph + A networkx graph. + + max_iter : integer, optional (default=100) + Maximum number of power iterations. + + tol : float, optional (default=1.0e-6) + Error tolerance (in Euclidean norm) used to check convergence in + power iteration. + + nstart : dictionary, optional (default=None) + Starting value of power iteration for each node. Must have a nonzero + projection on the desired eigenvector for the power method to converge. + If None, this implementation uses an all-ones vector, which is a safe + choice. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. Otherwise holds the + name of the edge attribute used as weight. In this measure the + weight is interpreted as the connection strength. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with eigenvector centrality as the value. The + associated vector has unit Euclidean norm and the values are + nonegative. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> centrality = nx.eigenvector_centrality(G) + >>> sorted((v, f"{c:0.2f}") for v, c in centrality.items()) + [(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')] + + Raises + ------ + NetworkXPointlessConcept + If the graph G is the null graph. + + NetworkXError + If each value in `nstart` is zero. + + PowerIterationFailedConvergence + If the algorithm fails to converge to the specified tolerance + within the specified number of iterations of the power iteration + method. + + See Also + -------- + eigenvector_centrality_numpy + :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank` + :func:`~networkx.algorithms.link_analysis.hits_alg.hits` + + Notes + ----- + Eigenvector centrality was introduced by Landau [2]_ for chess + tournaments. It was later rediscovered by Wei [3]_ and then + popularized by Kendall [4]_ in the context of sport ranking. Berge + introduced a general definition for graphs based on social connections + [5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made + it popular in link analysis. + + This function computes the left dominant eigenvector, which corresponds + to adding the centrality of predecessors: this is the usual approach. + To add the centrality of successors first reverse the graph with + ``G.reverse()``. + + The implementation uses power iteration [7]_ to compute a dominant + eigenvector starting from the provided vector `nstart`. Convergence is + guaranteed as long as `nstart` has a nonzero projection on a dominant + eigenvector, which certainly happens using the default value. + + The method stops when the change in the computed vector between two + iterations is smaller than an error tolerance of ``G.number_of_nodes() + * tol`` or after ``max_iter`` iterations, but in the second case it + raises an exception. + + This implementation uses $(A + I)$ rather than the adjacency matrix + $A$ because the change preserves eigenvectors, but it shifts the + spectrum, thus guaranteeing convergence even for networks with + negative eigenvalues of maximum modulus. + + References + ---------- + .. [1] Abraham Berman and Robert J. Plemmons. + "Nonnegative Matrices in the Mathematical Sciences." 
+ Classics in Applied Mathematics. SIAM, 1994. + + .. [2] Edmund Landau. + "Zur relativen Wertbemessung der Turnierresultate." + Deutsches Wochenschach, 11:366–369, 1895. + + .. [3] Teh-Hsing Wei. + "The Algebraic Foundations of Ranking Theory." + PhD thesis, University of Cambridge, 1952. + + .. [4] Maurice G. Kendall. + "Further contributions to the theory of paired comparisons." + Biometrics, 11(1):43–62, 1955. + https://www.jstor.org/stable/3001479 + + .. [5] Claude Berge + "Théorie des graphes et ses applications." + Dunod, Paris, France, 1958. + + .. [6] Phillip Bonacich. + "Technique for analyzing overlapping memberships." + Sociological Methodology, 4:176–185, 1972. + https://www.jstor.org/stable/270732 + + .. [7] Power iteration:: https://en.wikipedia.org/wiki/Power_iteration + + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept( + "cannot compute centrality for the null graph" + ) + # If no initial vector is provided, start with the all-ones vector. + if nstart is None: + nstart = dict.fromkeys(G, 1) + if all(v == 0 for v in nstart.values()): + raise nx.NetworkXError("initial vector cannot have all zero values") + # Normalize the initial vector so that each entry is in [0, 1]. This is + # guaranteed to never have a divide-by-zero error by the previous line. + nstart_sum = sum(nstart.values()) + x = {k: v / nstart_sum for k, v in nstart.items()} + nnodes = G.number_of_nodes() + # make up to max_iter iterations + for _ in range(max_iter): + xlast = x + x = xlast.copy() # Start with xlast times I to iterate with (A+I) + # do the multiplication y^T = x^T A (left eigenvector) + for n in x: + for nbr in G[n]: + w = G[n][nbr].get(weight, 1) if weight else 1 + x[nbr] += xlast[n] * w + # Normalize the vector. The normalization denominator `norm` + # should never be zero by the Perron--Frobenius + # theorem. However, in case it is due to numerical error, we + # assume the norm to be one instead. + norm = math.hypot(*x.values()) or 1 + x = {k: v / norm for k, v in x.items()} + # Check for convergence (in the L_1 norm). + if sum(abs(x[n] - xlast[n]) for n in x) < nnodes * tol: + return x + raise nx.PowerIterationFailedConvergence(max_iter) + + +@nx._dispatchable(edge_attrs="weight") +def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0): + r"""Compute the eigenvector centrality for the graph `G`. + + Eigenvector centrality computes the centrality for a node by adding + the centrality of its predecessors. The centrality for node $i$ is the + $i$-th element of a left eigenvector associated with the eigenvalue $\lambda$ + of maximum modulus that is positive. Such an eigenvector $x$ is + defined up to a multiplicative constant by the equation + + .. math:: + + \lambda x^T = x^T A, + + where $A$ is the adjacency matrix of the graph `G`. By definition of + row-column product, the equation above is equivalent to + + .. math:: + + \lambda x_i = \sum_{j\to i}x_j. + + That is, adding the eigenvector centralities of the predecessors of + $i$ one obtains the eigenvector centrality of $i$ multiplied by + $\lambda$. In the case of undirected graphs, $x$ also solves the familiar + right-eigenvector equation $Ax = \lambda x$. + + By virtue of the Perron--Frobenius theorem [1]_, if `G` is (strongly) + connected, there is a unique eigenvector $x$, and all its entries + are strictly positive. + + However, if `G` is not (strongly) connected, there might be several left + eigenvectors associated with $\lambda$, and some of their elements + might be zero. 
+ Depending on the method used to choose eigenvectors, round-off error can affect + which of the infinitely many eigenvectors is reported. + This can lead to inconsistent results for the same graph, + which the underlying implementation is not robust to. + For this reason, only (strongly) connected graphs are accepted. + + Parameters + ---------- + G : graph + A connected NetworkX graph. + + weight : None or string, optional (default=None) + If ``None``, all edge weights are considered equal. Otherwise holds the + name of the edge attribute used as weight. In this measure the + weight is interpreted as the connection strength. + + max_iter : integer, optional (default=50) + Maximum number of Arnoldi update iterations allowed. + + tol : float, optional (default=0) + Relative accuracy for eigenvalues (stopping criterion). + The default value of 0 implies machine precision. + + Returns + ------- + nodes : dict of nodes + Dictionary of nodes with eigenvector centrality as the value. The + associated vector has unit Euclidean norm and the values are + nonnegative. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> centrality = nx.eigenvector_centrality_numpy(G) + >>> print([f"{node} {centrality[node]:0.2f}" for node in centrality]) + ['0 0.37', '1 0.60', '2 0.60', '3 0.37'] + + Raises + ------ + NetworkXPointlessConcept + If the graph `G` is the null graph. + + ArpackNoConvergence + When the requested convergence is not obtained. The currently + converged eigenvalues and eigenvectors can be found as + eigenvalues and eigenvectors attributes of the exception object. + + AmbiguousSolution + If `G` is not connected. + + See Also + -------- + :func:`scipy.sparse.linalg.eigs` + eigenvector_centrality + :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank` + :func:`~networkx.algorithms.link_analysis.hits_alg.hits` + + Notes + ----- + Eigenvector centrality was introduced by Landau [2]_ for chess + tournaments. It was later rediscovered by Wei [3]_ and then + popularized by Kendall [4]_ in the context of sport ranking. Berge + introduced a general definition for graphs based on social connections + [5]_. Bonacich [6]_ reintroduced again eigenvector centrality and made + it popular in link analysis. + + This function computes the left dominant eigenvector, which corresponds + to adding the centrality of predecessors: this is the usual approach. + To add the centrality of successors first reverse the graph with + ``G.reverse()``. + + This implementation uses the + :func:`SciPy sparse eigenvalue solver` (ARPACK) + to find the largest eigenvalue/eigenvector pair using Arnoldi iterations + [7]_. + + References + ---------- + .. [1] Abraham Berman and Robert J. Plemmons. + "Nonnegative Matrices in the Mathematical Sciences". + Classics in Applied Mathematics. SIAM, 1994. + + .. [2] Edmund Landau. + "Zur relativen Wertbemessung der Turnierresultate". + Deutsches Wochenschach, 11:366--369, 1895. + + .. [3] Teh-Hsing Wei. + "The Algebraic Foundations of Ranking Theory". + PhD thesis, University of Cambridge, 1952. + + .. [4] Maurice G. Kendall. + "Further contributions to the theory of paired comparisons". + Biometrics, 11(1):43--62, 1955. + https://www.jstor.org/stable/3001479 + + .. [5] Claude Berge. + "Théorie des graphes et ses applications". + Dunod, Paris, France, 1958. + + .. [6] Phillip Bonacich. + "Technique for analyzing overlapping memberships". + Sociological Methodology, 4:176--185, 1972. + https://www.jstor.org/stable/270732 + + .. [7] Arnoldi, W. E. (1951). 
+ "The principle of minimized iterations in the solution of the matrix eigenvalue problem". + Quarterly of Applied Mathematics. 9 (1): 17--29. + https://doi.org/10.1090/qam/42792 + """ + import numpy as np + import scipy as sp + + if len(G) == 0: + raise nx.NetworkXPointlessConcept( + "cannot compute centrality for the null graph" + ) + connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G) + if not connected: # See gh-6888. + raise nx.AmbiguousSolution( + "`eigenvector_centrality_numpy` does not give consistent results for disconnected graphs" + ) + M = nx.to_scipy_sparse_array(G, nodelist=list(G), weight=weight, dtype=float) + _, eigenvector = sp.sparse.linalg.eigs( + M.T, k=1, which="LR", maxiter=max_iter, tol=tol + ) + largest = eigenvector.flatten().real + norm = np.sign(largest.sum()) * sp.linalg.norm(largest) + return dict(zip(G, (largest / norm).tolist())) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py new file mode 100644 index 0000000..e72b5e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/flow_matrix.py @@ -0,0 +1,130 @@ +# Helpers for current-flow betweenness and current-flow closeness +# Lazy computations for inverse Laplacian and flow-matrix rows. +import networkx as nx + + +@nx._dispatchable(edge_attrs="weight") +def flow_matrix_row(G, weight=None, dtype=float, solver="lu"): + # Generate a row of the current-flow matrix + import numpy as np + + solvername = { + "full": FullInverseLaplacian, + "lu": SuperLUInverseLaplacian, + "cg": CGInverseLaplacian, + } + n = G.number_of_nodes() + L = nx.laplacian_matrix(G, nodelist=range(n), weight=weight).asformat("csc") + L = L.astype(dtype) + C = solvername[solver](L, dtype=dtype) # initialize solver + w = C.w # w is the Laplacian matrix width + # row-by-row flow matrix + for u, v in sorted(sorted((u, v)) for u, v in G.edges()): + B = np.zeros(w, dtype=dtype) + c = G[u][v].get(weight, 1.0) + B[u % w] = c + B[v % w] = -c + # get only the rows needed in the inverse laplacian + # and multiply to get the flow matrix row + row = B @ C.get_rows(u, v) + yield row, (u, v) + + +# Class to compute the inverse laplacian only for specified rows +# Allows computation of the current-flow matrix without storing entire +# inverse laplacian matrix +class InverseLaplacian: + def __init__(self, L, width=None, dtype=None): + global np + import numpy as np + + (n, n) = L.shape + self.dtype = dtype + self.n = n + if width is None: + self.w = self.width(L) + else: + self.w = width + self.C = np.zeros((self.w, n), dtype=dtype) + self.L1 = L[1:, 1:] + self.init_solver(L) + + def init_solver(self, L): + pass + + def solve(self, r): + raise nx.NetworkXError("Implement solver") + + def solve_inverse(self, r): + raise nx.NetworkXError("Implement solver") + + def get_rows(self, r1, r2): + for r in range(r1, r2 + 1): + self.C[r % self.w, 1:] = self.solve_inverse(r) + return self.C + + def get_row(self, r): + self.C[r % self.w, 1:] = self.solve_inverse(r) + return self.C[r % self.w] + + def width(self, L): + m = 0 + for i, row in enumerate(L): + w = 0 + y = np.nonzero(row)[-1] + if len(y) > 0: + v = y - i + w = v.max() - v.min() + 1 + m = max(w, m) + return m + + +class FullInverseLaplacian(InverseLaplacian): + def init_solver(self, L): + self.IL = np.zeros(L.shape, dtype=self.dtype) + self.IL[1:, 1:] = np.linalg.inv(self.L1.todense()) + + def solve(self, rhs): + s = 
np.zeros(rhs.shape, dtype=self.dtype) + s = self.IL @ rhs + return s + + def solve_inverse(self, r): + return self.IL[r, 1:] + + +class SuperLUInverseLaplacian(InverseLaplacian): + def init_solver(self, L): + import scipy as sp + + self.lusolve = sp.sparse.linalg.factorized(self.L1.tocsc()) + + def solve_inverse(self, r): + rhs = np.zeros(self.n, dtype=self.dtype) + rhs[r] = 1 + return self.lusolve(rhs[1:]) + + def solve(self, rhs): + s = np.zeros(rhs.shape, dtype=self.dtype) + s[1:] = self.lusolve(rhs[1:]) + return s + + +class CGInverseLaplacian(InverseLaplacian): + def init_solver(self, L): + global sp + import scipy as sp + + ilu = sp.sparse.linalg.spilu(self.L1.tocsc()) + n = self.n - 1 + self.M = sp.sparse.linalg.LinearOperator(shape=(n, n), matvec=ilu.solve) + + def solve(self, rhs): + s = np.zeros(rhs.shape, dtype=self.dtype) + s[1:] = sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0] + return s + + def solve_inverse(self, r): + rhs = np.zeros(self.n, self.dtype) + rhs[r] = 1 + return sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py new file mode 100644 index 0000000..cff1d47 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/group.py @@ -0,0 +1,787 @@ +"""Group centrality measures.""" + +from copy import deepcopy + +import networkx as nx +from networkx.algorithms.centrality.betweenness import ( + _accumulate_endpoints, + _single_source_dijkstra_path_basic, + _single_source_shortest_path_basic, +) +from networkx.utils.decorators import not_implemented_for + +__all__ = [ + "group_betweenness_centrality", + "group_closeness_centrality", + "group_degree_centrality", + "group_in_degree_centrality", + "group_out_degree_centrality", + "prominent_group", +] + + +@nx._dispatchable(edge_attrs="weight") +def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False): + r"""Compute the group betweenness centrality for a group of nodes. + + Group betweenness centrality of a group of nodes $C$ is the sum of the + fraction of all-pairs shortest paths that pass through any vertex in $C$ + + .. math:: + + c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)} + + where $V$ is the set of nodes, $\sigma(s, t)$ is the number of + shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of + those paths passing through some node in group $C$. Note that + $(s, t)$ are not members of the group ($V-C$ is the set of nodes + in $V$ that are not in $C$). + + Parameters + ---------- + G : graph + A NetworkX graph. + + C : list or set or list of lists or list of sets + A group or a list of groups containing nodes which belong to G, for which group betweenness + centrality is to be calculated. + + normalized : bool, optional (default=True) + If True, group betweenness is normalized by `1/((|V|-|C|)(|V|-|C|-1))` + where `|V|` is the number of nodes in G and `|C|` is the number of nodes in C. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + The weight of an edge is treated as the length or distance between the two sides. + + endpoints : bool, optional (default=False) + If True include the endpoints in the shortest path counts. + + Raises + ------ + NodeNotFound + If node(s) in C are not present in G. 
+ + Returns + ------- + betweenness : list of floats or float + If C is a single group then return a float. If C is a list with + several groups then return a list of group betweenness centralities. + + See Also + -------- + betweenness_centrality + + Notes + ----- + Group betweenness centrality is described in [1]_ and its importance discussed in [3]_. + The initial implementation of the algorithm is mentioned in [2]_. This function uses + an improved algorithm presented in [4]_. + + The number of nodes in the group must be a maximum of n - 2 where `n` + is the total number of nodes in the graph. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + between "u" and "v" are counted as two possible paths (one each + direction) while undirected paths between "u" and "v" are counted + as one path. Said another way, the sum in the expression above is + over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs. + + + References + ---------- + .. [1] M G Everett and S P Borgatti: + The Centrality of Groups and Classes. + Journal of Mathematical Sociology. 23(3): 181-201. 1999. + http://www.analytictech.com/borgatti/group_centrality.htm + .. [2] Ulrik Brandes: + On Variants of Shortest-Path Betweenness + Centrality and their Generic Computation. + Social Networks 30(2):136-145, 2008. + http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf + .. [3] Sourav Medya et. al.: + Group Centrality Maximization via Network Design. + SIAM International Conference on Data Mining, SDM 2018, 126–134. + https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf + .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev. + "Fast algorithm for successive computation of group betweenness centrality." 
+       https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
+
+    """
+    GBC = []  # initialize betweenness
+    list_of_groups = True
+    # check whether C contains one or many groups
+    if any(el in G for el in C):
+        C = [C]
+        list_of_groups = False
+    set_v = {node for group in C for node in group}
+    if set_v - G.nodes:  # element(s) of C not in G
+        raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are in C but not in G.")
+
+    # pre-processing
+    PB, sigma, D = _group_preprocessing(G, set_v, weight)
+
+    # the algorithm for each group
+    for group in C:
+        group = set(group)  # set of nodes in group
+        # initialize the matrices of the sigma and the PB
+        GBC_group = 0
+        sigma_m = deepcopy(sigma)
+        PB_m = deepcopy(PB)
+        sigma_m_v = deepcopy(sigma_m)
+        PB_m_v = deepcopy(PB_m)
+        for v in group:
+            GBC_group += PB_m[v][v]
+            for x in group:
+                for y in group:
+                    dxvy = 0
+                    dxyv = 0
+                    dvxy = 0
+                    if not (
+                        sigma_m[x][y] == 0 or sigma_m[x][v] == 0 or sigma_m[v][y] == 0
+                    ):
+                        if D[x][v] == D[x][y] + D[y][v]:
+                            dxyv = sigma_m[x][y] * sigma_m[y][v] / sigma_m[x][v]
+                        if D[x][y] == D[x][v] + D[v][y]:
+                            dxvy = sigma_m[x][v] * sigma_m[v][y] / sigma_m[x][y]
+                        if D[v][y] == D[v][x] + D[x][y]:
+                            dvxy = sigma_m[v][x] * sigma[x][y] / sigma[v][y]
+                    sigma_m_v[x][y] = sigma_m[x][y] * (1 - dxvy)
+                    PB_m_v[x][y] = PB_m[x][y] - PB_m[x][y] * dxvy
+                    if y != v:
+                        PB_m_v[x][y] -= PB_m[x][v] * dxyv
+                    if x != v:
+                        PB_m_v[x][y] -= PB_m[v][y] * dvxy
+            sigma_m, sigma_m_v = sigma_m_v, sigma_m
+            PB_m, PB_m_v = PB_m_v, PB_m
+
+        # endpoints
+        v, c = len(G), len(group)
+        if not endpoints:
+            scale = 0
+            # if the graph is connected then subtract the endpoints from
+            # the count for all the nodes in the graph. else count how many
+            # nodes are connected to the group's nodes and subtract that.
+            if nx.is_directed(G):
+                if nx.is_strongly_connected(G):
+                    scale = c * (2 * v - c - 1)
+            elif nx.is_connected(G):
+                scale = c * (2 * v - c - 1)
+            if scale == 0:
+                for group_node1 in group:
+                    for node in D[group_node1]:
+                        if node != group_node1:
+                            if node in group:
+                                scale += 1
+                            else:
+                                scale += 2
+            GBC_group -= scale
+
+        # normalized
+        if normalized:
+            scale = 1 / ((v - c) * (v - c - 1))
+            GBC_group *= scale
+
+        # If undirected then count only the undirected edges
+        elif not G.is_directed():
+            GBC_group /= 2
+
+        GBC.append(GBC_group)
+    if list_of_groups:
+        return GBC
+    return GBC[0]
+
+
+def _group_preprocessing(G, set_v, weight):
+    sigma = {}
+    delta = {}
+    D = {}
+    betweenness = dict.fromkeys(G, 0)
+    for s in G:
+        if weight is None:  # use BFS
+            S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
+        else:  # use Dijkstra's algorithm
+            S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
+        betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
+        for i in delta[s]:  # add the paths from s to i and rescale sigma
+            if s != i:
+                delta[s][i] += 1
+            if weight is not None:
+                sigma[s][i] = sigma[s][i] / 2
+    # building the path betweenness matrix only for nodes that appear in the group
+    PB = dict.fromkeys(G)
+    for group_node1 in set_v:
+        PB[group_node1] = dict.fromkeys(G, 0.0)
+        for group_node2 in set_v:
+            if group_node2 not in D[group_node1]:
+                continue
+            for node in G:
+                # if node is connected to the two group nodes then continue
+                if group_node2 in D[node] and group_node1 in D[node]:
+                    if (
+                        D[node][group_node2]
+                        == D[node][group_node1] + D[group_node1][group_node2]
+                    ):
+                        PB[group_node1][group_node2] += (
+                            delta[node][group_node2]
+                            * sigma[node][group_node1]
+                            * sigma[group_node1][group_node2]
+                            /
sigma[node][group_node2] + ) + return PB, sigma, D + + +@nx._dispatchable(edge_attrs="weight") +def prominent_group( + G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False +): + r"""Find the prominent group of size $k$ in graph $G$. The prominence of the + group is evaluated by the group betweenness centrality. + + Group betweenness centrality of a group of nodes $C$ is the sum of the + fraction of all-pairs shortest paths that pass through any vertex in $C$ + + .. math:: + + c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)} + + where $V$ is the set of nodes, $\sigma(s, t)$ is the number of + shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of + those paths passing through some node in group $C$. Note that + $(s, t)$ are not members of the group ($V-C$ is the set of nodes + in $V$ that are not in $C$). + + Parameters + ---------- + G : graph + A NetworkX graph. + + k : int + The number of nodes in the group. + + normalized : bool, optional (default=True) + If True, group betweenness is normalized by ``1/((|V|-|C|)(|V|-|C|-1))`` + where ``|V|`` is the number of nodes in G and ``|C|`` is the number of + nodes in C. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + The weight of an edge is treated as the length or distance between the two sides. + + endpoints : bool, optional (default=False) + If True include the endpoints in the shortest path counts. + + C : list or set, optional (default=None) + list of nodes which won't be candidates of the prominent group. + + greedy : bool, optional (default=False) + Using a naive greedy algorithm in order to find non-optimal prominent + group. For scale free networks the results are negligibly below the optimal + results. + + Raises + ------ + NodeNotFound + If node(s) in C are not present in G. + + Returns + ------- + max_GBC : float + The group betweenness centrality of the prominent group. + + max_group : list + The list of nodes in the prominent group. + + See Also + -------- + betweenness_centrality, group_betweenness_centrality + + Notes + ----- + Group betweenness centrality is described in [1]_ and its importance discussed in [3]_. + The algorithm is described in [2]_ and is based on techniques mentioned in [4]_. + + The number of nodes in the group must be a maximum of ``n - 2`` where ``n`` + is the total number of nodes in the graph. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + The total number of paths between source and target is counted + differently for directed and undirected graphs. Directed paths + between "u" and "v" are counted as two possible paths (one each + direction) while undirected paths between "u" and "v" are counted + as one path. Said another way, the sum in the expression above is + over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs. + + References + ---------- + .. [1] M G Everett and S P Borgatti: + The Centrality of Groups and Classes. + Journal of Mathematical Sociology. 23(3): 181-201. 1999. + http://www.analytictech.com/borgatti/group_centrality.htm + .. [2] Rami Puzis, Yuval Elovici, and Shlomi Dolev: + "Finding the Most Prominent Group in Complex Networks" + AI communications 20(4): 287-296, 2007. + https://www.researchgate.net/profile/Rami_Puzis2/publication/220308855 + .. [3] Sourav Medya et. 
al.: + Group Centrality Maximization via Network Design. + SIAM International Conference on Data Mining, SDM 2018, 126–134. + https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf + .. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev. + "Fast algorithm for successive computation of group betweenness centrality." + https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709 + """ + import numpy as np + import pandas as pd + + if C is not None: + C = set(C) + if C - G.nodes: # element(s) of C not in G + raise nx.NodeNotFound(f"The node(s) {C - G.nodes} are in C but not in G.") + nodes = list(G.nodes - C) + else: + nodes = list(G.nodes) + DF_tree = nx.Graph() + DF_tree.__networkx_cache__ = None # Disable caching + PB, sigma, D = _group_preprocessing(G, nodes, weight) + betweenness = pd.DataFrame.from_dict(PB) + if C is not None: + for node in C: + # remove from the betweenness all the nodes not part of the group + betweenness = betweenness.drop(index=node) + betweenness = betweenness.drop(columns=node) + CL = [node for _, node in sorted(zip(np.diag(betweenness), nodes), reverse=True)] + max_GBC = 0 + max_group = [] + DF_tree.add_node( + 1, + CL=CL, + betweenness=betweenness, + GBC=0, + GM=[], + sigma=sigma, + cont=dict(zip(nodes, np.diag(betweenness))), + ) + + # the algorithm + DF_tree.nodes[1]["heu"] = 0 + for i in range(k): + DF_tree.nodes[1]["heu"] += DF_tree.nodes[1]["cont"][DF_tree.nodes[1]["CL"][i]] + max_GBC, DF_tree, max_group = _dfbnb( + G, k, DF_tree, max_GBC, 1, D, max_group, nodes, greedy + ) + + v = len(G) + if not endpoints: + scale = 0 + # if the graph is connected then subtract the endpoints from + # the count for all the nodes in the graph. else count how many + # nodes are connected to the group's nodes and subtract that. + if nx.is_directed(G): + if nx.is_strongly_connected(G): + scale = k * (2 * v - k - 1) + elif nx.is_connected(G): + scale = k * (2 * v - k - 1) + if scale == 0: + for group_node1 in max_group: + for node in D[group_node1]: + if node != group_node1: + if node in max_group: + scale += 1 + else: + scale += 2 + max_GBC -= scale + + # normalized + if normalized: + scale = 1 / ((v - k) * (v - k - 1)) + max_GBC *= scale + + # If undirected then count only the undirected edges + elif not G.is_directed(): + max_GBC /= 2 + max_GBC = float(f"{max_GBC:.2f}") + return max_GBC, max_group + + +def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy): + # stopping condition - if we found a group of size k and with higher GBC then prune + if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC: + return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"] + # stopping condition - if the size of group members equal to k or there are less than + # k - |GM| in the candidate list or the heuristic function plus the GBC is below the + # maximal GBC found then prune + if ( + len(DF_tree.nodes[root]["GM"]) == k + or len(DF_tree.nodes[root]["CL"]) <= k - len(DF_tree.nodes[root]["GM"]) + or DF_tree.nodes[root]["GBC"] + DF_tree.nodes[root]["heu"] <= max_GBC + ): + return max_GBC, DF_tree, max_group + + # finding the heuristic of both children + node_p, node_m, DF_tree = _heuristic(k, root, DF_tree, D, nodes, greedy) + + # finding the child with the bigger heuristic + GBC and expand + # that node first if greedy then only expand the plus node + if greedy: + max_GBC, DF_tree, max_group = _dfbnb( + G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy + ) + + elif ( + DF_tree.nodes[node_p]["GBC"] + DF_tree.nodes[node_p]["heu"] + > 
DF_tree.nodes[node_m]["GBC"] + DF_tree.nodes[node_m]["heu"]
+    ):
+        max_GBC, DF_tree, max_group = _dfbnb(
+            G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
+        )
+        max_GBC, DF_tree, max_group = _dfbnb(
+            G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
+        )
+    else:
+        max_GBC, DF_tree, max_group = _dfbnb(
+            G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
+        )
+        max_GBC, DF_tree, max_group = _dfbnb(
+            G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
+        )
+    return max_GBC, DF_tree, max_group
+
+
+def _heuristic(k, root, DF_tree, D, nodes, greedy):
+    import numpy as np
+
+    # This helper function adds two nodes to DF_tree - one left son and the
+    # other right son - and finds their heuristic, CL, GBC, and GM
+    node_p = DF_tree.number_of_nodes() + 1
+    node_m = DF_tree.number_of_nodes() + 2
+    added_node = DF_tree.nodes[root]["CL"][0]
+
+    # adding the plus node
+    DF_tree.add_nodes_from([(node_p, deepcopy(DF_tree.nodes[root]))])
+    DF_tree.nodes[node_p]["GM"].append(added_node)
+    DF_tree.nodes[node_p]["GBC"] += DF_tree.nodes[node_p]["cont"][added_node]
+    root_node = DF_tree.nodes[root]
+    for x in nodes:
+        for y in nodes:
+            dxvy = 0
+            dxyv = 0
+            dvxy = 0
+            if not (
+                root_node["sigma"][x][y] == 0
+                or root_node["sigma"][x][added_node] == 0
+                or root_node["sigma"][added_node][y] == 0
+            ):
+                if D[x][added_node] == D[x][y] + D[y][added_node]:
+                    dxyv = (
+                        root_node["sigma"][x][y]
+                        * root_node["sigma"][y][added_node]
+                        / root_node["sigma"][x][added_node]
+                    )
+                if D[x][y] == D[x][added_node] + D[added_node][y]:
+                    dxvy = (
+                        root_node["sigma"][x][added_node]
+                        * root_node["sigma"][added_node][y]
+                        / root_node["sigma"][x][y]
+                    )
+                if D[added_node][y] == D[added_node][x] + D[x][y]:
+                    dvxy = (
+                        root_node["sigma"][added_node][x]
+                        * root_node["sigma"][x][y]
+                        / root_node["sigma"][added_node][y]
+                    )
+            DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy)
+            DF_tree.nodes[node_p]["betweenness"].loc[y, x] = (
+                root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy
+            )
+            if y != added_node:
+                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
+                    root_node["betweenness"][x][added_node] * dxyv
+                )
+            if x != added_node:
+                DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
+                    root_node["betweenness"][added_node][y] * dvxy
+                )
+
+    DF_tree.nodes[node_p]["CL"] = [
+        node
+        for _, node in sorted(
+            zip(np.diag(DF_tree.nodes[node_p]["betweenness"]), nodes), reverse=True
+        )
+        if node not in DF_tree.nodes[node_p]["GM"]
+    ]
+    DF_tree.nodes[node_p]["cont"] = dict(
+        zip(nodes, np.diag(DF_tree.nodes[node_p]["betweenness"]))
+    )
+    DF_tree.nodes[node_p]["heu"] = 0
+    for i in range(k - len(DF_tree.nodes[node_p]["GM"])):
+        DF_tree.nodes[node_p]["heu"] += DF_tree.nodes[node_p]["cont"][
+            DF_tree.nodes[node_p]["CL"][i]
+        ]
+
+    # adding the minus node - don't insert the first node in the CL to GM
+    # Insert the minus node only if the algorithm isn't the greedy type
+    if not greedy:
+        DF_tree.add_nodes_from([(node_m, deepcopy(DF_tree.nodes[root]))])
+        DF_tree.nodes[node_m]["CL"].pop(0)
+        DF_tree.nodes[node_m]["cont"].pop(added_node)
+        DF_tree.nodes[node_m]["heu"] = 0
+        for i in range(k - len(DF_tree.nodes[node_m]["GM"])):
+            DF_tree.nodes[node_m]["heu"] += DF_tree.nodes[node_m]["cont"][
+                DF_tree.nodes[node_m]["CL"][i]
+            ]
+    else:
+        node_m = None
+
+    return node_p, node_m, DF_tree
+
+
+@nx._dispatchable(edge_attrs="weight")
+def group_closeness_centrality(G, S, weight=None):
+    r"""Compute the group closeness centrality for a group of nodes.
+ + Group closeness centrality of a group of nodes $S$ is a measure + of how close the group is to the other nodes in the graph. + + .. math:: + + c_{close}(S) = \frac{|V-S|}{\sum_{v \in V-S} d_{S, v}} + + d_{S, v} = min_{u \in S} (d_{u, v}) + + where $V$ is the set of nodes, $d_{S, v}$ is the distance of + the group $S$ from $v$ defined as above. ($V-S$ is the set of nodes + in $V$ that are not in $S$). + + Parameters + ---------- + G : graph + A NetworkX graph. + + S : list or set + S is a group of nodes which belong to G, for which group closeness + centrality is to be calculated. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + The weight of an edge is treated as the length or distance between the two sides. + + Raises + ------ + NodeNotFound + If node(s) in S are not present in G. + + Returns + ------- + closeness : float + Group closeness centrality of the group S. + + See Also + -------- + closeness_centrality + + Notes + ----- + The measure was introduced in [1]_. + The formula implemented here is described in [2]_. + + Higher values of closeness indicate greater centrality. + + It is assumed that 1 / 0 is 0 (required in the case of directed graphs, + or when a shortest path length is 0). + + The number of nodes in the group must be a maximum of n - 1 where `n` + is the total number of nodes in the graph. + + For directed graphs, the incoming distance is utilized here. To use the + outward distance, act on `G.reverse()`. + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + References + ---------- + .. [1] M G Everett and S P Borgatti: + The Centrality of Groups and Classes. + Journal of Mathematical Sociology. 23(3): 181-201. 1999. + http://www.analytictech.com/borgatti/group_centrality.htm + .. [2] J. Zhao et. al.: + Measuring and Maximizing Group Closeness Centrality over + Disk Resident Graphs. + WWWConference Proceedings, 2014. 689-694. + https://doi.org/10.1145/2567948.2579356 + """ + if G.is_directed(): + G = G.reverse() # reverse view + closeness = 0 # initialize to 0 + V = set(G) # set of nodes in G + S = set(S) # set of nodes in group S + V_S = V - S # set of nodes in V but not S + shortest_path_lengths = nx.multi_source_dijkstra_path_length(G, S, weight=weight) + # accumulation + for v in V_S: + try: + closeness += shortest_path_lengths[v] + except KeyError: # no path exists + closeness += 0 + try: + closeness = len(V_S) / closeness + except ZeroDivisionError: # 1 / 0 assumed as 0 + closeness = 0 + return closeness + + +@nx._dispatchable +def group_degree_centrality(G, S): + """Compute the group degree centrality for a group of nodes. + + Group degree centrality of a group of nodes $S$ is the fraction + of non-group members connected to group members. + + Parameters + ---------- + G : graph + A NetworkX graph. + + S : list or set + S is a group of nodes which belong to G, for which group degree + centrality is to be calculated. + + Raises + ------ + NetworkXError + If node(s) in S are not in G. + + Returns + ------- + centrality : float + Group degree centrality of the group S. + + See Also + -------- + degree_centrality + group_in_degree_centrality + group_out_degree_centrality + + Notes + ----- + The measure was introduced in [1]_. 
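+
+    For instance, in the star graph ``nx.star_graph(3)`` the group consisting
+    of the center node alone is adjacent to all three leaves, so its group
+    degree centrality is $3/3 = 1$.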
+
+    The number of nodes in the group must be a maximum of n - 1 where `n`
+    is the total number of nodes in the graph.
+
+    References
+    ----------
+    .. [1] M G Everett and S P Borgatti:
+       The Centrality of Groups and Classes.
+       Journal of Mathematical Sociology. 23(3): 181-201. 1999.
+       http://www.analytictech.com/borgatti/group_centrality.htm
+    """
+    centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
+    centrality /= len(G.nodes()) - len(S)
+    return centrality
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def group_in_degree_centrality(G, S):
+    """Compute the group in-degree centrality for a group of nodes.
+
+    Group in-degree centrality of a group of nodes $S$ is the fraction
+    of non-group members connected to group members by incoming edges.
+
+    Parameters
+    ----------
+    G : graph
+       A NetworkX graph.
+
+    S : list or set
+       S is a group of nodes which belong to G, for which group in-degree
+       centrality is to be calculated.
+
+    Returns
+    -------
+    centrality : float
+       Group in-degree centrality of the group S.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+       If G is undirected.
+
+    NodeNotFound
+       If node(s) in S are not in G.
+
+    See Also
+    --------
+    degree_centrality
+    group_degree_centrality
+    group_out_degree_centrality
+
+    Notes
+    -----
+    The number of nodes in the group must be a maximum of n - 1 where `n`
+    is the total number of nodes in the graph.
+
+    `G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
+    so for group in-degree centrality, the reverse graph is used.
+    """
+    return group_degree_centrality(G.reverse(), S)
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def group_out_degree_centrality(G, S):
+    """Compute the group out-degree centrality for a group of nodes.
+
+    Group out-degree centrality of a group of nodes $S$ is the fraction
+    of non-group members connected to group members by outgoing edges.
+
+    Parameters
+    ----------
+    G : graph
+       A NetworkX graph.
+
+    S : list or set
+       S is a group of nodes which belong to G, for which group out-degree
+       centrality is to be calculated.
+
+    Returns
+    -------
+    centrality : float
+       Group out-degree centrality of the group S.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+       If G is undirected.
+
+    NodeNotFound
+       If node(s) in S are not in G.
+
+    See Also
+    --------
+    degree_centrality
+    group_degree_centrality
+    group_in_degree_centrality
+
+    Notes
+    -----
+    The number of nodes in the group must be a maximum of n - 1 where `n`
+    is the total number of nodes in the graph.
+
+    `G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
+    so for group out-degree centrality, the graph itself is used.
+    """
+    return group_degree_centrality(G, S)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py
new file mode 100644
index 0000000..26702a6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/harmonic.py
@@ -0,0 +1,89 @@
+"""Functions for computing the harmonic centrality of a graph."""
+
+from functools import partial
+
+import networkx as nx
+
+__all__ = ["harmonic_centrality"]
+
+
+@nx._dispatchable(edge_attrs="distance")
+def harmonic_centrality(G, nbunch=None, distance=None, sources=None):
+    r"""Compute harmonic centrality for nodes.
+
+    Harmonic centrality [1]_ of a node `u` is the sum of the reciprocal
+    of the shortest path distances from all other nodes to `u`
+
+    ..
math:: + + C(u) = \sum_{v \neq u} \frac{1}{d(v, u)} + + where `d(v, u)` is the shortest-path distance between `v` and `u`. + + If `sources` is given as an argument, the returned harmonic centrality + values are calculated as the sum of the reciprocals of the shortest + path distances from the nodes specified in `sources` to `u` instead + of from all nodes to `u`. + + Notice that higher values indicate higher centrality. + + Parameters + ---------- + G : graph + A NetworkX graph + + nbunch : container (default: all nodes in G) + Container of nodes for which harmonic centrality values are calculated. + + sources : container (default: all nodes in G) + Container of nodes `v` over which reciprocal distances are computed. + Nodes not in `G` are silently ignored. + + distance : edge attribute key, optional (default=None) + Use the specified edge attribute as the edge distance in shortest + path calculations. If `None`, then each edge will have distance equal to 1. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with harmonic centrality as the value. + + See Also + -------- + betweenness_centrality, load_centrality, eigenvector_centrality, + degree_centrality, closeness_centrality + + Notes + ----- + If the 'distance' keyword is set to an edge attribute key then the + shortest-path length will be computed using Dijkstra's algorithm with + that edge attribute as the edge weight. + + References + ---------- + .. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality." + Internet Mathematics 10.3-4 (2014): 222-262. + """ + + nbunch = set(G.nbunch_iter(nbunch) if nbunch is not None else G.nodes) + sources = set(G.nbunch_iter(sources) if sources is not None else G.nodes) + + centrality = dict.fromkeys(nbunch, 0) + + transposed = False + if len(nbunch) < len(sources): + transposed = True + nbunch, sources = sources, nbunch + if nx.is_directed(G): + G = nx.reverse(G, copy=False) + + spl = partial(nx.shortest_path_length, G, weight=distance) + for v in sources: + dist = spl(v) + for u in nbunch.intersection(dist): + d = dist[u] + if d == 0: # handle u == v and edges with 0 weight + continue + centrality[v if transposed else u] += 1 / d + + return centrality diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py new file mode 100644 index 0000000..c4ec9e0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/katz.py @@ -0,0 +1,331 @@ +"""Katz centrality.""" + +import math + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["katz_centrality", "katz_centrality_numpy"] + + +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def katz_centrality( + G, + alpha=0.1, + beta=1.0, + max_iter=1000, + tol=1.0e-6, + nstart=None, + normalized=True, + weight=None, +): + r"""Compute the Katz centrality for the nodes of the graph G. + + Katz centrality computes the centrality for a node based on the centrality + of its neighbors. It is a generalization of the eigenvector centrality. The + Katz centrality for node $i$ is + + .. math:: + + x_i = \alpha \sum_{j} A_{ij} x_j + \beta, + + where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$. + + The parameter $\beta$ controls the initial centrality and + + .. math:: + + \alpha < \frac{1}{\lambda_{\max}}. 
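+
+    For example, for the path graph $P_4$ the largest adjacency eigenvalue is
+    the golden ratio $\varphi = (1 + \sqrt{5})/2 \approx 1.618$, so any
+    $\alpha < 1/\varphi \approx 0.618$ is admissible (the doctest below picks
+    $\alpha$ just under this bound).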
+
+    Katz centrality computes the relative influence of a node within a
+    network by measuring the number of the immediate neighbors (first
+    degree nodes) and also all other nodes in the network that connect
+    to the node under consideration through these immediate neighbors.
+
+    Extra weight can be provided to immediate neighbors through the
+    parameter $\beta$. Connections made with distant neighbors
+    are, however, penalized by an attenuation factor $\alpha$ which
+    should be strictly less than the inverse largest eigenvalue of the
+    adjacency matrix in order for the Katz centrality to be computed
+    correctly. More information is provided in [1]_.
+
+    Parameters
+    ----------
+    G : graph
+      A NetworkX graph.
+
+    alpha : float, optional (default=0.1)
+      Attenuation factor
+
+    beta : scalar or dictionary, optional (default=1.0)
+      Weight attributed to the immediate neighborhood. If not a scalar, the
+      dictionary must have a value for every node.
+
+    max_iter : integer, optional (default=1000)
+      Maximum number of iterations in power method.
+
+    tol : float, optional (default=1.0e-6)
+      Error tolerance used to check convergence in power method iteration.
+
+    nstart : dictionary, optional
+      Starting value of Katz iteration for each node.
+
+    normalized : bool, optional (default=True)
+      If True normalize the resulting values.
+
+    weight : None or string, optional (default=None)
+      If None, all edge weights are considered equal.
+      Otherwise holds the name of the edge attribute used as weight.
+      In this measure the weight is interpreted as the connection strength.
+
+    Returns
+    -------
+    nodes : dictionary
+       Dictionary of nodes with Katz centrality as the value.
+
+    Raises
+    ------
+    NetworkXError
+       If the parameter `beta` is not a scalar but lacks a value for at least
+       one node
+
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    Examples
+    --------
+    >>> import math
+    >>> G = nx.path_graph(4)
+    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
+    >>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
+    >>> for n, c in sorted(centrality.items()):
+    ...     print(f"{n} {c:.2f}")
+    0 0.37
+    1 0.60
+    2 0.60
+    3 0.37
+
+    See Also
+    --------
+    katz_centrality_numpy
+    eigenvector_centrality
+    eigenvector_centrality_numpy
+    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
+    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`
+
+    Notes
+    -----
+    Katz centrality was introduced by [2]_.
+
+    This algorithm uses the power method to find the eigenvector
+    corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
+    The parameter ``alpha`` should be strictly less than the inverse of largest
+    eigenvalue of the adjacency matrix for the algorithm to converge.
+    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
+    eigenvalue of the adjacency matrix.
+    The iteration will stop after ``max_iter`` iterations or an error tolerance of
+    ``number_of_nodes(G) * tol`` has been reached.
+
+    For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
+    Katz centrality approaches the results for eigenvector centrality.
+
+    For directed graphs this finds "left" eigenvectors which corresponds
+    to the in-edges in the graph. For out-edges Katz centrality,
+    first reverse the graph with ``G.reverse()``.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Networks: An Introduction.
+ Oxford University Press, USA, 2010, p. 720. + .. [2] Leo Katz: + A New Status Index Derived from Sociometric Index. + Psychometrika 18(1):39–43, 1953 + https://link.springer.com/content/pdf/10.1007/BF02289026.pdf + """ + if len(G) == 0: + return {} + + nnodes = G.number_of_nodes() + + if nstart is None: + # choose starting vector with entries of 0 + x = dict.fromkeys(G, 0) + else: + x = nstart + + try: + b = dict.fromkeys(G, float(beta)) + except (TypeError, ValueError, AttributeError) as err: + b = beta + if set(beta) != set(G): + raise nx.NetworkXError( + "beta dictionary must have a value for every node" + ) from err + + # make up to max_iter iterations + for _ in range(max_iter): + xlast = x + x = dict.fromkeys(xlast, 0) + # do the multiplication y^T = Alpha * x^T A + Beta + for n in x: + for nbr in G[n]: + x[nbr] += xlast[n] * G[n][nbr].get(weight, 1) + for n in x: + x[n] = alpha * x[n] + b[n] + + # check convergence + error = sum(abs(x[n] - xlast[n]) for n in x) + if error < nnodes * tol: + if normalized: + # normalize vector + try: + s = 1.0 / math.hypot(*x.values()) + except ZeroDivisionError: + s = 1.0 + else: + s = 1 + for n in x: + x[n] *= s + return x + raise nx.PowerIterationFailedConvergence(max_iter) + + +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None): + r"""Compute the Katz centrality for the graph G. + + Katz centrality computes the centrality for a node based on the centrality + of its neighbors. It is a generalization of the eigenvector centrality. The + Katz centrality for node $i$ is + + .. math:: + + x_i = \alpha \sum_{j} A_{ij} x_j + \beta, + + where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$. + + The parameter $\beta$ controls the initial centrality and + + .. math:: + + \alpha < \frac{1}{\lambda_{\max}}. + + Katz centrality computes the relative influence of a node within a + network by measuring the number of the immediate neighbors (first + degree nodes) and also all other nodes in the network that connect + to the node under consideration through these immediate neighbors. + + Extra weight can be provided to immediate neighbors through the + parameter $\beta$. Connections made with distant neighbors + are, however, penalized by an attenuation factor $\alpha$ which + should be strictly less than the inverse largest eigenvalue of the + adjacency matrix in order for the Katz centrality to be computed + correctly. More information is provided in [1]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + alpha : float + Attenuation factor + + beta : scalar or dictionary, optional (default=1.0) + Weight attributed to the immediate neighborhood. If not a scalar the + dictionary must have an value for every node. + + normalized : bool + If True normalize the resulting values. + + weight : None or string, optional + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + In this measure the weight is interpreted as the connection strength. + + Returns + ------- + nodes : dictionary + Dictionary of nodes with Katz centrality as the value. 
+
+    Raises
+    ------
+    NetworkXError
+        If the parameter `beta` is not a scalar but lacks a value for at least
+        one node.
+
+    Examples
+    --------
+    >>> import math
+    >>> G = nx.path_graph(4)
+    >>> phi = (1 + math.sqrt(5)) / 2.0  # largest eigenvalue of adj matrix
+    >>> centrality = nx.katz_centrality_numpy(G, 1 / phi - 0.01)
+    >>> for n, c in sorted(centrality.items()):
+    ...     print(f"{n} {c:.2f}")
+    0 0.37
+    1 0.60
+    2 0.60
+    3 0.37
+
+    See Also
+    --------
+    katz_centrality
+    eigenvector_centrality_numpy
+    eigenvector_centrality
+    :func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
+    :func:`~networkx.algorithms.link_analysis.hits_alg.hits`
+
+    Notes
+    -----
+    Katz centrality was introduced by [2]_.
+
+    This algorithm uses a direct linear solver to solve the above equation.
+    The parameter ``alpha`` should be strictly less than the inverse of the
+    largest eigenvalue of the adjacency matrix for there to be a solution.
+    You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$, the
+    largest eigenvalue of the adjacency matrix.
+
+    For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$ with $\beta > 0$,
+    Katz centrality approaches the results for eigenvector centrality.
+
+    For directed graphs this finds "left" eigenvectors, which correspond
+    to the in-edges in the graph. For Katz centrality on out-edges,
+    first reverse the graph with ``G.reverse()``.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Networks: An Introduction.
+       Oxford University Press, USA, 2010, p. 173.
+    .. [2] Leo Katz:
+       A New Status Index Derived from Sociometric Index.
+       Psychometrika 18(1):39–43, 1953
+       https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}
+    try:
+        nodelist = beta.keys()
+        if set(nodelist) != set(G):
+            raise nx.NetworkXError("beta dictionary must have a value for every node")
+        b = np.array(list(beta.values()), dtype=float)
+    except AttributeError:
+        nodelist = list(G)
+        try:
+            b = np.ones((len(nodelist), 1)) * beta
+        except (TypeError, ValueError, AttributeError) as err:
+            raise nx.NetworkXError("beta must be a number") from err
+
+    A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
+    n = A.shape[0]
+    centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze()
+
+    # Normalize: rely on truediv to cast to float, then tolist to make Python numbers
+    norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
+    return dict(zip(nodelist, (centrality / norm).tolist()))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py
new file mode 100644
index 0000000..2fa95d2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/laplacian.py
@@ -0,0 +1,150 @@
+"""
+Laplacian centrality measures.
+"""
+
+import networkx as nx
+
+__all__ = ["laplacian_centrality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def laplacian_centrality(
+    G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
+):
+    r"""Compute the Laplacian centrality for nodes in the graph `G`.
+
+    The Laplacian Centrality of a node ``i`` is measured by the drop in the
+    Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
+    is the sum of the squared eigenvalues of a graph's Laplacian matrix.
+
+    .. math::
+
+        C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}
+
+        E_L (G) = \sum_{i=0}^n \lambda_i^2
+
+    Where $E_L (G)$ is the Laplacian energy of graph `G`,
+    $E_L (G_i)$ is the Laplacian energy of graph `G` after deleting node ``i``,
+    and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
+    This formula shows the normalized value. Without normalization,
+    the numerator on the right side is returned.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    normalized : bool (default = True)
+        If True the centrality score for each node is the drop in Laplacian
+        energy when that node is removed, expressed as a fraction of the full
+        Laplacian energy of `G`. If False the raw drop in Laplacian energy is
+        returned.
+
+    nodelist : list, optional (default = None)
+        The rows and columns are ordered according to the nodes in nodelist.
+        If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : string or None, optional (default=`weight`)
+        Optional parameter `weight` to compute the Laplacian matrix.
+        The edge data key used to compute each value in the matrix.
+        If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+        Optional parameter `walk_type` used when calling
+        :func:`directed_laplacian_matrix <networkx.linalg.laplacianmatrix.directed_laplacian_matrix>`.
+        One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
+        (the default), then a value is selected according to the properties of `G`:
+
+        - ``walk_type="random"`` if `G` is strongly connected and aperiodic
+        - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
+        - ``walk_type="pagerank"`` for all other cases.
+
+    alpha : real (default = 0.95)
+        Optional parameter `alpha` used when calling
+        :func:`directed_laplacian_matrix <networkx.linalg.laplacianmatrix.directed_laplacian_matrix>`.
+        (1 - alpha) is the teleportation probability used with pagerank.
+
+    Returns
+    -------
+    nodes : dictionary
+        Dictionary of nodes with Laplacian centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
+    >>> G.add_weighted_edges_from(edges)
+    >>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
+    [(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]
+
+    Notes
+    -----
+    The algorithm is implemented based on [1]_ with an extension to directed graphs
+    using the ``directed_laplacian_matrix`` function.
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If the graph `G` is the null graph.
+    ZeroDivisionError
+        If the graph `G` has no edges and normalization is requested.
+
+    References
+    ----------
+    .. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
+       Laplacian centrality: A new centrality measure for weighted networks.
+       Information Sciences, 194:240-253.
+       https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf
+
+    See Also
+    --------
+    :func:`~networkx.linalg.laplacianmatrix.directed_laplacian_matrix`
+    :func:`~networkx.linalg.laplacianmatrix.laplacian_matrix`
+    """
+    import numpy as np
+    import scipy as sp
+
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
+    if G.size(weight=weight) == 0:
+        if normalized:
+            raise ZeroDivisionError("graph with no edges has zero full energy")
+        return dict.fromkeys(G, 0)
+
+    if nodelist is not None:
+        nodeset = set(G.nbunch_iter(nodelist))
+        if len(nodeset) != len(nodelist):
+            raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
+        nodes = nodelist + [n for n in G if n not in nodeset]
+    else:
+        nodelist = nodes = list(G)
+
+    if G.is_directed():
+        lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
+    else:
+        lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()
+
+    full_energy = np.sum(lap_matrix**2)
+
+    # calculate laplacian centrality
+    laplace_centralities_dict = {}
+    for i, node in enumerate(nodelist):
+        # remove row and col i from lap_matrix
+        all_but_i = list(np.arange(lap_matrix.shape[0]))
+        all_but_i.remove(i)
+        A_2 = lap_matrix[all_but_i, :][:, all_but_i]
+
+        # Adjust diagonal for removed row
+        new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
+        np.fill_diagonal(A_2, new_diag[all_but_i])
+
+        if len(all_but_i) > 0:  # catches degenerate case of single node
+            new_energy = np.sum(A_2**2)
+        else:
+            new_energy = 0.0
+
+        lapl_cent = full_energy - new_energy
+        if normalized:
+            lapl_cent = lapl_cent / full_energy
+
+        laplace_centralities_dict[node] = float(lapl_cent)
+
+    return laplace_centralities_dict
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py
new file mode 100644
index 0000000..fc46edd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/load.py
@@ -0,0 +1,200 @@
+"""Load centrality."""
+
+from operator import itemgetter
+
+import networkx as nx
+
+__all__ = ["load_centrality", "edge_load_centrality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
+    """Compute load centrality for nodes.
+
+    The load centrality of a node is the fraction of all shortest
+    paths that pass through that node.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph.
+
+    v : node, optional (default=None)
+        If specified, return only the load centrality value for node `v`.
+
+    normalized : bool, optional (default=True)
+        If True the betweenness values are normalized by b=b/(n-1)(n-2) where
+        n is the number of nodes in G.
+
+    weight : None or string, optional (default=None)
+        If None, edge weights are ignored.
+        Otherwise holds the name of the edge attribute used as weight.
+        The weight of an edge is treated as the length or distance between the two sides.
+
+    cutoff : int, optional (default=None)
+        If specified, only consider paths of length <= cutoff.
+
+    Returns
+    -------
+    nodes : dictionary
+        Dictionary of nodes with centrality as the value.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    Load centrality is slightly different from betweenness. It was originally
+    introduced by [2]_. For this load algorithm see [1]_.
+
+    References
+    ----------
+    .. [1] Mark E. J. Newman:
+       Scientific collaboration networks. II.
+       Shortest paths, weighted networks, and centrality.
+       Physical Review E 64, 016132, 2001.
+       http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
+    .. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
+       Universal behavior of Load Distribution in Scale-Free Networks.
+       Physical Review Letters 87(27):1–4, 2001.
+       https://doi.org/10.1103/PhysRevLett.87.278701
+    """
+    if v is not None:  # only one node
+        betweenness = 0.0
+        for source in G:
+            ubetween = _node_betweenness(G, source, cutoff, False, weight)
+            betweenness += ubetween[v] if v in ubetween else 0
+        if normalized:
+            order = G.order()
+            if order <= 2:
+                return betweenness  # no normalization b=0 for all nodes
+            betweenness *= 1.0 / ((order - 1) * (order - 2))
+    else:
+        betweenness = {}.fromkeys(G, 0.0)
+        for source in betweenness:
+            ubetween = _node_betweenness(G, source, cutoff, False, weight)
+            for vk in ubetween:
+                betweenness[vk] += ubetween[vk]
+        if normalized:
+            order = G.order()
+            if order <= 2:
+                return betweenness  # no normalization b=0 for all nodes
+            scale = 1.0 / ((order - 1) * (order - 2))
+            for v in betweenness:
+                betweenness[v] *= scale
+    return betweenness  # all nodes
+
+
+def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
+    """Node betweenness_centrality helper:
+
+    See betweenness_centrality for what you probably want.
+    This actually computes "load" and not betweenness.
+    See https://networkx.lanl.gov/ticket/103
+
+    This calculates the load of each node for paths from a single source.
+    (The fraction of the number of shortest paths from the source that go
+    through each node.)
+
+    To get the load for a node you need to do all-pairs shortest paths.
+
+    If weight is not None then use Dijkstra for finding shortest paths.
+    """
+    # get the predecessor and path length data
+    if weight is None:
+        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
+    else:
+        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)
+
+    # order the nodes by path length
+    onodes = [(l, vert) for (vert, l) in length.items()]
+    onodes.sort()
+    onodes[:] = [vert for (l, vert) in onodes if l > 0]
+
+    # initialize betweenness
+    between = {}.fromkeys(length, 1.0)
+
+    while onodes:
+        v = onodes.pop()
+        if v in pred:
+            num_paths = len(pred[v])  # Discount betweenness if more than
+            for x in pred[v]:  # one shortest path.
+                if x == source:  # stop if hit source because all remaining v
+                    break  # also have pred[v]==[source]
+                between[x] += between[v] / num_paths
+    # remove source
+    for v in between:
+        between[v] -= 1
+    # rescale to be between 0 and 1
+    if normalized:
+        l = len(between)
+        if l > 2:
+            # scale by 1/the number of possible paths
+            scale = 1 / ((l - 1) * (l - 2))
+            for v in between:
+                between[v] *= scale
+    return between
+
+
+load_centrality = newman_betweenness_centrality
+
+
+@nx._dispatchable
+def edge_load_centrality(G, cutoff=False):
+    """Compute edge load.
+
+    WARNING: This concept of edge load has not been analysed
+    or discussed outside of NetworkX that we know of.
+    It is based loosely on load_centrality in the sense that
+    it counts the number of shortest paths which cross each edge.
+    This function is for demonstration and testing purposes.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    cutoff : int or False, optional (default=False)
+        If specified, only consider paths of length <= cutoff.
+
+    Returns
+    -------
+    A dict keyed by edge 2-tuple to the number of shortest paths
+    which use that edge. Where more than one path is shortest,
+    the count is divided equally among paths.
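+
+    Examples
+    --------
+    The values below were worked out by hand for a 3-node path graph and
+    are meant as an illustrative sketch of the raw (unnormalized) counts
+    this function accumulates over all sources:
+
+    >>> G = nx.path_graph(3)
+    >>> sorted(nx.edge_load_centrality(G).items())
+    [((0, 1), 4.0), ((1, 0), 4.0), ((1, 2), 4.0), ((2, 1), 4.0)]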
+ """ + betweenness = {} + for u, v in G.edges(): + betweenness[(u, v)] = 0.0 + betweenness[(v, u)] = 0.0 + + for source in G: + ubetween = _edge_betweenness(G, source, cutoff=cutoff) + for e, ubetweenv in ubetween.items(): + betweenness[e] += ubetweenv # cumulative total + return betweenness + + +def _edge_betweenness(G, source, nodes=None, cutoff=False): + """Edge betweenness helper.""" + # get the predecessor data + (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True) + # order the nodes by path length + onodes = [n for n, d in sorted(length.items(), key=itemgetter(1))] + # initialize betweenness, doesn't account for any edge weights + between = {} + for u, v in G.edges(nodes): + between[(u, v)] = 1.0 + between[(v, u)] = 1.0 + + while onodes: # work through all paths + v = onodes.pop() + if v in pred: + # Discount betweenness if more than one shortest path. + num_paths = len(pred[v]) + for w in pred[v]: + if w in pred: + # Discount betweenness, mult path + num_paths = len(pred[w]) + for x in pred[w]: + between[(w, x)] += between[(v, w)] / num_paths + between[(x, w)] += between[(w, v)] / num_paths + return between diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py new file mode 100644 index 0000000..0d4c871 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/percolation.py @@ -0,0 +1,128 @@ +"""Percolation centrality measures.""" + +import networkx as nx +from networkx.algorithms.centrality.betweenness import ( + _single_source_dijkstra_path_basic as dijkstra, +) +from networkx.algorithms.centrality.betweenness import ( + _single_source_shortest_path_basic as shortest_path, +) + +__all__ = ["percolation_centrality"] + + +@nx._dispatchable(node_attrs="attribute", edge_attrs="weight") +def percolation_centrality(G, attribute="percolation", states=None, weight=None): + r"""Compute the percolation centrality for nodes. + + Percolation centrality of a node $v$, at a given time, is defined + as the proportion of ‘percolated paths’ that go through that node. + + This measure quantifies relative impact of nodes based on their + topological connectivity, as well as their percolation states. + + Percolation states of nodes are used to depict network percolation + scenarios (such as during infection transmission in a social network + of individuals, spreading of computer viruses on computer networks, or + transmission of disease over a network of towns) over time. In this + measure usually the percolation state is expressed as a decimal + between 0.0 and 1.0. + + When all nodes are in the same percolated state this measure is + equivalent to betweenness centrality. + + Parameters + ---------- + G : graph + A NetworkX graph. + + attribute : None or string, optional (default='percolation') + Name of the node attribute to use for percolation state, used + if `states` is None. If a node does not set the attribute the + state of that node will be set to the default value of 1. + If all nodes do not have the attribute all nodes will be set to + 1 and the centrality measure will be equivalent to betweenness centrality. + + states : None or dict, optional (default=None) + Specify percolation states for the nodes, nodes as keys states + as values. + + weight : None or string, optional (default=None) + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. 
+ The weight of an edge is treated as the length or distance between the two sides. + + + Returns + ------- + nodes : dictionary + Dictionary of nodes with percolation centrality as the value. + + See Also + -------- + betweenness_centrality + + Notes + ----- + The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and + Liaquat Hossain [1]_ + Pair dependencies are calculated and accumulated using [2]_ + + For weighted graphs the edge weights must be greater than zero. + Zero edge weights can produce an infinite number of equal length + paths between pairs of nodes. + + References + ---------- + .. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain + Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes + during Percolation in Networks + http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095 + .. [2] Ulrik Brandes: + A Faster Algorithm for Betweenness Centrality. + Journal of Mathematical Sociology 25(2):163-177, 2001. + https://doi.org/10.1080/0022250X.2001.9990249 + """ + percolation = dict.fromkeys(G, 0.0) # b[v]=0 for v in G + + nodes = G + + if states is None: + states = nx.get_node_attributes(nodes, attribute, default=1) + + # sum of all percolation states + p_sigma_x_t = 0.0 + for v in states.values(): + p_sigma_x_t += v + + for s in nodes: + # single source shortest paths + if weight is None: # use BFS + S, P, sigma, _ = shortest_path(G, s) + else: # use Dijkstra's algorithm + S, P, sigma, _ = dijkstra(G, s, weight) + # accumulation + percolation = _accumulate_percolation( + percolation, S, P, sigma, s, states, p_sigma_x_t + ) + + n = len(G) + + for v in percolation: + percolation[v] *= 1 / (n - 2) + + return percolation + + +def _accumulate_percolation(percolation, S, P, sigma, s, states, p_sigma_x_t): + delta = dict.fromkeys(S, 0) + while S: + w = S.pop() + coeff = (1 + delta[w]) / sigma[w] + for v in P[w]: + delta[v] += sigma[v] * coeff + if w != s: + # percolation weight + pw_s_w = states[s] / (p_sigma_x_t - states[w]) + percolation[w] += delta[w] * pw_s_w + return percolation diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py new file mode 100644 index 0000000..23018af --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/reaching.py @@ -0,0 +1,209 @@ +"""Functions for computing reaching centrality of a node or a graph.""" + +import networkx as nx +from networkx.utils import pairwise + +__all__ = ["global_reaching_centrality", "local_reaching_centrality"] + + +def _average_weight(G, path, weight=None): + """Returns the average weight of an edge in a weighted path. + + Parameters + ---------- + G : graph + A networkx graph. + + path: list + A list of vertices that define the path. + + weight : None or string, optional (default=None) + If None, edge weights are ignored. Then the average weight of an edge + is assumed to be the multiplicative inverse of the length of the path. + Otherwise holds the name of the edge attribute used as weight. + """ + path_length = len(path) - 1 + if path_length <= 0: + return 0 + if weight is None: + return 1 / path_length + total_weight = sum(G.edges[i, j][weight] for i, j in pairwise(path)) + return total_weight / path_length + + +@nx._dispatchable(edge_attrs="weight") +def global_reaching_centrality(G, weight=None, normalized=True): + """Returns the global reaching centrality of a directed graph. 
+ + The *global reaching centrality* of a weighted directed graph is the + average over all nodes of the difference between the local reaching + centrality of the node and the greatest local reaching centrality of + any node in the graph [1]_. For more information on the local + reaching centrality, see :func:`local_reaching_centrality`. + Informally, the local reaching centrality is the proportion of the + graph that is reachable from the neighbors of the node. + + Parameters + ---------- + G : DiGraph + A networkx DiGraph. + + weight : None or string, optional (default=None) + Attribute to use for edge weights. If ``None``, each edge weight + is assumed to be one. A higher weight implies a stronger + connection between nodes and a *shorter* path length. + + normalized : bool, optional (default=True) + Whether to normalize the edge weights by the total sum of edge + weights. + + Returns + ------- + h : float + The global reaching centrality of the graph. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2) + >>> G.add_edge(1, 3) + >>> nx.global_reaching_centrality(G) + 1.0 + >>> G.add_edge(3, 2) + >>> nx.global_reaching_centrality(G) + 0.75 + + See also + -------- + local_reaching_centrality + + References + ---------- + .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek. + "Hierarchy Measure for Complex Networks." + *PLoS ONE* 7.3 (2012): e33799. + https://doi.org/10.1371/journal.pone.0033799 + """ + if nx.is_negatively_weighted(G, weight=weight): + raise nx.NetworkXError("edge weights must be positive") + total_weight = G.size(weight=weight) + if total_weight <= 0: + raise nx.NetworkXError("Size of G must be positive") + # If provided, weights must be interpreted as connection strength + # (so higher weights are more likely to be chosen). However, the + # shortest path algorithms in NetworkX assume the provided "weight" + # is actually a distance (so edges with higher weight are less + # likely to be chosen). Therefore we need to invert the weights when + # computing shortest paths. + # + # If weight is None, we leave it as-is so that the shortest path + # algorithm can use a faster, unweighted algorithm. + if weight is not None: + + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + + shortest_paths = dict(nx.shortest_path(G, weight=as_distance)) + else: + shortest_paths = dict(nx.shortest_path(G)) + + centrality = local_reaching_centrality + # TODO This can be trivially parallelized. + lrc = [ + centrality(G, node, paths=paths, weight=weight, normalized=normalized) + for node, paths in shortest_paths.items() + ] + + max_lrc = max(lrc) + return sum(max_lrc - c for c in lrc) / (len(G) - 1) + + +@nx._dispatchable(edge_attrs="weight") +def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True): + """Returns the local reaching centrality of a node in a directed + graph. + + The *local reaching centrality* of a node in a directed graph is the + proportion of other nodes reachable from that node [1]_. + + Parameters + ---------- + G : DiGraph + A NetworkX DiGraph. + + v : node + A node in the directed graph `G`. + + paths : dictionary (default=None) + If this is not `None` it must be a dictionary representation + of single-source shortest paths, as computed by, for example, + :func:`networkx.shortest_path` with source node `v`. Use this + keyword argument if you intend to invoke this function many + times but don't want the paths to be recomputed each time. 
+ + weight : None or string, optional (default=None) + Attribute to use for edge weights. If `None`, each edge weight + is assumed to be one. A higher weight implies a stronger + connection between nodes and a *shorter* path length. + + normalized : bool, optional (default=True) + Whether to normalize the edge weights by the total sum of edge + weights. + + Returns + ------- + h : float + The local reaching centrality of the node ``v`` in the graph + ``G``. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edges_from([(1, 2), (1, 3)]) + >>> nx.local_reaching_centrality(G, 3) + 0.0 + >>> G.add_edge(3, 2) + >>> nx.local_reaching_centrality(G, 3) + 0.5 + + See also + -------- + global_reaching_centrality + + References + ---------- + .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek. + "Hierarchy Measure for Complex Networks." + *PLoS ONE* 7.3 (2012): e33799. + https://doi.org/10.1371/journal.pone.0033799 + """ + # Corner case: graph with single node containing a self-loop + if (total_weight := G.size(weight=weight)) > 0 and len(G) == 1: + raise nx.NetworkXError( + "local_reaching_centrality of a single node with self-loop not well-defined" + ) + if paths is None: + if nx.is_negatively_weighted(G, weight=weight): + raise nx.NetworkXError("edge weights must be positive") + if total_weight <= 0: + raise nx.NetworkXError("Size of G must be positive") + if weight is not None: + # Interpret weights as lengths. + def as_distance(u, v, d): + return total_weight / d.get(weight, 1) + + paths = nx.shortest_path(G, source=v, weight=as_distance) + else: + paths = nx.shortest_path(G, source=v) + # If the graph is unweighted, simply return the proportion of nodes + # reachable from the source node ``v``. + if weight is None and G.is_directed(): + return (len(paths) - 1) / (len(G) - 1) + if normalized and weight is not None: + norm = G.size(weight=weight) / G.size() + else: + norm = 1 + # TODO This can be trivially parallelized. + avgw = (_average_weight(G, path, weight=weight) for path in paths.values()) + sum_avg_weight = sum(avgw) / norm + return sum_avg_weight / (len(G) - 1) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py new file mode 100644 index 0000000..35583cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/second_order.py @@ -0,0 +1,141 @@ +"""Copyright (c) 2015 – Thomson Licensing, SAS + +Redistribution and use in source and binary forms, with or without +modification, are permitted (subject to the limitations in the +disclaimer below) provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +* Neither the name of Thomson Licensing, or Technicolor, nor the names +of its contributors may be used to endorse or promote products derived +from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE +GRANTED BY THIS LICENSE. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
+HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+# Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com)
+
+__all__ = ["second_order_centrality"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def second_order_centrality(G, weight="weight"):
+    """Compute the second order centrality for nodes of G.
+
+    The second order centrality of a given node is the standard deviation of
+    the return times to that node of a perpetual random walk on G.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX connected and undirected graph.
+
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+
+    Returns
+    -------
+    nodes : dictionary
+        Dictionary keyed by node with second order centrality as the value.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(10)
+    >>> soc = nx.second_order_centrality(G)
+    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # most central node
+    0
+
+    Raises
+    ------
+    NetworkXException
+        If the graph G is empty, not connected, or has negative weights.
+
+    See Also
+    --------
+    betweenness_centrality
+
+    Notes
+    -----
+    Lower values of second order centrality indicate higher centrality.
+
+    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.
+
+    This code implements the analytical version of the algorithm, i.e.,
+    there is no simulation of a random walk process involved. The random walk
+    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
+    centrality values are the standard deviations for random walk return times
+    on the transformed input graph G (equal in-degree at each node, obtained
+    by adding self-loops).
+
+    Complexity of this implementation, made to run locally on a single machine,
+    is O(n^3), with n the size of G, which makes it viable only for small
+    graphs.
+
+    References
+    ----------
+    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
+       "Second order centrality: Distributed assessment of nodes criticity in
+       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
+ """ + import numpy as np + + n = len(G) + + if n == 0: + raise nx.NetworkXException("Empty graph.") + if not nx.is_connected(G): + raise nx.NetworkXException("Non connected graph.") + if any(d.get(weight, 0) < 0 for u, v, d in G.edges(data=True)): + raise nx.NetworkXException("Graph has negative edge weights.") + + # balancing G for Metropolis-Hastings random walks + G = nx.DiGraph(G) + in_deg = dict(G.in_degree(weight=weight)) + d_max = max(in_deg.values()) + for i, deg in in_deg.items(): + if deg < d_max: + G.add_edge(i, i, weight=d_max - deg) + + P = nx.to_numpy_array(G) + P /= P.sum(axis=1)[:, np.newaxis] # to transition probability matrix + + def _Qj(P, j): + P = P.copy() + P[:, j] = 0 + return P + + M = np.empty([n, n]) + + for i in range(n): + M[:, i] = np.linalg.solve( + np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0] + ) # eq 3 + + return dict( + zip( + G.nodes, + (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)), + ) + ) # eq 6 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py new file mode 100644 index 0000000..9295963 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/subgraph_alg.py @@ -0,0 +1,342 @@ +""" +Subraph centrality and communicability betweenness. +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "subgraph_centrality_exp", + "subgraph_centrality", + "communicability_betweenness_centrality", + "estrada_index", +] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def subgraph_centrality_exp(G): + r"""Returns the subgraph centrality for each node of G. + + Subgraph centrality of a node `n` is the sum of weighted closed + walks of all lengths starting and ending at node `n`. The weights + decrease with path length. Each closed walk is associated with a + connected subgraph ([1]_). + + Parameters + ---------- + G: graph + + Returns + ------- + nodes:dictionary + Dictionary of nodes with subgraph centrality as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + See Also + -------- + subgraph_centrality: + Alternative algorithm of the subgraph centrality for each node of G. + + Notes + ----- + This version of the algorithm exponentiates the adjacency matrix. + + The subgraph centrality of a node `u` in G can be found using + the matrix exponential of the adjacency matrix of G [1]_, + + .. math:: + + SC(u)=(e^A)_{uu} . + + References + ---------- + .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez, + "Subgraph centrality in complex networks", + Physical Review E 71, 056103 (2005). + https://arxiv.org/abs/cond-mat/0504730 + + Examples + -------- + (Example from [1]_) + + >>> G = nx.Graph( + ... [ + ... (1, 2), + ... (1, 5), + ... (1, 8), + ... (2, 3), + ... (2, 8), + ... (3, 4), + ... (3, 6), + ... (4, 5), + ... (4, 7), + ... (5, 6), + ... (6, 7), + ... (7, 8), + ... ] + ... 
) + >>> sc = nx.subgraph_centrality_exp(G) + >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)]) + ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90'] + """ + # alternative implementation that calculates the matrix exponential + import scipy as sp + + nodelist = list(G) # ordering of nodes in matrix + A = nx.to_numpy_array(G, nodelist) + # convert to 0-1 matrix + A[A != 0.0] = 1 + expA = sp.linalg.expm(A) + # convert diagonal to dictionary keyed by node + sc = dict(zip(nodelist, map(float, expA.diagonal()))) + return sc + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def subgraph_centrality(G): + r"""Returns subgraph centrality for each node in G. + + Subgraph centrality of a node `n` is the sum of weighted closed + walks of all lengths starting and ending at node `n`. The weights + decrease with path length. Each closed walk is associated with a + connected subgraph ([1]_). + + Parameters + ---------- + G: graph + + Returns + ------- + nodes : dictionary + Dictionary of nodes with subgraph centrality as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + See Also + -------- + subgraph_centrality_exp: + Alternative algorithm of the subgraph centrality for each node of G. + + Notes + ----- + This version of the algorithm computes eigenvalues and eigenvectors + of the adjacency matrix. + + Subgraph centrality of a node `u` in G can be found using + a spectral decomposition of the adjacency matrix [1]_, + + .. math:: + + SC(u)=\sum_{j=1}^{N}(v_{j}^{u})^2 e^{\lambda_{j}}, + + where `v_j` is an eigenvector of the adjacency matrix `A` of G + corresponding to the eigenvalue `\lambda_j`. + + Examples + -------- + (Example from [1]_) + + >>> G = nx.Graph( + ... [ + ... (1, 2), + ... (1, 5), + ... (1, 8), + ... (2, 3), + ... (2, 8), + ... (3, 4), + ... (3, 6), + ... (4, 5), + ... (4, 7), + ... (5, 6), + ... (6, 7), + ... (7, 8), + ... ] + ... ) + >>> sc = nx.subgraph_centrality(G) + >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)]) + ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90'] + + References + ---------- + .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez, + "Subgraph centrality in complex networks", + Physical Review E 71, 056103 (2005). + https://arxiv.org/abs/cond-mat/0504730 + + """ + import numpy as np + + nodelist = list(G) # ordering of nodes in matrix + A = nx.to_numpy_array(G, nodelist) + # convert to 0-1 matrix + A[np.nonzero(A)] = 1 + w, v = np.linalg.eigh(A) + vsquare = np.array(v) ** 2 + expw = np.exp(w) + xg = vsquare @ expw + # convert vector dictionary keyed by node + sc = dict(zip(nodelist, map(float, xg))) + return sc + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def communicability_betweenness_centrality(G): + r"""Returns subgraph communicability for all pairs of nodes in G. + + Communicability betweenness measure makes use of the number of walks + connecting every pair of nodes as the basis of a betweenness centrality + measure. + + Parameters + ---------- + G: graph + + Returns + ------- + nodes : dictionary + Dictionary of nodes with communicability betweenness as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + Notes + ----- + Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges, + and `A` denote the adjacency matrix of `G`. 
+
+    Let `G(r)=(V,E(r))` be the graph resulting from
+    removing all edges connected to node `r` but not the node itself.
+
+    The adjacency matrix for `G(r)` is `A+E(r)`, where `E(r)` has nonzeros
+    only in row and column `r`.
+
+    The communicability betweenness of a node `r` is [1]_
+
+    .. math::
+
+        \omega_{r} = \frac{1}{C}\sum_{p}\sum_{q}\frac{G_{prq}}{G_{pq}},
+        p\neq q, q\neq r,
+
+    where
+    `G_{prq}=(e^{A})_{pq} - (e^{A+E(r)})_{pq}` is the number of walks
+    involving node `r`,
+    `G_{pq}=(e^{A})_{pq}` is the number of walks starting
+    at node `p` and ending at node `q`,
+    and `C=(n-1)^{2}-(n-1)` is a normalization factor equal to the
+    number of terms in the sum.
+
+    The resulting `\omega_{r}` takes values between zero and one.
+    The lower bound cannot be attained for a connected
+    graph, and the upper bound is attained in the star graph.
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
+       "Communicability Betweenness in Complex Networks"
+       Physica A 388 (2009) 764-774.
+       https://arxiv.org/abs/0905.4102
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
+    >>> cbc = nx.communicability_betweenness_centrality(G)
+    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
+    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
+    """
+    import numpy as np
+    import scipy as sp
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    n = len(nodelist)
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix
+    A[np.nonzero(A)] = 1
+    expA = sp.linalg.expm(A)
+    mapping = dict(zip(nodelist, range(n)))
+    cbc = {}
+    for v in G:
+        # remove row and col of node v
+        i = mapping[v]
+        row = A[i, :].copy()
+        col = A[:, i].copy()
+        A[i, :] = 0
+        A[:, i] = 0
+        B = (expA - sp.linalg.expm(A)) / expA
+        # sum with row/col of node v and diag set to zero
+        B[i, :] = 0
+        B[:, i] = 0
+        B -= np.diag(np.diag(B))
+        cbc[v] = float(B.sum())
+        # put row and col back
+        A[i, :] = row
+        A[:, i] = col
+    # rescale when more than two nodes
+    order = len(cbc)
+    if order > 2:
+        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
+        cbc = {node: value * scale for node, value in cbc.items()}
+    return cbc
+
+
+@nx._dispatchable
+def estrada_index(G):
+    r"""Returns the Estrada index of the graph G.
+
+    The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    estrada index: float
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not undirected and simple.
+
+    Notes
+    -----
+    Let `G=(V,E)` be a simple undirected graph with `n` nodes and let
+    `\lambda_{1}\leq\lambda_{2}\leq\cdots\leq\lambda_{n}`
+    be a non-decreasing ordering of the eigenvalues of its adjacency
+    matrix `A`. The Estrada index is ([1]_, [2]_)
+
+    .. math::
+        EE(G)=\sum_{j=1}^n e^{\lambda_j}.
+
+    References
+    ----------
+    .. [1] E. Estrada, "Characterization of 3D molecular structure",
+       Chem. Phys. Lett. 319, 713 (2000).
+       https://doi.org/10.1016/S0009-2614(00)00158-5
+    .. [2] José Antonio de la Peña, Ivan Gutman, Juan Rada,
+       "Estimating the Estrada index",
+       Linear Algebra and its Applications. 427, 1 (2007).
+ https://doi.org/10.1016/j.laa.2007.06.020 + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)]) + >>> ei = nx.estrada_index(G) + >>> print(f"{ei:0.5}") + 20.55 + """ + return sum(subgraph_centrality(G).values()) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..90f41b5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-312.pyc new file mode 100644 index 0000000..52e42c7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-312.pyc new file mode 100644 index 0000000..384077a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_betweenness_centrality_subset.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-312.pyc new file mode 100644 index 0000000..8e79f13 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_closeness_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-312.pyc new file mode 100644 index 0000000..62d6fe2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-312.pyc new file mode 100644 index 0000000..3348594 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_betweenness_centrality_subset.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-312.pyc new file mode 100644 index 0000000..c8175eb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_current_flow_closeness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-312.pyc new file mode 100644 index 0000000..2914dd1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_degree_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-312.pyc new file mode 100644 index 0000000..a93c7f8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_dispersion.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-312.pyc new file mode 100644 index 0000000..5d133b7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_eigenvector_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-312.pyc new file mode 100644 index 0000000..10d39a7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_group.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-312.pyc new file mode 100644 index 0000000..517674c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_harmonic_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-312.pyc new file mode 100644 index 0000000..cc48685 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_katz_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-312.pyc new file mode 100644 index 0000000..2916871 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_laplacian_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-312.pyc new file mode 100644 index 0000000..0554db0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_load_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-312.pyc new file mode 100644 index 0000000..8d27d56 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_percolation_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-312.pyc new file mode 100644 index 0000000..56f1754 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_reaching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-312.pyc new file mode 100644 index 0000000..1bf8409 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_second_order_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-312.pyc new file mode 100644 index 0000000..dbd82cf Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_subgraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-312.pyc new file mode 100644 index 0000000..75a4295 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_trophic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_voterank.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_voterank.cpython-312.pyc new file mode 100644 index 0000000..5beb85f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/__pycache__/test_voterank.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py new file mode 100644 index 
0000000..5c988ae --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality.py @@ -0,0 +1,903 @@ +import math + +import pytest + +import networkx as nx + + +def weighted_G(): + G = nx.Graph() + G.add_edge(0, 1, weight=3) + G.add_edge(0, 2, weight=2) + G.add_edge(0, 3, weight=6) + G.add_edge(0, 4, weight=4) + G.add_edge(1, 3, weight=5) + G.add_edge(1, 5, weight=5) + G.add_edge(2, 4, weight=1) + G.add_edge(3, 4, weight=2) + G.add_edge(3, 5, weight=1) + G.add_edge(4, 5, weight=4) + return G + + +class TestBetweennessCentrality: + def test_K5(self): + """Betweenness centrality: K5""" + G = nx.complete_graph(5) + b = nx.betweenness_centrality(G, weight=None, normalized=False) + b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_K5_endpoints(self): + """Betweenness centrality: K5 endpoints""" + G = nx.complete_graph(5) + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) + b_answer = {0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + # normalized = True case + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) + b_answer = {0: 0.4, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P3_normalized(self): + """Betweenness centrality: P3 normalized""" + G = nx.path_graph(3) + b = nx.betweenness_centrality(G, weight=None, normalized=True) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P3(self): + """Betweenness centrality: P3""" + G = nx.path_graph(3) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + b = nx.betweenness_centrality(G, weight=None, normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_sample_from_P3(self): + """Betweenness centrality: P3 sample""" + G = nx.path_graph(3) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + b = nx.betweenness_centrality(G, k=3, weight=None, normalized=False, seed=1) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + b = nx.betweenness_centrality(G, k=2, weight=None, normalized=False, seed=1) + # python versions give different results with same seed + b_approx1 = {0: 0.0, 1: 1.0, 2: 0.0} + b_approx2 = {0: 0.0, 1: 0.5, 2: 0.0} + for n in sorted(G): + assert b[n] in (b_approx1[n], b_approx2[n]) + + def test_P3_endpoints(self): + """Betweenness centrality: P3 endpoints""" + G = nx.path_graph(3) + b_answer = {0: 2.0, 1: 3.0, 2: 2.0} + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + # normalized = True case + b_answer = {0: 2 / 3, 1: 1.0, 2: 2 / 3} + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_krackhardt_kite_graph(self): + """Betweenness centrality: Krackhardt kite graph""" + G = nx.krackhardt_kite_graph() + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } + for b in b_answer: + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight=None, normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def 
test_krackhardt_kite_graph_normalized(self): + """Betweenness centrality: Krackhardt kite graph normalized""" + G = nx.krackhardt_kite_graph() + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight=None, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_florentine_families_graph(self): + """Betweenness centrality: Florentine families graph""" + G = nx.florentine_families_graph() + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, weight=None, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_les_miserables_graph(self): + """Betweenness centrality: Les Miserables graph""" + G = nx.les_miserables_graph() + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.570, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.041, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.130, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + "Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.011, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.165, + "Gillenormand": 0.020, + "Magnon": 0.000, + "MlleGillenormand": 0.048, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.132, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.043, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.005, + "Bahorel": 0.002, + "Bossuet": 0.031, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + "Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } + + b = nx.betweenness_centrality(G, weight=None, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_ladder_graph(self): + """Betweenness centrality: Ladder graph""" + G = nx.Graph() # ladder_graph(3) + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} + for b in b_answer: + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight=None, normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_disconnected_path(self): + """Betweenness centrality: disconnected path""" + G = nx.Graph() 
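+        # Two components: a path on nodes 0-2 and a path on nodes 3-6.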
+ nx.add_path(G, [0, 1, 2]) + nx.add_path(G, [3, 4, 5, 6]) + b_answer = {0: 0, 1: 1, 2: 0, 3: 0, 4: 2, 5: 2, 6: 0} + b = nx.betweenness_centrality(G, weight=None, normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_disconnected_path_endpoints(self): + """Betweenness centrality: disconnected path endpoints""" + G = nx.Graph() + nx.add_path(G, [0, 1, 2]) + nx.add_path(G, [3, 4, 5, 6]) + b_answer = {0: 2, 1: 3, 2: 2, 3: 3, 4: 5, 5: 5, 6: 3} + b = nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + # normalized = True case + b = nx.betweenness_centrality(G, weight=None, normalized=True, endpoints=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n] / 21, abs=1e-7) + + def test_directed_path(self): + """Betweenness centrality: directed path""" + G = nx.DiGraph() + nx.add_path(G, [0, 1, 2]) + b = nx.betweenness_centrality(G, weight=None, normalized=False) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_directed_path_normalized(self): + """Betweenness centrality: directed path normalized""" + G = nx.DiGraph() + nx.add_path(G, [0, 1, 2]) + b = nx.betweenness_centrality(G, weight=None, normalized=True) + b_answer = {0: 0.0, 1: 0.5, 2: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + @pytest.mark.parametrize( + ("normalized", "endpoints", "is_directed", "k", "expected"), + [ + (True, True, True, None, {0: 1.0, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4}), + (True, True, True, 1, {0: 1.0, 1: 1.0, 2: 0.25, 3: 0.25, 4: 0.25}), + (True, True, False, None, {0: 1.0, 1: 0.4, 2: 0.4, 3: 0.4, 4: 0.4}), + (True, True, False, 1, {0: 1.0, 1: 1.0, 2: 0.25, 3: 0.25, 4: 0.25}), + (True, False, True, None, {0: 1.0, 1: 0, 2: 0.0, 3: 0.0, 4: 0.0}), + (True, False, True, 1, {0: 1.0, 1: math.nan, 2: 0.0, 3: 0.0, 4: 0.0}), + (True, False, False, None, {0: 1.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}), + (True, False, False, 1, {0: 1.0, 1: math.nan, 2: 0.0, 3: 0.0, 4: 0.0}), + (False, True, True, None, {0: 20.0, 1: 8.0, 2: 8.0, 3: 8.0, 4: 8.0}), + (False, True, True, 1, {0: 20.0, 1: 20.0, 2: 5.0, 3: 5.0, 4: 5.0}), + (False, True, False, None, {0: 10.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0}), + (False, True, False, 1, {0: 10.0, 1: 10.0, 2: 2.5, 3: 2.5, 4: 2.5}), + (False, False, True, None, {0: 12.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}), + (False, False, True, 1, {0: 12.0, 1: math.nan, 2: 0.0, 3: 0.0, 4: 0.0}), + (False, False, False, None, {0: 6.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0}), + (False, False, False, 1, {0: 6.0, 1: math.nan, 2: 0.0, 3: 0.0, 4: 0.0}), + ], + ) + def test_scale_with_k_on_star_graph( + self, normalized, endpoints, is_directed, k, expected + ): + # seed=1 selects node 1 as the initial node when using k=1. + # Recall node 0 is the center of the star graph. 
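+        # With k < n the centrality is estimated from shortest paths out of
+        # the k sampled source nodes only and rescaled by n / k, which is why
+        # the k=1 expectations differ from the exact (k=None) ones.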
+ G = nx.star_graph(4) + if is_directed: + G = G.to_directed() + b = nx.betweenness_centrality( + G, k=k, seed=1, endpoints=endpoints, normalized=normalized + ) + assert b == pytest.approx(expected, nan_ok=True) + + @pytest.mark.parametrize( + ("normalized", "endpoints", "is_directed", "k", "expected"), + [ + ( + *(True, True, True, None), # Use *() splatting for better autoformat + {0: 14 / 20, 1: 14 / 20, 2: 14 / 20, 3: 14 / 20, 4: 14 / 20}, + ), + ( + *(True, True, True, 3), + {0: 9 / 12, 1: 11 / 12, 2: 9 / 12, 3: 6 / 12, 4: 7 / 12}, + ), + ( + *(True, True, False, None), + {0: 10 / 20, 1: 10 / 20, 2: 10 / 20, 3: 10 / 20, 4: 10 / 20}, + ), + ( + *(True, True, False, 3), + {0: 8 / 12, 1: 7 / 12, 2: 4 / 12, 3: 4 / 12, 4: 7 / 12}, + ), + ( + *(True, False, True, None), + {0: 6 / 12, 1: 6 / 12, 2: 6 / 12, 3: 6 / 12, 4: 6 / 12}, + ), + ( + *(True, False, True, 3), + # Use 6 instead of 9 for denominator for source nodes 0, 1, and 4 + {0: 3 / 6, 1: 5 / 6, 2: 6 / 9, 3: 3 / 9, 4: 1 / 6}, + ), + ( + *(True, False, False, None), + {0: 2 / 12, 1: 2 / 12, 2: 2 / 12, 3: 2 / 12, 4: 2 / 12}, + ), + ( + *(True, False, False, 3), + # Use 6 instead of 9 for denominator for source nodes 0, 1, and 4 + {0: 2 / 6, 1: 1 / 6, 2: 1 / 9, 3: 1 / 9, 4: 1 / 6}, + ), + (False, True, True, None, {0: 14, 1: 14, 2: 14, 3: 14, 4: 14}), + ( + *(False, True, True, 3), + {0: 9 * 5 / 3, 1: 11 * 5 / 3, 2: 9 * 5 / 3, 3: 6 * 5 / 3, 4: 7 * 5 / 3}, + ), + (False, True, False, None, {0: 5, 1: 5, 2: 5, 3: 5, 4: 5}), + ( + *(False, True, False, 3), + {0: 8 * 5 / 6, 1: 7 * 5 / 6, 2: 4 * 5 / 6, 3: 4 * 5 / 6, 4: 7 * 5 / 6}, + ), + (False, False, True, None, {0: 6, 1: 6, 2: 6, 3: 6, 4: 6}), + ( + *(False, False, True, 3), + # Use 2 instead of 3 for denominator for source nodes 0, 1, and 4 + {0: 3 * 4 / 2, 1: 5 * 4 / 2, 2: 6 * 4 / 3, 3: 3 * 4 / 3, 4: 1 * 4 / 2}, + ), + (False, False, False, None, {0: 1, 1: 1, 2: 1, 3: 1, 4: 1}), + ( + *(False, False, False, 3), + # Use 4 instead of 6 for denominator for source nodes 0, 1, and 4 + {0: 2 * 4 / 4, 1: 1 * 4 / 4, 2: 1 * 4 / 6, 3: 1 * 4 / 6, 4: 1 * 4 / 4}, + ), + ], + ) + def test_scale_with_k_on_cycle_graph( + self, normalized, endpoints, is_directed, k, expected + ): + # seed=1 selects nodes 0, 1, and 4 as the initial nodes when using k=3. + G = nx.cycle_graph(5, create_using=nx.DiGraph if is_directed else nx.Graph) + b = nx.betweenness_centrality( + G, k=k, seed=1, endpoints=endpoints, normalized=normalized + ) + assert b == pytest.approx(expected) + + def test_k_out_of_bounds_raises(self): + G = nx.cycle_graph(4) + with pytest.raises(ValueError, match="larger"): + nx.betweenness_centrality(G, k=5) + with pytest.raises(ValueError, match="negative"): + nx.betweenness_centrality(G, k=-1) + with pytest.raises(ZeroDivisionError): + nx.betweenness_centrality(G, k=0) + with pytest.raises(ZeroDivisionError): + nx.betweenness_centrality(G, k=0, normalized=False) + # Test edge case: use full population when k == len(G) + # Should we warn or raise instead? 
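+        # k == len(G) samples every node exactly once, so the "estimate"
+        # must coincide with the exact computation.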
+ b1 = nx.betweenness_centrality(G, k=4, endpoints=False) + b2 = nx.betweenness_centrality(G, endpoints=False) + assert b1 == b2 + + +class TestWeightedBetweennessCentrality: + def test_K5(self): + """Weighted betweenness centrality: K5""" + G = nx.complete_graph(5) + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P3_normalized(self): + """Weighted betweenness centrality: P3 normalized""" + G = nx.path_graph(3) + b = nx.betweenness_centrality(G, weight="weight", normalized=True) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P3(self): + """Weighted betweenness centrality: P3""" + G = nx.path_graph(3) + b_answer = {0: 0.0, 1: 1.0, 2: 0.0} + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_krackhardt_kite_graph(self): + """Weighted betweenness centrality: Krackhardt kite graph""" + G = nx.krackhardt_kite_graph() + b_answer = { + 0: 1.667, + 1: 1.667, + 2: 0.000, + 3: 7.333, + 4: 0.000, + 5: 16.667, + 6: 16.667, + 7: 28.000, + 8: 16.000, + 9: 0.000, + } + for b in b_answer: + b_answer[b] /= 2 + + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_krackhardt_kite_graph_normalized(self): + """Weighted betweenness centrality: + Krackhardt kite graph normalized + """ + G = nx.krackhardt_kite_graph() + b_answer = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + b = nx.betweenness_centrality(G, weight="weight", normalized=True) + + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_florentine_families_graph(self): + """Weighted betweenness centrality: + Florentine families graph""" + G = nx.florentine_families_graph() + b_answer = { + "Acciaiuoli": 0.000, + "Albizzi": 0.212, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_les_miserables_graph(self): + """Weighted betweenness centrality: Les Miserables graph""" + G = nx.les_miserables_graph() + b_answer = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.454, + "Labarre": 0.000, + "Marguerite": 0.009, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.066, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.114, + "MmeThenardier": 0.046, + "Thenardier": 0.129, + "Cosette": 0.075, + "Javert": 0.193, + "Fauchelevent": 0.026, + "Bamatabois": 0.080, + "Perpetue": 0.000, + "Simplice": 0.001, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 
0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.023, + "Boulatruelle": 0.000, + "Eponine": 0.023, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.285, + "Gillenormand": 0.024, + "Magnon": 0.005, + "MlleGillenormand": 0.036, + "MmePontmercy": 0.005, + "MlleVaubois": 0.000, + "LtGillenormand": 0.015, + "Marius": 0.072, + "BaronessT": 0.004, + "Mabeuf": 0.089, + "Enjolras": 0.003, + "Combeferre": 0.000, + "Prouvaire": 0.000, + "Feuilly": 0.004, + "Courfeyrac": 0.001, + "Bahorel": 0.007, + "Bossuet": 0.028, + "Joly": 0.000, + "Grantaire": 0.036, + "MotherPlutarch": 0.000, + "Gueulemer": 0.025, + "Babet": 0.015, + "Claquesous": 0.042, + "Montparnasse": 0.050, + "Toussaint": 0.011, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.002, + "MmeHucheloup": 0.034, + } + + b = nx.betweenness_centrality(G, weight="weight", normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_ladder_graph(self): + """Weighted betweenness centrality: Ladder graph""" + G = nx.Graph() # ladder_graph(3) + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + b_answer = {0: 1.667, 1: 1.667, 2: 6.667, 3: 6.667, 4: 1.667, 5: 1.667} + for b in b_answer: + b_answer[b] /= 2 + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_G(self): + """Weighted betweenness centrality: G""" + G = weighted_G() + b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0} + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_G2(self): + """Weighted betweenness centrality: G2""" + G = nx.DiGraph() + G.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + + b_answer = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0} + + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_G3(self): + """Weighted betweenness centrality: G3""" + G = nx.MultiGraph(weighted_G()) + es = list(G.edges(data=True))[::2] # duplicate every other edge + G.add_edges_from(es) + b_answer = {0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0} + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_G4(self): + """Weighted betweenness centrality: G4""" + G = nx.MultiDiGraph() + G.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("s", "x", 6), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("x", "y", 3), + ("y", "s", 7), + ("y", "v", 6), + ("y", "v", 6), + ] + ) + + b_answer = {"y": 5.0, "x": 5.0, "s": 4.0, "u": 2.0, "v": 2.0} + + b = nx.betweenness_centrality(G, weight="weight", normalized=False) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + +class TestEdgeBetweennessCentrality: + def test_K5(self): + """Edge betweenness centrality: K5""" + G = nx.complete_graph(5) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) + b_answer = dict.fromkeys(G.edges(), 1) + for n in sorted(G.edges()): + assert b[n] == 
pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_K5(self): + """Edge betweenness centrality: K5""" + G = nx.complete_graph(5) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) + b_answer = dict.fromkeys(G.edges(), 1 / 10) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_C4(self): + """Edge betweenness centrality: C4""" + G = nx.cycle_graph(4) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) + b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n] / 6, abs=1e-7) + + def test_P4(self): + """Edge betweenness centrality: P4""" + G = nx.path_graph(4) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) + b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_P4(self): + """Edge betweenness centrality: P4""" + G = nx.path_graph(4) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=True) + b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n] / 6, abs=1e-7) + + def test_balanced_tree(self): + """Edge betweenness centrality: balanced tree""" + G = nx.balanced_tree(r=2, h=2) + b = nx.edge_betweenness_centrality(G, weight=None, normalized=False) + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + +class TestWeightedEdgeBetweennessCentrality: + def test_K5(self): + """Edge betweenness centrality: K5""" + G = nx.complete_graph(5) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = dict.fromkeys(G.edges(), 1) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_C4(self): + """Edge betweenness centrality: C4""" + G = nx.cycle_graph(4) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {(0, 1): 2, (0, 3): 2, (1, 2): 2, (2, 3): 2} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4(self): + """Edge betweenness centrality: P4""" + G = nx.path_graph(4) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {(0, 1): 3, (1, 2): 4, (2, 3): 3} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_balanced_tree(self): + """Edge betweenness centrality: balanced tree""" + G = nx.balanced_tree(r=2, h=2) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = {(0, 1): 12, (0, 2): 12, (1, 3): 6, (1, 4): 6, (2, 5): 6, (2, 6): 6} + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_weighted_graph(self): + """Edge betweenness centrality: weighted""" + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + (2, 4, 5), + (3, 4, 4), + ] + G = nx.Graph() + G.add_weighted_edges_from(eList) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 4): 0.5, + } + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_weighted_graph(self): + """Edge betweenness centrality: normalized 
weighted""" + eList = [ + (0, 1, 5), + (0, 2, 4), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 4, 3), + (2, 4, 5), + (3, 4, 4), + ] + G = nx.Graph() + G.add_weighted_edges_from(eList) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=True) + b_answer = { + (0, 1): 0.0, + (0, 2): 1.0, + (0, 3): 2.0, + (0, 4): 1.0, + (1, 2): 2.0, + (1, 3): 3.5, + (1, 4): 1.5, + (2, 4): 1.0, + (3, 4): 0.5, + } + norm = len(G) * (len(G) - 1) / 2 + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n] / norm, abs=1e-7) + + def test_weighted_multigraph(self): + """Edge betweenness centrality: weighted multigraph""" + eList = [ + (0, 1, 5), + (0, 1, 4), + (0, 2, 4), + (0, 3, 3), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 3, 2), + (1, 4, 3), + (1, 4, 4), + (2, 4, 5), + (3, 4, 4), + (3, 4, 4), + ] + G = nx.MultiGraph() + G.add_weighted_edges_from(eList) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=False) + b_answer = { + (0, 1, 0): 0.0, + (0, 1, 1): 0.5, + (0, 2, 0): 1.0, + (0, 3, 0): 0.75, + (0, 3, 1): 0.75, + (0, 4, 0): 1.0, + (1, 2, 0): 2.0, + (1, 3, 0): 3.0, + (1, 3, 1): 0.0, + (1, 4, 0): 1.5, + (1, 4, 1): 0.0, + (2, 4, 0): 1.0, + (3, 4, 0): 0.25, + (3, 4, 1): 0.25, + } + for n in sorted(G.edges(keys=True)): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_weighted_multigraph(self): + """Edge betweenness centrality: normalized weighted multigraph""" + eList = [ + (0, 1, 5), + (0, 1, 4), + (0, 2, 4), + (0, 3, 3), + (0, 3, 3), + (0, 4, 2), + (1, 2, 4), + (1, 3, 1), + (1, 3, 2), + (1, 4, 3), + (1, 4, 4), + (2, 4, 5), + (3, 4, 4), + (3, 4, 4), + ] + G = nx.MultiGraph() + G.add_weighted_edges_from(eList) + b = nx.edge_betweenness_centrality(G, weight="weight", normalized=True) + b_answer = { + (0, 1, 0): 0.0, + (0, 1, 1): 0.5, + (0, 2, 0): 1.0, + (0, 3, 0): 0.75, + (0, 3, 1): 0.75, + (0, 4, 0): 1.0, + (1, 2, 0): 2.0, + (1, 3, 0): 3.0, + (1, 3, 1): 0.0, + (1, 4, 0): 1.5, + (1, 4, 1): 0.0, + (2, 4, 0): 1.0, + (3, 4, 0): 0.25, + (3, 4, 1): 0.25, + } + norm = len(G) * (len(G) - 1) / 2 + for n in sorted(G.edges(keys=True)): + assert b[n] == pytest.approx(b_answer[n] / norm, abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py new file mode 100644 index 0000000..a35a401 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py @@ -0,0 +1,340 @@ +import pytest + +import networkx as nx + + +class TestSubsetBetweennessCentrality: + def test_K5(self): + """Betweenness Centrality Subset: K5""" + G = nx.complete_graph(5) + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) + b_answer = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5_directed(self): + """Betweenness Centrality Subset: P5 directed""" + G = nx.DiGraph() + nx.add_path(G, range(5)) + b_answer = {0: 0, 1: 1, 2: 1, 3: 0, 4: 0, 5: 0} + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5(self): + """Betweenness Centrality Subset: P5""" + G = nx.Graph() + nx.add_path(G, range(5)) + b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0, 4: 0, 5: 0} + b = 
nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5_multiple_target(self): + """Betweenness Centrality Subset: P5 multiple target""" + G = nx.Graph() + nx.add_path(G, range(5)) + b_answer = {0: 0, 1: 1, 2: 1, 3: 0.5, 4: 0, 5: 0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box(self): + """Betweenness Centrality Subset: box""" + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + b_answer = {0: 0, 1: 0.25, 2: 0.25, 3: 0} + b = nx.betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box_and_path(self): + """Betweenness Centrality Subset: box and path""" + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (4, 5)]) + b_answer = {0: 0, 1: 0.5, 2: 0.5, 3: 0.5, 4: 0, 5: 0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box_and_path2(self): + """Betweenness Centrality Subset: box and path multiple target""" + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)]) + b_answer = {0: 0, 1: 1.0, 2: 0.5, 20: 0.5, 3: 0.5, 4: 0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_diamond_multi_path(self): + """Betweenness Centrality Subset: Diamond Multi Path""" + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 10), + (10, 11), + (11, 12), + (12, 9), + (2, 6), + (3, 6), + (4, 6), + (5, 7), + (7, 8), + (6, 8), + (8, 9), + ] + ) + b = nx.betweenness_centrality_subset(G, sources=[1], targets=[9], weight=None) + + expected_b = { + 1: 0, + 2: 1.0 / 10, + 3: 1.0 / 10, + 4: 1.0 / 10, + 5: 1.0 / 10, + 6: 3.0 / 10, + 7: 1.0 / 10, + 8: 4.0 / 10, + 9: 0, + 10: 1.0 / 10, + 11: 1.0 / 10, + 12: 1.0 / 10, + } + + for n in sorted(G): + assert b[n] == pytest.approx(expected_b[n], abs=1e-7) + + def test_normalized_p2(self): + """ + Betweenness Centrality Subset: Normalized P2 + if n <= 2: no normalization, betweenness centrality should be 0 for all nodes. 
+ """ + G = nx.Graph() + nx.add_path(G, range(2)) + b_answer = {0: 0, 1: 0.0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[1], normalized=True, weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_P5_directed(self): + """Betweenness Centrality Subset: Normalized Directed P5""" + G = nx.DiGraph() + nx.add_path(G, range(5)) + b_answer = {0: 0, 1: 1.0 / 12.0, 2: 1.0 / 12.0, 3: 0, 4: 0, 5: 0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[3], normalized=True, weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_weighted_graph(self): + """Betweenness Centrality Subset: Weighted Graph""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=3) + G.add_edge(0, 2, weight=2) + G.add_edge(0, 3, weight=6) + G.add_edge(0, 4, weight=4) + G.add_edge(1, 3, weight=5) + G.add_edge(1, 5, weight=5) + G.add_edge(2, 4, weight=1) + G.add_edge(3, 4, weight=2) + G.add_edge(3, 5, weight=1) + G.add_edge(4, 5, weight=4) + b_answer = {0: 0.0, 1: 0.0, 2: 0.5, 3: 0.5, 4: 0.5, 5: 0.0} + b = nx.betweenness_centrality_subset( + G, sources=[0], targets=[5], normalized=False, weight="weight" + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + +class TestEdgeSubsetBetweennessCentrality: + def test_K5(self): + """Edge betweenness subset centrality: K5""" + G = nx.complete_graph(5) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[1, 3], weight=None + ) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 3)] = b_answer[(0, 1)] = 0.5 + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5_directed(self): + """Edge betweenness subset centrality: P5 directed""" + G = nx.DiGraph() + nx.add_path(G, range(5)) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5(self): + """Edge betweenness subset centrality: P5""" + G = nx.Graph() + nx.add_path(G, range(5)) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P5_multiple_target(self): + """Edge betweenness subset centrality: P5 multiple target""" + G = nx.Graph() + nx.add_path(G, range(5)) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 1 + b_answer[(3, 4)] = 0.5 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box(self): + """Edge betweenness subset centrality: box""" + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(0, 2)] = 0.25 + b_answer[(1, 3)] = b_answer[(2, 3)] = 0.25 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box_and_path(self): + """Edge betweenness subset centrality: box and path""" + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 
2), (1, 3), (2, 3), (3, 4), (4, 5)]) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(0, 2)] = 0.5 + b_answer[(1, 3)] = b_answer[(2, 3)] = 0.5 + b_answer[(3, 4)] = 0.5 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_box_and_path2(self): + """Edge betweenness subset centrality: box and path multiple target""" + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 20), (20, 3), (3, 4)]) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = 1.0 + b_answer[(1, 20)] = b_answer[(3, 20)] = 0.5 + b_answer[(1, 2)] = b_answer[(2, 3)] = 0.5 + b_answer[(3, 4)] = 0.5 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3, 4], weight=None + ) + for n in sorted(G.edges()): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_diamond_multi_path(self): + """Edge betweenness subset centrality: Diamond Multi Path""" + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 10), + (10, 11), + (11, 12), + (12, 9), + (2, 6), + (3, 6), + (4, 6), + (5, 7), + (7, 8), + (6, 8), + (8, 9), + ] + ) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(8, 9)] = 0.4 + b_answer[(6, 8)] = b_answer[(7, 8)] = 0.2 + b_answer[(2, 6)] = b_answer[(3, 6)] = b_answer[(4, 6)] = 0.2 / 3.0 + b_answer[(1, 2)] = b_answer[(1, 3)] = b_answer[(1, 4)] = 0.2 / 3.0 + b_answer[(5, 7)] = 0.2 + b_answer[(1, 5)] = 0.2 + b_answer[(9, 12)] = 0.1 + b_answer[(11, 12)] = b_answer[(10, 11)] = b_answer[(1, 10)] = 0.1 + b = nx.edge_betweenness_centrality_subset( + G, sources=[1], targets=[9], weight=None + ) + for n in G.edges(): + sort_n = tuple(sorted(n)) + assert b[n] == pytest.approx(b_answer[sort_n], abs=1e-7) + + def test_normalized_p1(self): + """ + Edge betweenness subset centrality: P1 + if n <= 1: no normalization b=0 for all nodes + """ + G = nx.Graph() + nx.add_path(G, range(1)) + b_answer = dict.fromkeys(G.edges(), 0) + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[0], normalized=True, weight=None + ) + for n in G.edges(): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_normalized_P5_directed(self): + """Edge betweenness subset centrality: Normalized Directed P5""" + G = nx.DiGraph() + nx.add_path(G, range(5)) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 1)] = b_answer[(1, 2)] = b_answer[(2, 3)] = 0.05 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[3], normalized=True, weight=None + ) + for n in G.edges(): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_weighted_graph(self): + """Edge betweenness subset centrality: Weighted Graph""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=3) + G.add_edge(0, 2, weight=2) + G.add_edge(0, 3, weight=6) + G.add_edge(0, 4, weight=4) + G.add_edge(1, 3, weight=5) + G.add_edge(1, 5, weight=5) + G.add_edge(2, 4, weight=1) + G.add_edge(3, 4, weight=2) + G.add_edge(3, 5, weight=1) + G.add_edge(4, 5, weight=4) + b_answer = dict.fromkeys(G.edges(), 0) + b_answer[(0, 2)] = b_answer[(2, 4)] = b_answer[(4, 5)] = 0.5 + b_answer[(0, 3)] = b_answer[(3, 5)] = 0.5 + b = nx.edge_betweenness_centrality_subset( + G, sources=[0], targets=[5], normalized=False, weight="weight" + ) + for n in G.edges(): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py new file mode 100644 index 0000000..5419560 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_closeness_centrality.py @@ -0,0 +1,274 @@ +""" +Tests for closeness centrality. +""" + +import pytest + +import networkx as nx + + +@pytest.fixture() +def undirected_G(): + G = nx.fast_gnp_random_graph(n=100, p=0.6, seed=123) + cc = nx.closeness_centrality(G) + return G, cc + + +class TestClosenessCentrality: + def test_wf_improved(self): + G = nx.union(nx.path_graph(4), nx.path_graph([4, 5, 6])) + c = nx.closeness_centrality(G) + cwf = nx.closeness_centrality(G, wf_improved=False) + res = {0: 0.25, 1: 0.375, 2: 0.375, 3: 0.25, 4: 0.222, 5: 0.333, 6: 0.222} + wf_res = {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5, 4: 0.667, 5: 1.0, 6: 0.667} + for n in G: + assert c[n] == pytest.approx(res[n], abs=1e-3) + assert cwf[n] == pytest.approx(wf_res[n], abs=1e-3) + + def test_digraph(self): + G = nx.path_graph(3, create_using=nx.DiGraph) + c = nx.closeness_centrality(G) + cr = nx.closeness_centrality(G.reverse()) + d = {0: 0.0, 1: 0.500, 2: 0.667} + dr = {0: 0.667, 1: 0.500, 2: 0.0} + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + assert cr[n] == pytest.approx(dr[n], abs=1e-3) + + def test_k5_closeness(self): + G = nx.complete_graph(5) + c = nx.closeness_centrality(G) + d = {0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000} + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_p3_closeness(self): + G = nx.path_graph(3) + c = nx.closeness_centrality(G) + d = {0: 0.667, 1: 1.000, 2: 0.667} + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_krackhardt_closeness(self): + G = nx.krackhardt_kite_graph() + c = nx.closeness_centrality(G) + d = { + 0: 0.529, + 1: 0.529, + 2: 0.500, + 3: 0.600, + 4: 0.500, + 5: 0.643, + 6: 0.643, + 7: 0.600, + 8: 0.429, + 9: 0.310, + } + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_florentine_families_closeness(self): + G = nx.florentine_families_graph() + c = nx.closeness_centrality(G) + d = { + "Acciaiuoli": 0.368, + "Albizzi": 0.483, + "Barbadori": 0.4375, + "Bischeri": 0.400, + "Castellani": 0.389, + "Ginori": 0.333, + "Guadagni": 0.467, + "Lamberteschi": 0.326, + "Medici": 0.560, + "Pazzi": 0.286, + "Peruzzi": 0.368, + "Ridolfi": 0.500, + "Salviati": 0.389, + "Strozzi": 0.4375, + "Tornabuoni": 0.483, + } + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_les_miserables_closeness(self): + G = nx.les_miserables_graph() + c = nx.closeness_centrality(G) + d = { + "Napoleon": 0.302, + "Myriel": 0.429, + "MlleBaptistine": 0.413, + "MmeMagloire": 0.413, + "CountessDeLo": 0.302, + "Geborand": 0.302, + "Champtercier": 0.302, + "Cravatte": 0.302, + "Count": 0.302, + "OldMan": 0.302, + "Valjean": 0.644, + "Labarre": 0.394, + "Marguerite": 0.413, + "MmeDeR": 0.394, + "Isabeau": 0.394, + "Gervais": 0.394, + "Listolier": 0.341, + "Tholomyes": 0.392, + "Fameuil": 0.341, + "Blacheville": 0.341, + "Favourite": 0.341, + "Dahlia": 0.341, + "Zephine": 0.341, + "Fantine": 0.461, + "MmeThenardier": 0.461, + "Thenardier": 0.517, + "Cosette": 0.478, + "Javert": 0.517, + "Fauchelevent": 0.402, + "Bamatabois": 0.427, + "Perpetue": 0.318, + "Simplice": 0.418, + "Scaufflaire": 0.394, + "Woman1": 0.396, + "Judge": 0.404, + "Champmathieu": 0.404, + "Brevet": 0.404, + "Chenildieu": 0.404, + "Cochepaille": 0.404, + "Pontmercy": 0.373, + "Boulatruelle": 0.342, + 
"Eponine": 0.396, + "Anzelma": 0.352, + "Woman2": 0.402, + "MotherInnocent": 0.398, + "Gribier": 0.288, + "MmeBurgon": 0.344, + "Jondrette": 0.257, + "Gavroche": 0.514, + "Gillenormand": 0.442, + "Magnon": 0.335, + "MlleGillenormand": 0.442, + "MmePontmercy": 0.315, + "MlleVaubois": 0.308, + "LtGillenormand": 0.365, + "Marius": 0.531, + "BaronessT": 0.352, + "Mabeuf": 0.396, + "Enjolras": 0.481, + "Combeferre": 0.392, + "Prouvaire": 0.357, + "Feuilly": 0.392, + "Courfeyrac": 0.400, + "Bahorel": 0.394, + "Bossuet": 0.475, + "Joly": 0.394, + "Grantaire": 0.358, + "MotherPlutarch": 0.285, + "Gueulemer": 0.463, + "Babet": 0.463, + "Claquesous": 0.452, + "Montparnasse": 0.458, + "Toussaint": 0.402, + "Child1": 0.342, + "Child2": 0.342, + "Brujon": 0.380, + "MmeHucheloup": 0.353, + } + for n in G: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_weighted_closeness(self): + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + XG = nx.Graph() + XG.add_weighted_edges_from(edges) + c = nx.closeness_centrality(XG, distance="weight") + d = {"y": 0.200, "x": 0.286, "s": 0.138, "u": 0.235, "v": 0.200} + for n in sorted(XG): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + +class TestIncrementalClosenessCentrality: + @staticmethod + def pick_add_edge(G): + u = nx.utils.arbitrary_element(G) + possible_nodes = set(G) - (set(G.neighbors(u)) | {u}) + v = nx.utils.arbitrary_element(possible_nodes) + return (u, v) + + @staticmethod + def pick_remove_edge(G): + u = nx.utils.arbitrary_element(G) + possible_nodes = list(G.neighbors(u)) + v = nx.utils.arbitrary_element(possible_nodes) + return (u, v) + + def test_directed_raises(self): + dir_G = nx.gn_graph(n=5) + prev_cc = None + edge = self.pick_add_edge(dir_G) + with pytest.raises(nx.NetworkXNotImplemented): + nx.incremental_closeness_centrality(dir_G, edge, prev_cc, insertion=True) + + def test_wrong_size_prev_cc_raises(self, undirected_G): + G, prev_cc = undirected_G + edge = self.pick_add_edge(G) + prev_cc.pop(0) + with pytest.raises(nx.NetworkXError): + nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=True) + + def test_wrong_nodes_prev_cc_raises(self, undirected_G): + G, prev_cc = undirected_G + + edge = self.pick_add_edge(G) + num_nodes = len(prev_cc) + prev_cc.pop(0) + prev_cc[num_nodes] = 0.5 + with pytest.raises(nx.NetworkXError): + nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=True) + + def test_zero_centrality(self): + G = nx.path_graph(3) + prev_cc = nx.closeness_centrality(G) + edge = self.pick_remove_edge(G) + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insertion=False) + G.remove_edges_from([edge]) + real_cc = nx.closeness_centrality(G) + shared_items = set(test_cc.items()) & set(real_cc.items()) + assert len(shared_items) == len(real_cc) + assert 0 in test_cc.values() + + def test_incremental(self, undirected_G): + # Check that incremental and regular give same output + G, _ = undirected_G + prev_cc = None + for i in range(5): + if i % 2 == 0: + # Remove an edge + insert = False + edge = self.pick_remove_edge(G) + else: + # Add an edge + insert = True + edge = self.pick_add_edge(G) + + test_cc = nx.incremental_closeness_centrality(G, edge, prev_cc, insert) + + if insert: + G.add_edges_from([edge]) + else: + G.remove_edges_from([edge]) + + real_cc = nx.closeness_centrality(G) + + assert set(test_cc.items()) == set(real_cc.items()) + + prev_cc = 
test_cc diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py new file mode 100644 index 0000000..4e3d438 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py @@ -0,0 +1,197 @@ +import pytest + +import networkx as nx +from networkx import approximate_current_flow_betweenness_centrality as approximate_cfbc +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestFlowBetweennessCentrality: + def test_K4_normalized(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + b = nx.current_flow_betweenness_centrality(G, normalized=True) + b_answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + G.add_edge(0, 1, weight=0.5, other=0.3) + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight=None) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + wb_answer = {0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555} + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="weight") + for n in sorted(G): + assert b[n] == pytest.approx(wb_answer[n], abs=1e-7) + wb_answer = {0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358} + b = nx.current_flow_betweenness_centrality(G, normalized=True, weight="other") + for n in sorted(G): + assert b[n] == pytest.approx(wb_answer[n], abs=1e-7) + + def test_K4(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) + b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4_normalized(self): + """Betweenness centrality: P4 normalized""" + G = nx.path_graph(4) + b = nx.current_flow_betweenness_centrality(G, normalized=True) + b_answer = {0: 0, 1: 2.0 / 3, 2: 2.0 / 3, 3: 0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4(self): + """Betweenness centrality: P4""" + G = nx.path_graph(4) + b = nx.current_flow_betweenness_centrality(G, normalized=False) + b_answer = {0: 0, 1: 2, 2: 2, 3: 0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_star(self): + """Betweenness centrality: star""" + G = nx.Graph() + nx.add_star(G, ["a", "b", "c", "d"]) + b = nx.current_flow_betweenness_centrality(G, normalized=True) + b_answer = {"a": 1.0, "b": 0.0, "c": 0.0, "d": 0.0} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_solvers2(self): + """Betweenness centrality: alternate solvers""" + G = nx.complete_graph(4) + for solver in ["full", "lu", "cg"]: + b = nx.current_flow_betweenness_centrality( + G, normalized=False, solver=solver + ) + b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + +class TestApproximateFlowBetweennessCentrality: + def test_K4_normalized(self): + "Approximate current-flow betweenness centrality: K4 normalized" + G = nx.complete_graph(4) + b = nx.current_flow_betweenness_centrality(G, normalized=True) + epsilon = 0.1 + ba = 
approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_K4(self):
+        "Approximate current-flow betweenness centrality: K4"
+        G = nx.complete_graph(4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=False)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=False, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon * len(G) ** 2)
+
+    def test_star(self):
+        "Approximate current-flow betweenness centrality: star"
+        G = nx.Graph()
+        nx.add_star(G, ["a", "b", "c", "d"])
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_grid(self):
+        "Approximate current-flow betweenness centrality: 2d grid"
+        G = nx.grid_2d_graph(4, 4)
+        b = nx.current_flow_betweenness_centrality(G, normalized=True)
+        epsilon = 0.1
+        ba = approximate_cfbc(G, normalized=True, epsilon=0.5 * epsilon)
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], ba[n], atol=epsilon)
+
+    def test_seed(self):
+        G = nx.complete_graph(4)
+        b = approximate_cfbc(G, normalized=False, epsilon=0.05, seed=1)
+        b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+        for n in sorted(G):
+            np.testing.assert_allclose(b[n], b_answer[n], atol=0.1)
+
+    def test_solvers(self):
+        "Approximate current-flow betweenness centrality: solvers"
+        G = nx.complete_graph(4)
+        epsilon = 0.1
+        for solver in ["full", "lu", "cg"]:
+            b = approximate_cfbc(
+                G, normalized=False, solver=solver, epsilon=0.5 * epsilon
+            )
+            b_answer = {0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
+            for n in sorted(G):
+                np.testing.assert_allclose(b[n], b_answer[n], atol=epsilon)
+
+    def test_lower_kmax(self):
+        G = nx.complete_graph(4)
+        with pytest.raises(nx.NetworkXError, match="Increase kmax or epsilon"):
+            nx.approximate_current_flow_betweenness_centrality(G, kmax=4)
+
+
+class TestWeightedFlowBetweennessCentrality:
+    pass
+
+
+class TestEdgeFlowBetweennessCentrality:
+    def test_K4_normalized(self):
+        """Edge flow betweenness centrality: K4 normalized"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow(G, normalized=True)
+        b_answer = dict.fromkeys(G.edges(), 0.25)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_K4(self):
+        """Edge flow betweenness centrality: K4"""
+        G = nx.complete_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = dict.fromkeys(G.edges(), 0.75)
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_C4(self):
+        """Edge flow betweenness centrality: C4"""
+        G = nx.cycle_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = {(0, 1): 1.25, (0, 3): 1.25, (1, 2): 1.25, (2, 3): 1.25}
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+    def test_P4(self):
+        """Edge flow betweenness centrality: P4"""
+        G = nx.path_graph(4)
+        b = edge_current_flow(G, normalized=False)
+        b_answer = {(0, 1): 1.5, (1, 2): 2.0, (2, 3): 1.5}
+        for (s, t), v1 in b_answer.items():
+            v2 = b.get((s, t), b.get((t, s)))
+            assert v1 == pytest.approx(v2, abs=1e-7)
+
+
+@pytest.mark.parametrize(
+    "centrality_func",
+    (
+        nx.current_flow_betweenness_centrality,
+        nx.edge_current_flow_betweenness_centrality,
+
nx.approximate_current_flow_betweenness_centrality, + ), +) +def test_unconnected_graphs_betweenness_centrality(centrality_func): + G = nx.Graph([(1, 2), (3, 4)]) + G.add_node(5) + with pytest.raises(nx.NetworkXError, match="Graph not connected"): + centrality_func(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py new file mode 100644 index 0000000..7b1611b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py @@ -0,0 +1,147 @@ +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx +from networkx import edge_current_flow_betweenness_centrality as edge_current_flow +from networkx import ( + edge_current_flow_betweenness_centrality_subset as edge_current_flow_subset, +) + + +class TestFlowBetweennessCentrality: + def test_K4_normalized(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_K4(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + # test weighted network + G.add_edge(0, 1, weight=0.5, other=0.3) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight=None + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True, weight="other" + ) + b_answer = nx.current_flow_betweenness_centrality( + G, normalized=True, weight="other" + ) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4_normalized(self): + """Betweenness centrality: P4 normalized""" + G = nx.path_graph(4) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4(self): + """Betweenness centrality: P4""" + G = nx.path_graph(4) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_star(self): + """Betweenness centrality: star""" + G = nx.Graph() + nx.add_star(G, ["a", "b", "c", "d"]) + b = nx.current_flow_betweenness_centrality_subset( + G, list(G), list(G), normalized=True + ) + b_answer = nx.current_flow_betweenness_centrality(G, normalized=True) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + 
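+
+# Note: every test in TestFlowBetweennessCentrality above checks the identity
+# that the subset variant with sources == targets == list(G) reproduces plain
+# nx.current_flow_betweenness_centrality.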
+# class TestWeightedFlowBetweennessCentrality(): +# pass + + +class TestEdgeFlowBetweennessCentrality: + def test_K4_normalized(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + b = edge_current_flow_subset(G, list(G), list(G), normalized=True) + b_answer = edge_current_flow(G, normalized=True) + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + + def test_K4(self): + """Betweenness centrality: K4""" + G = nx.complete_graph(4) + b = edge_current_flow_subset(G, list(G), list(G), normalized=False) + b_answer = edge_current_flow(G, normalized=False) + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + # test weighted network + G.add_edge(0, 1, weight=0.5, other=0.3) + b = edge_current_flow_subset(G, list(G), list(G), normalized=False, weight=None) + # weight is None => same as unweighted network + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + + b = edge_current_flow_subset(G, list(G), list(G), normalized=False) + b_answer = edge_current_flow(G, normalized=False) + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + + b = edge_current_flow_subset( + G, list(G), list(G), normalized=False, weight="other" + ) + b_answer = edge_current_flow(G, normalized=False, weight="other") + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + + def test_C4(self): + """Edge betweenness centrality: C4""" + G = nx.cycle_graph(4) + b = edge_current_flow_subset(G, list(G), list(G), normalized=True) + b_answer = edge_current_flow(G, normalized=True) + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) + + def test_P4(self): + """Edge betweenness centrality: P4""" + G = nx.path_graph(4) + b = edge_current_flow_subset(G, list(G), list(G), normalized=True) + b_answer = edge_current_flow(G, normalized=True) + for (s, t), v1 in b_answer.items(): + v2 = b.get((s, t), b.get((t, s))) + assert v1 == pytest.approx(v2, abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py new file mode 100644 index 0000000..2528d62 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_current_flow_closeness.py @@ -0,0 +1,43 @@ +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx + + +class TestFlowClosenessCentrality: + def test_K4(self): + """Closeness centrality: K4""" + G = nx.complete_graph(4) + b = nx.current_flow_closeness_centrality(G) + b_answer = {0: 2.0 / 3, 1: 2.0 / 3, 2: 2.0 / 3, 3: 2.0 / 3} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P4(self): + """Closeness centrality: P4""" + G = nx.path_graph(4) + b = nx.current_flow_closeness_centrality(G) + b_answer = {0: 1.0 / 6, 1: 1.0 / 4, 2: 1.0 / 4, 3: 1.0 / 6} + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_star(self): + """Closeness centrality: star""" + G = nx.Graph() + nx.add_star(G, ["a", "b", "c", "d"]) + b = nx.current_flow_closeness_centrality(G) + b_answer = {"a": 1.0 / 3, "b": 0.6 / 3, "c": 0.6 / 3, "d": 0.6 / 3} + 
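+        # Current-flow closeness is the reciprocal of a node's total effective
+        # resistance to all others: hub "a" is 1 unit from each leaf (1 / 3),
+        # while each leaf is 1 + 2 + 2 = 5 away (0.6 / 3 == 1 / 5).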
for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_current_flow_closeness_centrality_not_connected(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3]) + with pytest.raises(nx.NetworkXError): + nx.current_flow_closeness_centrality(G) + + +class TestWeightedFlowClosenessCentrality: + pass diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py new file mode 100644 index 0000000..e39aa3b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_degree_centrality.py @@ -0,0 +1,144 @@ +""" +Unit tests for degree centrality. +""" + +import pytest + +import networkx as nx + + +class TestDegreeCentrality: + def setup_method(self): + self.K = nx.krackhardt_kite_graph() + self.P3 = nx.path_graph(3) + self.K5 = nx.complete_graph(5) + + F = nx.Graph() # Florentine families + F.add_edge("Acciaiuoli", "Medici") + F.add_edge("Castellani", "Peruzzi") + F.add_edge("Castellani", "Strozzi") + F.add_edge("Castellani", "Barbadori") + F.add_edge("Medici", "Barbadori") + F.add_edge("Medici", "Ridolfi") + F.add_edge("Medici", "Tornabuoni") + F.add_edge("Medici", "Albizzi") + F.add_edge("Medici", "Salviati") + F.add_edge("Salviati", "Pazzi") + F.add_edge("Peruzzi", "Strozzi") + F.add_edge("Peruzzi", "Bischeri") + F.add_edge("Strozzi", "Ridolfi") + F.add_edge("Strozzi", "Bischeri") + F.add_edge("Ridolfi", "Tornabuoni") + F.add_edge("Tornabuoni", "Guadagni") + F.add_edge("Albizzi", "Ginori") + F.add_edge("Albizzi", "Guadagni") + F.add_edge("Bischeri", "Guadagni") + F.add_edge("Guadagni", "Lamberteschi") + self.F = F + + G = nx.DiGraph() + G.add_edge(0, 5) + G.add_edge(1, 5) + G.add_edge(2, 5) + G.add_edge(3, 5) + G.add_edge(4, 5) + G.add_edge(5, 6) + G.add_edge(5, 7) + G.add_edge(5, 8) + self.G = G + + def test_degree_centrality_1(self): + d = nx.degree_centrality(self.K5) + exact = dict(zip(range(5), [1] * 5)) + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + def test_degree_centrality_2(self): + d = nx.degree_centrality(self.P3) + exact = {0: 0.5, 1: 1, 2: 0.5} + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + def test_degree_centrality_3(self): + d = nx.degree_centrality(self.K) + exact = { + 0: 0.444, + 1: 0.444, + 2: 0.333, + 3: 0.667, + 4: 0.333, + 5: 0.556, + 6: 0.556, + 7: 0.333, + 8: 0.222, + 9: 0.111, + } + for n, dc in d.items(): + assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7) + + def test_degree_centrality_4(self): + d = nx.degree_centrality(self.F) + names = sorted(self.F.nodes()) + dcs = [ + 0.071, + 0.214, + 0.143, + 0.214, + 0.214, + 0.071, + 0.286, + 0.071, + 0.429, + 0.071, + 0.214, + 0.214, + 0.143, + 0.286, + 0.214, + ] + exact = dict(zip(names, dcs)) + for n, dc in d.items(): + assert exact[n] == pytest.approx(float(f"{dc:.3f}"), abs=1e-7) + + def test_indegree_centrality(self): + d = nx.in_degree_centrality(self.G) + exact = { + 0: 0.0, + 1: 0.0, + 2: 0.0, + 3: 0.0, + 4: 0.0, + 5: 0.625, + 6: 0.125, + 7: 0.125, + 8: 0.125, + } + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + def test_outdegree_centrality(self): + d = nx.out_degree_centrality(self.G) + exact = { + 0: 0.125, + 1: 0.125, + 2: 0.125, + 3: 0.125, + 4: 0.125, + 5: 0.375, + 6: 0.0, + 7: 0.0, + 8: 0.0, + } + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + def 
test_small_graph_centrality(self): + G = nx.empty_graph(create_using=nx.DiGraph) + assert {} == nx.degree_centrality(G) + assert {} == nx.out_degree_centrality(G) + assert {} == nx.in_degree_centrality(G) + + G = nx.empty_graph(1, create_using=nx.DiGraph) + assert {0: 1} == nx.degree_centrality(G) + assert {0: 1} == nx.out_degree_centrality(G) + assert {0: 1} == nx.in_degree_centrality(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py new file mode 100644 index 0000000..05de1c4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_dispersion.py @@ -0,0 +1,73 @@ +import networkx as nx + + +def small_ego_G(): + """The sample network from https://arxiv.org/pdf/1310.6753v1.pdf""" + edges = [ + ("a", "b"), + ("a", "c"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("b", "f"), + ("c", "d"), + ("c", "f"), + ("c", "h"), + ("d", "f"), + ("e", "f"), + ("f", "h"), + ("h", "j"), + ("h", "k"), + ("i", "j"), + ("i", "k"), + ("j", "k"), + ("u", "a"), + ("u", "b"), + ("u", "c"), + ("u", "d"), + ("u", "e"), + ("u", "f"), + ("u", "g"), + ("u", "h"), + ("u", "i"), + ("u", "j"), + ("u", "k"), + ] + G = nx.Graph() + G.add_edges_from(edges) + + return G + + +class TestDispersion: + def test_article(self): + """our algorithm matches article's""" + G = small_ego_G() + disp_uh = nx.dispersion(G, "u", "h", normalized=False) + disp_ub = nx.dispersion(G, "u", "b", normalized=False) + assert disp_uh == 4 + assert disp_ub == 1 + + def test_results_length(self): + """there is a result for every node""" + G = small_ego_G() + disp = nx.dispersion(G) + disp_Gu = nx.dispersion(G, "u") + disp_uv = nx.dispersion(G, "u", "h") + assert len(disp) == len(G) + assert len(disp_Gu) == len(G) - 1 + assert isinstance(disp_uv, float) + + def test_dispersion_v_only(self): + G = small_ego_G() + disp_G_h = nx.dispersion(G, v="h", normalized=False) + disp_G_h_normalized = nx.dispersion(G, v="h", normalized=True) + assert disp_G_h == {"c": 0, "f": 0, "j": 0, "k": 0, "u": 4} + assert disp_G_h_normalized == {"c": 0.0, "f": 0.0, "j": 0.0, "k": 0.0, "u": 1.0} + + def test_impossible_things(self): + G = nx.karate_club_graph() + disp = nx.dispersion(G) + for u in disp: + for v in disp[u]: + assert disp[u][v] >= 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py new file mode 100644 index 0000000..4a73e1a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py @@ -0,0 +1,186 @@ +import math + +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestEigenvectorCentrality: + def test_K5(self): + """Eigenvector centrality: K5""" + G = nx.complete_graph(5) + b = nx.eigenvector_centrality(G) + v = math.sqrt(1 / 5.0) + b_answer = dict.fromkeys(G, v) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + nstart = dict.fromkeys(G, 1) + b = nx.eigenvector_centrality(G, nstart=nstart) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + b = nx.eigenvector_centrality_numpy(G) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-3) + + def test_P3(self): + """Eigenvector centrality: P3""" + G = 
nx.path_graph(3)
+        b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+        b = nx.eigenvector_centrality_numpy(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+        b = nx.eigenvector_centrality(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_P3_unweighted(self):
+        """Eigenvector centrality: P3"""
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5, 1: 0.7071, 2: 0.5}
+        b = nx.eigenvector_centrality_numpy(G, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_maxiter(self):
+        with pytest.raises(nx.PowerIterationFailedConvergence):
+            G = nx.path_graph(3)
+            nx.eigenvector_centrality(G, max_iter=0)
+
+
+class TestEigenvectorCentralityDirected:
+    @classmethod
+    def setup_class(cls):
+        G = nx.DiGraph()
+
+        edges = [
+            (1, 2),
+            (1, 3),
+            (2, 4),
+            (3, 2),
+            (3, 5),
+            (4, 2),
+            (4, 5),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (5, 8),
+            (6, 8),
+            (7, 1),
+            (7, 5),
+            (7, 8),
+            (8, 6),
+            (8, 7),
+        ]
+
+        G.add_edges_from(edges, weight=2.0)
+        cls.G = G.reverse()
+        cls.G.evc = [
+            0.25368793,
+            0.19576478,
+            0.32817092,
+            0.40430835,
+            0.48199885,
+            0.15724483,
+            0.51346196,
+            0.32475403,
+        ]
+
+        # H is the same graph without edge weights; eigenvector centrality
+        # ignores weights unless asked for them, so the expected values are
+        # identical.
+        H = nx.DiGraph()
+        H.add_edges_from(edges)
+        cls.H = H.reverse()
+        cls.H.evc = [
+            0.25368793,
+            0.19576478,
+            0.32817092,
+            0.40430835,
+            0.48199885,
+            0.15724483,
+            0.51346196,
+            0.32475403,
+        ]
+
+    def test_eigenvector_centrality_weighted(self):
+        G = self.G
+        p = nx.eigenvector_centrality(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-4)
+
+    def test_eigenvector_centrality_weighted_numpy(self):
+        G = self.G
+        p = nx.eigenvector_centrality_numpy(G)
+        for a, b in zip(list(p.values()), self.G.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+    def test_eigenvector_centrality_unweighted(self):
+        G = self.H
+        p = nx.eigenvector_centrality(G)
+        for a, b in zip(list(p.values()), self.H.evc):
+            assert a == pytest.approx(b, abs=1e-4)
+
+    def test_eigenvector_centrality_unweighted_numpy(self):
+        G = self.H
+        p = nx.eigenvector_centrality_numpy(G)
+        for a, b in zip(list(p.values()), self.H.evc):
+            assert a == pytest.approx(b, abs=1e-7)
+
+
+class TestEigenvectorCentralityExceptions:
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality(nx.MultiGraph())
+
+    def test_multigraph_numpy(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality_numpy(nx.MultiGraph())
+
+    def test_null(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality(nx.Graph())
+
+    def test_null_numpy(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.eigenvector_centrality_numpy(nx.Graph())
+
+    @pytest.mark.parametrize(
+        "G",
+        [
+            nx.empty_graph(3),
+            nx.DiGraph([(0, 1), (1, 2)]),
+        ],
+    )
+    def test_disconnected_numpy(self, G):
+        msg = "does not give consistent results for disconnected"
+        with pytest.raises(nx.AmbiguousSolution, match=msg):
+            nx.eigenvector_centrality_numpy(G)
+
+    def test_zero_nstart(self):
+        G = nx.Graph([(1, 2), (1, 3), (2, 3)])
+        with pytest.raises(
+            nx.NetworkXException, match="initial vector cannot have all zero values"
+        ):
+            nx.eigenvector_centrality(G, nstart=dict.fromkeys(G, 0))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py new file mode 100644 index 0000000..82343f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_group.py @@ -0,0 +1,277 @@ +""" +Tests for Group Centrality Measures +""" + +import pytest + +import networkx as nx + + +class TestGroupBetweennessCentrality: + def test_group_betweenness_single_node(self): + """ + Group betweenness centrality for single node group + """ + G = nx.path_graph(5) + C = [1] + b = nx.group_betweenness_centrality( + G, C, weight=None, normalized=False, endpoints=False + ) + b_answer = 3.0 + assert b == b_answer + + def test_group_betweenness_with_endpoints(self): + """ + Group betweenness centrality for single node group + """ + G = nx.path_graph(5) + C = [1] + b = nx.group_betweenness_centrality( + G, C, weight=None, normalized=False, endpoints=True + ) + b_answer = 7.0 + assert b == b_answer + + def test_group_betweenness_normalized(self): + """ + Group betweenness centrality for group with more than + 1 node and normalized + """ + G = nx.path_graph(5) + C = [1, 3] + b = nx.group_betweenness_centrality( + G, C, weight=None, normalized=True, endpoints=False + ) + b_answer = 1.0 + assert b == b_answer + + def test_two_group_betweenness_value_zero(self): + """ + Group betweenness centrality value of 0 + """ + G = nx.cycle_graph(7) + C = [[0, 1, 6], [0, 1, 5]] + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False) + b_answer = [0.0, 3.0] + assert b == b_answer + + def test_group_betweenness_value_zero(self): + """ + Group betweenness centrality value of 0 + """ + G = nx.cycle_graph(6) + C = [0, 1, 5] + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False) + b_answer = 0.0 + assert b == b_answer + + def test_group_betweenness_disconnected_graph(self): + """ + Group betweenness centrality in a disconnected graph + """ + G = nx.path_graph(5) + G.remove_edge(0, 1) + C = [1] + b = nx.group_betweenness_centrality(G, C, weight=None, normalized=False) + b_answer = 0.0 + assert b == b_answer + + def test_group_betweenness_node_not_in_graph(self): + """ + Node(s) in C not in graph, raises NodeNotFound exception + """ + with pytest.raises(nx.NodeNotFound): + nx.group_betweenness_centrality(nx.path_graph(5), [4, 7, 8]) + + def test_group_betweenness_directed_weighted(self): + """ + Group betweenness centrality in a directed and weighted graph + """ + G = nx.DiGraph() + G.add_edge(1, 0, weight=1) + G.add_edge(0, 2, weight=2) + G.add_edge(1, 2, weight=3) + G.add_edge(3, 1, weight=4) + G.add_edge(2, 3, weight=1) + G.add_edge(4, 3, weight=6) + G.add_edge(2, 4, weight=7) + C = [1, 2] + b = nx.group_betweenness_centrality(G, C, weight="weight", normalized=False) + b_answer = 5.0 + assert b == b_answer + + +class TestProminentGroup: + np = pytest.importorskip("numpy") + pd = pytest.importorskip("pandas") + + def test_prominent_group_single_node(self): + """ + Prominent group for single node + """ + G = nx.path_graph(5) + k = 1 + b, g = nx.prominent_group(G, k, normalized=False, endpoints=False) + b_answer, g_answer = 4.0, [2] + assert b == b_answer and g == g_answer + + def test_prominent_group_with_c(self): + """ + Prominent group without some nodes + """ + G = nx.path_graph(5) + k = 1 + b, g = nx.prominent_group(G, k, normalized=False, C=[2]) + b_answer, g_answer = 3.0, [1] + assert b == b_answer and g == g_answer + + def test_prominent_group_normalized_endpoints(self): + """ + Prominent group with normalized result, 
with endpoints
+        """
+        G = nx.cycle_graph(7)
+        k = 2
+        b, g = nx.prominent_group(G, k, normalized=True, endpoints=True)
+        b_answer, g_answer = 1.7, [2, 5]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_disconnected_graph(self):
+        """
+        Prominent group of disconnected graph
+        """
+        G = nx.path_graph(6)
+        G.remove_edge(0, 1)
+        k = 1
+        b, g = nx.prominent_group(G, k, weight=None, normalized=False)
+        b_answer, g_answer = 4.0, [3]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_node_not_in_graph(self):
+        """
+        Node(s) in C not in graph, raises NodeNotFound exception
+        """
+        with pytest.raises(nx.NodeNotFound):
+            nx.prominent_group(nx.path_graph(5), 1, C=[10])
+
+    def test_prominent_group_directed_weighted(self):
+        """
+        Prominent group in a directed and weighted graph
+        """
+        G = nx.DiGraph()
+        G.add_edge(1, 0, weight=1)
+        G.add_edge(0, 2, weight=2)
+        G.add_edge(1, 2, weight=3)
+        G.add_edge(3, 1, weight=4)
+        G.add_edge(2, 3, weight=1)
+        G.add_edge(4, 3, weight=6)
+        G.add_edge(2, 4, weight=7)
+        k = 2
+        b, g = nx.prominent_group(G, k, weight="weight", normalized=False)
+        b_answer, g_answer = 5.0, [1, 2]
+        assert b == b_answer and g == g_answer
+
+    def test_prominent_group_greedy_algorithm(self):
+        """
+        Prominent group found with the greedy algorithm
+        """
+        G = nx.cycle_graph(7)
+        k = 2
+        b, g = nx.prominent_group(G, k, normalized=True, endpoints=True, greedy=True)
+        b_answer, g_answer = 1.7, [6, 3]
+        assert b == b_answer and g == g_answer
+
+
+class TestGroupClosenessCentrality:
+    def test_group_closeness_single_node(self):
+        """
+        Group closeness centrality for a single node group
+        """
+        G = nx.path_graph(5)
+        c = nx.group_closeness_centrality(G, [1])
+        c_answer = nx.closeness_centrality(G, 1)
+        assert c == c_answer
+
+    def test_group_closeness_disconnected(self):
+        """
+        Group closeness centrality for a disconnected graph
+        """
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4])
+        c = nx.group_closeness_centrality(G, [1, 2])
+        c_answer = 0
+        assert c == c_answer
+
+    def test_group_closeness_multiple_node(self):
+        """
+        Group closeness centrality for a group with more than
+        1 node
+        """
+        G = nx.path_graph(4)
+        c = nx.group_closeness_centrality(G, [1, 2])
+        c_answer = 1
+        assert c == c_answer
+
+    def test_group_closeness_node_not_in_graph(self):
+        """
+        Node(s) in S not in graph, raises NodeNotFound exception
+        """
+        with pytest.raises(nx.NodeNotFound):
+            nx.group_closeness_centrality(nx.path_graph(5), [6, 7, 8])
+
+
+class TestGroupDegreeCentrality:
+    def test_group_degree_centrality_single_node(self):
+        """
+        Group degree centrality for a single node group
+        """
+        G = nx.path_graph(4)
+        d = nx.group_degree_centrality(G, [1])
+        d_answer = nx.degree_centrality(G)[1]
+        assert d == d_answer
+
+    def test_group_degree_centrality_multiple_node(self):
+        """
+        Group degree centrality for group with more than
+        1 node
+        """
+        G = nx.Graph()
+        G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
+        G.add_edges_from(
+            [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
+        )
+        d = nx.group_degree_centrality(G, [1, 2])
+        d_answer = 1
+        assert d == d_answer
+
+    def test_group_in_degree_centrality(self):
+        """
+        Group in-degree centrality in a DiGraph
+        """
+        G = nx.DiGraph()
+        G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8])
+        G.add_edges_from(
+            [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)]
+        )
+        d = nx.group_in_degree_centrality(G, [1, 2])
+        d_answer = 0
+        assert d == d_answer
+
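+    # Group degree centrality is |N(S) - S| / (|V| - |S|): the fraction of
+    # nodes outside the group S adjacent to at least one member of S
+    # (predecessors for the in-degree variant above, successors for the
+    # out-degree variant below).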
""" + Group out-degree centrality in a DiGraph + """ + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + G.add_edges_from( + [(1, 2), (1, 3), (1, 6), (1, 7), (1, 8), (2, 3), (2, 4), (2, 5)] + ) + d = nx.group_out_degree_centrality(G, [1, 2]) + d_answer = 1 + assert d == d_answer + + def test_group_degree_centrality_node_not_in_graph(self): + """ + Node(s) in S not in graph, raises NetworkXError + """ + with pytest.raises(nx.NetworkXError): + nx.group_degree_centrality(nx.path_graph(5), [6, 7, 8]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py new file mode 100644 index 0000000..4b3dc4a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_harmonic_centrality.py @@ -0,0 +1,122 @@ +""" +Tests for degree centrality. +""" + +import pytest + +import networkx as nx +from networkx.algorithms.centrality import harmonic_centrality + + +class TestClosenessCentrality: + @classmethod + def setup_class(cls): + cls.P3 = nx.path_graph(3) + cls.P4 = nx.path_graph(4) + cls.K5 = nx.complete_graph(5) + + cls.C4 = nx.cycle_graph(4) + cls.C4_directed = nx.cycle_graph(4, create_using=nx.DiGraph) + + cls.C5 = nx.cycle_graph(5) + + cls.T = nx.balanced_tree(r=2, h=2) + + cls.Gb = nx.DiGraph() + cls.Gb.add_edges_from([(0, 1), (0, 2), (0, 4), (2, 1), (2, 3), (4, 3)]) + + def test_p3_harmonic(self): + c = harmonic_centrality(self.P3) + d = {0: 1.5, 1: 2, 2: 1.5} + for n in sorted(self.P3): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_p4_harmonic(self): + c = harmonic_centrality(self.P4) + d = {0: 1.8333333, 1: 2.5, 2: 2.5, 3: 1.8333333} + for n in sorted(self.P4): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_clique_complete(self): + c = harmonic_centrality(self.K5) + d = {0: 4, 1: 4, 2: 4, 3: 4, 4: 4} + for n in sorted(self.P3): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_cycle_C4(self): + c = harmonic_centrality(self.C4) + d = {0: 2.5, 1: 2.5, 2: 2.5, 3: 2.5} + for n in sorted(self.C4): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_cycle_C5(self): + c = harmonic_centrality(self.C5) + d = {0: 3, 1: 3, 2: 3, 3: 3, 4: 3, 5: 4} + for n in sorted(self.C5): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_bal_tree(self): + c = harmonic_centrality(self.T) + d = {0: 4.0, 1: 4.1666, 2: 4.1666, 3: 2.8333, 4: 2.8333, 5: 2.8333, 6: 2.8333} + for n in sorted(self.T): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_exampleGraph(self): + c = harmonic_centrality(self.Gb) + d = {0: 0, 1: 2, 2: 1, 3: 2.5, 4: 1} + for n in sorted(self.Gb): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_weighted_harmonic(self): + XG = nx.DiGraph() + XG.add_weighted_edges_from( + [ + ("a", "b", 10), + ("d", "c", 5), + ("a", "c", 1), + ("e", "f", 2), + ("f", "c", 1), + ("a", "f", 3), + ] + ) + c = harmonic_centrality(XG, distance="weight") + d = {"a": 0, "b": 0.1, "c": 2.533, "d": 0, "e": 0, "f": 0.83333} + for n in sorted(XG): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_empty(self): + G = nx.DiGraph() + c = harmonic_centrality(G, distance="weight") + d = {} + assert c == d + + def test_singleton(self): + G = nx.DiGraph() + G.add_node(0) + c = harmonic_centrality(G, distance="weight") + d = {0: 0} + assert c == d + + def test_cycle_c4_directed(self): + c = harmonic_centrality(self.C4_directed, nbunch=[0, 1], sources=[1, 2]) 
+ d = {0: 0.833, 1: 0.333} + for n in [0, 1]: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_cycle_c4_directed_subset(self): + c = harmonic_centrality(self.C4_directed, nbunch=[0, 1]) + d = 1.833 + for n in [0, 1]: + assert c[n] == pytest.approx(d, abs=1e-3) + + def test_p3_harmonic_subset(self): + c = harmonic_centrality(self.P3, sources=[0, 1]) + d = {0: 1, 1: 1, 2: 1.5} + for n in self.P3: + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_p4_harmonic_subset(self): + c = harmonic_centrality(self.P4, nbunch=[2, 3], sources=[0, 1]) + d = {2: 1.5, 3: 0.8333333} + for n in [2, 3]: + assert c[n] == pytest.approx(d[n], abs=1e-3) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py new file mode 100644 index 0000000..361a370 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_katz_centrality.py @@ -0,0 +1,345 @@ +import math + +import pytest + +import networkx as nx + + +class TestKatzCentrality: + def test_K5(self): + """Katz centrality: K5""" + G = nx.complete_graph(5) + alpha = 0.1 + b = nx.katz_centrality(G, alpha) + v = math.sqrt(1 / 5.0) + b_answer = dict.fromkeys(G, v) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + nstart = dict.fromkeys(G, 1) + b = nx.katz_centrality(G, alpha, nstart=nstart) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-7) + + def test_P3(self): + """Katz centrality: P3""" + alpha = 0.1 + G = nx.path_graph(3) + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} + b = nx.katz_centrality(G, alpha) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-4) + + def test_maxiter(self): + with pytest.raises(nx.PowerIterationFailedConvergence): + nx.katz_centrality(nx.path_graph(3), 0.1, max_iter=0) + + def test_beta_as_scalar(self): + alpha = 0.1 + beta = 0.1 + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} + G = nx.path_graph(3) + b = nx.katz_centrality(G, alpha, beta) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-4) + + def test_beta_as_dict(self): + alpha = 0.1 + beta = {0: 1.0, 1: 1.0, 2: 1.0} + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} + G = nx.path_graph(3) + b = nx.katz_centrality(G, alpha, beta) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-4) + + def test_multiple_alpha(self): + alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] + for alpha in alpha_list: + b_answer = { + 0.1: { + 0: 0.5598852584152165, + 1: 0.6107839182711449, + 2: 0.5598852584152162, + }, + 0.2: { + 0: 0.5454545454545454, + 1: 0.6363636363636365, + 2: 0.5454545454545454, + }, + 0.3: { + 0: 0.5333964609104419, + 1: 0.6564879518897746, + 2: 0.5333964609104419, + }, + 0.4: { + 0: 0.5232045649263551, + 1: 0.6726915834767423, + 2: 0.5232045649263551, + }, + 0.5: { + 0: 0.5144957746691622, + 1: 0.6859943117075809, + 2: 0.5144957746691622, + }, + 0.6: { + 0: 0.5069794004195823, + 1: 0.6970966755769258, + 2: 0.5069794004195823, + }, + } + G = nx.path_graph(3) + b = nx.katz_centrality(G, alpha) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4) + + def test_multigraph(self): + with pytest.raises(nx.NetworkXException): + nx.katz_centrality(nx.MultiGraph(), 0.1) + + def test_empty(self): + e = nx.katz_centrality(nx.Graph(), 0.1) + assert e 
== {}
+
+    def test_bad_beta(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            beta = {0: 77}
+            nx.katz_centrality(G, 0.1, beta=beta)
+
+    def test_bad_beta_number(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            nx.katz_centrality(G, 0.1, beta="foo")
+
+
+class TestKatzCentralityNumpy:
+    @classmethod
+    def setup_class(cls):
+        global np
+        np = pytest.importorskip("numpy")
+        pytest.importorskip("scipy")
+
+    def test_K5(self):
+        """Katz centrality: K5"""
+        G = nx.complete_graph(5)
+        alpha = 0.1
+        b = nx.katz_centrality(G, alpha)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.eigenvector_centrality_numpy(G)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_P3(self):
+        """Katz centrality: P3"""
+        alpha = 0.1
+        G = nx.path_graph(3)
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        b = nx.katz_centrality_numpy(G, alpha)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_beta_as_scalar(self):
+        alpha = 0.1
+        beta = 0.1
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality_numpy(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_beta_as_dict(self):
+        alpha = 0.1
+        beta = {0: 1.0, 1: 1.0, 2: 1.0}
+        b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}
+        G = nx.path_graph(3)
+        b = nx.katz_centrality_numpy(G, alpha, beta)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-4)
+
+    def test_multiple_alpha(self):
+        alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
+        for alpha in alpha_list:
+            b_answer = {
+                0.1: {
+                    0: 0.5598852584152165,
+                    1: 0.6107839182711449,
+                    2: 0.5598852584152162,
+                },
+                0.2: {
+                    0: 0.5454545454545454,
+                    1: 0.6363636363636365,
+                    2: 0.5454545454545454,
+                },
+                0.3: {
+                    0: 0.5333964609104419,
+                    1: 0.6564879518897746,
+                    2: 0.5333964609104419,
+                },
+                0.4: {
+                    0: 0.5232045649263551,
+                    1: 0.6726915834767423,
+                    2: 0.5232045649263551,
+                },
+                0.5: {
+                    0: 0.5144957746691622,
+                    1: 0.6859943117075809,
+                    2: 0.5144957746691622,
+                },
+                0.6: {
+                    0: 0.5069794004195823,
+                    1: 0.6970966755769258,
+                    2: 0.5069794004195823,
+                },
+            }
+            G = nx.path_graph(3)
+            b = nx.katz_centrality_numpy(G, alpha)
+            for n in sorted(G):
+                assert b[n] == pytest.approx(b_answer[alpha][n], abs=1e-4)
+
+    def test_multigraph(self):
+        with pytest.raises(nx.NetworkXException):
+            nx.katz_centrality(nx.MultiGraph(), 0.1)
+
+    def test_empty(self):
+        e = nx.katz_centrality(nx.Graph(), 0.1)
+        assert e == {}
+
+    def test_bad_beta(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            beta = {0: 77}
+            nx.katz_centrality_numpy(G, 0.1, beta=beta)
+
+    def test_bad_beta_number(self):
+        with pytest.raises(nx.NetworkXException):
+            G = nx.Graph([(0, 1)])
+            nx.katz_centrality_numpy(G, 0.1, beta="foo")
+
+    def test_K5_unweighted(self):
+        """Katz centrality: K5"""
+        G = nx.complete_graph(5)
+        alpha = 0.1
+        b = nx.katz_centrality(G, alpha, weight=None)
+        v = math.sqrt(1 / 5.0)
+        b_answer = dict.fromkeys(G, v)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-7)
+        b = nx.eigenvector_centrality_numpy(G, weight=None)
+        for n in sorted(G):
+            assert b[n] == pytest.approx(b_answer[n], abs=1e-3)
+
+    def test_P3_unweighted(self):
+        """Katz centrality: P3"""
+        alpha = 0.1
+        G = 
nx.path_graph(3) + b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} + b = nx.katz_centrality_numpy(G, alpha, weight=None) + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-4) + + +class TestKatzCentralityDirected: + @classmethod + def setup_class(cls): + G = nx.DiGraph() + edges = [ + (1, 2), + (1, 3), + (2, 4), + (3, 2), + (3, 5), + (4, 2), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (5, 8), + (6, 8), + (7, 1), + (7, 5), + (7, 8), + (8, 6), + (8, 7), + ] + G.add_edges_from(edges, weight=2.0) + cls.G = G.reverse() + cls.G.alpha = 0.1 + cls.G.evc = [ + 0.3289589783189635, + 0.2832077296243516, + 0.3425906003685471, + 0.3970420865198392, + 0.41074871061646284, + 0.272257430756461, + 0.4201989685435462, + 0.34229059218038554, + ] + + H = nx.DiGraph(edges) + cls.H = G.reverse() + cls.H.alpha = 0.1 + cls.H.evc = [ + 0.3289589783189635, + 0.2832077296243516, + 0.3425906003685471, + 0.3970420865198392, + 0.41074871061646284, + 0.272257430756461, + 0.4201989685435462, + 0.34229059218038554, + ] + + def test_katz_centrality_weighted(self): + G = self.G + alpha = self.G.alpha + p = nx.katz_centrality(G, alpha, weight="weight") + for a, b in zip(list(p.values()), self.G.evc): + assert a == pytest.approx(b, abs=1e-7) + + def test_katz_centrality_unweighted(self): + H = self.H + alpha = self.H.alpha + p = nx.katz_centrality(H, alpha, weight="weight") + for a, b in zip(list(p.values()), self.H.evc): + assert a == pytest.approx(b, abs=1e-7) + + +class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected): + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + super().setup_class() + + def test_katz_centrality_weighted(self): + G = self.G + alpha = self.G.alpha + p = nx.katz_centrality_numpy(G, alpha, weight="weight") + for a, b in zip(list(p.values()), self.G.evc): + assert a == pytest.approx(b, abs=1e-7) + + def test_katz_centrality_unweighted(self): + H = self.H + alpha = self.H.alpha + p = nx.katz_centrality_numpy(H, alpha, weight="weight") + for a, b in zip(list(p.values()), self.H.evc): + assert a == pytest.approx(b, abs=1e-7) + + +class TestKatzEigenvectorVKatz: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + def test_eigenvector_v_katz_random(self): + G = nx.gnp_random_graph(10, 0.5, seed=1234) + l = max(np.linalg.eigvals(nx.adjacency_matrix(G).todense())) + e = nx.eigenvector_centrality_numpy(G) + k = nx.katz_centrality_numpy(G, 1.0 / l) + for n in G: + assert e[n] == pytest.approx(k[n], abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py new file mode 100644 index 0000000..c2a2915 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_laplacian_centrality.py @@ -0,0 +1,220 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") + + +def test_laplacian_centrality_null_graph(): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept): + d = nx.laplacian_centrality(G, normalized=False) + + +def test_laplacian_centrality_single_node(): + """See gh-6571""" + G = nx.empty_graph(1) + assert nx.laplacian_centrality(G, normalized=False) == {0: 0} + with pytest.raises(ZeroDivisionError): + nx.laplacian_centrality(G, 
normalized=True)
+
+
+def test_laplacian_centrality_unconnected_nodes():
+    """laplacian_centrality on an unconnected (edgeless) graph should return 0
+
+    For graphs without edges, the Laplacian energy is 0 and is unchanged with
+    node removal, so::
+
+        LC(v) = LE(G) - LE(G - v) = 0 - 0 = 0
+    """
+    G = nx.empty_graph(3)
+    assert nx.laplacian_centrality(G, normalized=False) == {0: 0, 1: 0, 2: 0}
+
+
+def test_laplacian_centrality_empty_graph():
+    G = nx.empty_graph(3)
+    with pytest.raises(ZeroDivisionError):
+        d = nx.laplacian_centrality(G, normalized=True)
+
+
+def test_laplacian_centrality_E():
+    E = nx.Graph()
+    E.add_weighted_edges_from(
+        [(0, 1, 4), (4, 5, 1), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2)]
+    )
+    d = nx.laplacian_centrality(E)
+    exact = {
+        0: 0.700000,
+        1: 0.900000,
+        2: 0.280000,
+        3: 0.220000,
+        4: 0.260000,
+        5: 0.040000,
+    }
+
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 200
+    dnn = nx.laplacian_centrality(E, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted not-normalized version
+    duw_nn = nx.laplacian_centrality(E, normalized=False, weight=None)
+    exact_uw_nn = {
+        0: 18,
+        1: 34,
+        2: 18,
+        3: 10,
+        4: 16,
+        5: 6,
+    }
+    for n, dc in duw_nn.items():
+        assert exact_uw_nn[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check unweighted version
+    duw = nx.laplacian_centrality(E, weight=None)
+    full_energy = 42
+    for n, dc in duw.items():
+        assert exact_uw_nn[n] / full_energy == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_KC():
+    KC = nx.karate_club_graph()
+    d = nx.laplacian_centrality(KC)
+    exact = {
+        0: 0.2543593,
+        1: 0.1724524,
+        2: 0.2166053,
+        3: 0.0964646,
+        4: 0.0350344,
+        5: 0.0571109,
+        6: 0.0540713,
+        7: 0.0788674,
+        8: 0.1222204,
+        9: 0.0217565,
+        10: 0.0308751,
+        11: 0.0215965,
+        12: 0.0174372,
+        13: 0.118861,
+        14: 0.0366341,
+        15: 0.0548712,
+        16: 0.0172772,
+        17: 0.0191969,
+        18: 0.0225564,
+        19: 0.0331147,
+        20: 0.0279955,
+        21: 0.0246361,
+        22: 0.0382339,
+        23: 0.1294193,
+        24: 0.0227164,
+        25: 0.0644697,
+        26: 0.0281555,
+        27: 0.075188,
+        28: 0.0364742,
+        29: 0.0707087,
+        30: 0.0708687,
+        31: 0.131019,
+        32: 0.2370821,
+        33: 0.3066709,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 12502
+    dnn = nx.laplacian_centrality(KC, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_K():
+    K = nx.krackhardt_kite_graph()
+    d = nx.laplacian_centrality(K)
+    exact = {
+        0: 0.3010753,
+        1: 0.3010753,
+        2: 0.2258065,
+        3: 0.483871,
+        4: 0.2258065,
+        5: 0.3870968,
+        6: 0.3870968,
+        7: 0.1935484,
+        8: 0.0752688,
+        9: 0.0322581,
+    }
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+    # Check not normalized
+    full_energy = 186
+    dnn = nx.laplacian_centrality(K, normalized=False)
+    for n, dc in dnn.items():
+        assert exact[n] * full_energy == pytest.approx(dc, abs=1e-3)
+
+
+def test_laplacian_centrality_P3():
+    P3 = nx.path_graph(3)
+    d = nx.laplacian_centrality(P3)
+    exact = {0: 0.6, 1: 1.0, 2: 0.6}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def test_laplacian_centrality_K5():
+    K5 = nx.complete_graph(5)
+    d = nx.laplacian_centrality(K5)
+    exact = {0: 0.52, 1: 0.52, 2: 0.52, 3: 0.52, 4: 0.52}
+    for n, dc in d.items():
+        assert exact[n] == pytest.approx(dc, abs=1e-7)
+
+
+def 
test_laplacian_centrality_FF(): + FF = nx.florentine_families_graph() + d = nx.laplacian_centrality(FF) + exact = { + "Acciaiuoli": 0.0804598, + "Medici": 0.4022989, + "Castellani": 0.1724138, + "Peruzzi": 0.183908, + "Strozzi": 0.2528736, + "Barbadori": 0.137931, + "Ridolfi": 0.2183908, + "Tornabuoni": 0.2183908, + "Albizzi": 0.1954023, + "Salviati": 0.1149425, + "Pazzi": 0.0344828, + "Bischeri": 0.1954023, + "Guadagni": 0.2298851, + "Ginori": 0.045977, + "Lamberteschi": 0.0574713, + } + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + +def test_laplacian_centrality_DG(): + DG = nx.DiGraph([(0, 5), (1, 5), (2, 5), (3, 5), (4, 5), (5, 6), (5, 7), (5, 8)]) + d = nx.laplacian_centrality(DG) + exact = { + 0: 0.2123352, + 5: 0.515391, + 1: 0.2123352, + 2: 0.2123352, + 3: 0.2123352, + 4: 0.2123352, + 6: 0.2952031, + 7: 0.2952031, + 8: 0.2952031, + } + for n, dc in d.items(): + assert exact[n] == pytest.approx(dc, abs=1e-7) + + # Check not normalized + full_energy = 9.50704 + dnn = nx.laplacian_centrality(DG, normalized=False) + for n, dc in dnn.items(): + assert exact[n] * full_energy == pytest.approx(dc, abs=1e-4) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py new file mode 100644 index 0000000..bf09603 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_load_centrality.py @@ -0,0 +1,344 @@ +import pytest + +import networkx as nx + + +class TestLoadCentrality: + @classmethod + def setup_class(cls): + G = nx.Graph() + G.add_edge(0, 1, weight=3) + G.add_edge(0, 2, weight=2) + G.add_edge(0, 3, weight=6) + G.add_edge(0, 4, weight=4) + G.add_edge(1, 3, weight=5) + G.add_edge(1, 5, weight=5) + G.add_edge(2, 4, weight=1) + G.add_edge(3, 4, weight=2) + G.add_edge(3, 5, weight=1) + G.add_edge(4, 5, weight=4) + cls.G = G + cls.exact_weighted = {0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0} + cls.K = nx.krackhardt_kite_graph() + cls.P3 = nx.path_graph(3) + cls.P4 = nx.path_graph(4) + cls.K5 = nx.complete_graph(5) + cls.P2 = nx.path_graph(2) + + cls.C4 = nx.cycle_graph(4) + cls.T = nx.balanced_tree(r=2, h=2) + cls.Gb = nx.Graph() + cls.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) + cls.F = nx.florentine_families_graph() + cls.LM = nx.les_miserables_graph() + cls.D = nx.cycle_graph(3, create_using=nx.DiGraph()) + cls.D.add_edges_from([(3, 0), (4, 3)]) + + def test_not_strongly_connected(self): + b = nx.load_centrality(self.D) + result = {0: 5.0 / 12, 1: 1.0 / 4, 2: 1.0 / 12, 3: 1.0 / 4, 4: 0.000} + for n in sorted(self.D): + assert result[n] == pytest.approx(b[n], abs=1e-3) + assert result[n] == pytest.approx(nx.load_centrality(self.D, n), abs=1e-3) + + def test_P2_normalized_load(self): + G = self.P2 + c = nx.load_centrality(G, normalized=True) + d = {0: 0.000, 1: 0.000} + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_weighted_load(self): + b = nx.load_centrality(self.G, weight="weight", normalized=False) + for n in sorted(self.G): + assert b[n] == self.exact_weighted[n] + + def test_k5_load(self): + G = self.K5 + c = nx.load_centrality(G) + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_p3_load(self): + G = self.P3 + c = nx.load_centrality(G) + d = {0: 0.000, 1: 1.000, 2: 0.000} + for n in sorted(G): + assert c[n] == 
pytest.approx(d[n], abs=1e-3) + c = nx.load_centrality(G, v=1) + assert c == pytest.approx(1.0, abs=1e-7) + c = nx.load_centrality(G, v=1, normalized=True) + assert c == pytest.approx(1.0, abs=1e-7) + + def test_p2_load(self): + G = nx.path_graph(2) + c = nx.load_centrality(G) + d = {0: 0.000, 1: 0.000} + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_krackhardt_load(self): + G = self.K + c = nx.load_centrality(G) + d = { + 0: 0.023, + 1: 0.023, + 2: 0.000, + 3: 0.102, + 4: 0.000, + 5: 0.231, + 6: 0.231, + 7: 0.389, + 8: 0.222, + 9: 0.000, + } + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_florentine_families_load(self): + G = self.F + c = nx.load_centrality(G) + d = { + "Acciaiuoli": 0.000, + "Albizzi": 0.211, + "Barbadori": 0.093, + "Bischeri": 0.104, + "Castellani": 0.055, + "Ginori": 0.000, + "Guadagni": 0.251, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.117, + "Salviati": 0.143, + "Strozzi": 0.106, + "Tornabuoni": 0.090, + } + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_les_miserables_load(self): + G = self.LM + c = nx.load_centrality(G) + d = { + "Napoleon": 0.000, + "Myriel": 0.177, + "MlleBaptistine": 0.000, + "MmeMagloire": 0.000, + "CountessDeLo": 0.000, + "Geborand": 0.000, + "Champtercier": 0.000, + "Cravatte": 0.000, + "Count": 0.000, + "OldMan": 0.000, + "Valjean": 0.567, + "Labarre": 0.000, + "Marguerite": 0.000, + "MmeDeR": 0.000, + "Isabeau": 0.000, + "Gervais": 0.000, + "Listolier": 0.000, + "Tholomyes": 0.043, + "Fameuil": 0.000, + "Blacheville": 0.000, + "Favourite": 0.000, + "Dahlia": 0.000, + "Zephine": 0.000, + "Fantine": 0.128, + "MmeThenardier": 0.029, + "Thenardier": 0.075, + "Cosette": 0.024, + "Javert": 0.054, + "Fauchelevent": 0.026, + "Bamatabois": 0.008, + "Perpetue": 0.000, + "Simplice": 0.009, + "Scaufflaire": 0.000, + "Woman1": 0.000, + "Judge": 0.000, + "Champmathieu": 0.000, + "Brevet": 0.000, + "Chenildieu": 0.000, + "Cochepaille": 0.000, + "Pontmercy": 0.007, + "Boulatruelle": 0.000, + "Eponine": 0.012, + "Anzelma": 0.000, + "Woman2": 0.000, + "MotherInnocent": 0.000, + "Gribier": 0.000, + "MmeBurgon": 0.026, + "Jondrette": 0.000, + "Gavroche": 0.164, + "Gillenormand": 0.021, + "Magnon": 0.000, + "MlleGillenormand": 0.047, + "MmePontmercy": 0.000, + "MlleVaubois": 0.000, + "LtGillenormand": 0.000, + "Marius": 0.133, + "BaronessT": 0.000, + "Mabeuf": 0.028, + "Enjolras": 0.041, + "Combeferre": 0.001, + "Prouvaire": 0.000, + "Feuilly": 0.001, + "Courfeyrac": 0.006, + "Bahorel": 0.002, + "Bossuet": 0.032, + "Joly": 0.002, + "Grantaire": 0.000, + "MotherPlutarch": 0.000, + "Gueulemer": 0.005, + "Babet": 0.005, + "Claquesous": 0.005, + "Montparnasse": 0.004, + "Toussaint": 0.000, + "Child1": 0.000, + "Child2": 0.000, + "Brujon": 0.000, + "MmeHucheloup": 0.000, + } + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_unnormalized_k5_load(self): + G = self.K5 + c = nx.load_centrality(G, normalized=False) + d = {0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_unnormalized_p3_load(self): + G = self.P3 + c = nx.load_centrality(G, normalized=False) + d = {0: 0.000, 1: 2.000, 2: 0.000} + for n in sorted(G): + assert c[n] == pytest.approx(d[n], abs=1e-3) + + def test_unnormalized_krackhardt_load(self): + G = self.K + c = nx.load_centrality(G, normalized=False) + d = { + 0: 1.667, + 1: 1.667, + 2: 
0.000,
+            3: 7.333,
+            4: 0.000,
+            5: 16.667,
+            6: 16.667,
+            7: 28.000,
+            8: 16.000,
+            9: 0.000,
+        }
+
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_unnormalized_florentine_families_load(self):
+        G = self.F
+        c = nx.load_centrality(G, normalized=False)
+
+        d = {
+            "Acciaiuoli": 0.000,
+            "Albizzi": 38.333,
+            "Barbadori": 17.000,
+            "Bischeri": 19.000,
+            "Castellani": 10.000,
+            "Ginori": 0.000,
+            "Guadagni": 45.667,
+            "Lamberteschi": 0.000,
+            "Medici": 95.000,
+            "Pazzi": 0.000,
+            "Peruzzi": 4.000,
+            "Ridolfi": 21.333,
+            "Salviati": 26.000,
+            "Strozzi": 19.333,
+            "Tornabuoni": 16.333,
+        }
+        for n in sorted(G):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_load_betweenness_difference(self):
+        # Difference Between Load and Betweenness
+        # ---------------------------------------
+        # The smallest graph that shows the difference between load and
+        # betweenness is G = ladder_graph(3) (Graph B below).
+
+        # Graph A and B are from Tao Zhou, Jian-Guo Liu, Bing-Hong
+        # Wang: Comment on "Scientific collaboration
+        # networks. II. Shortest paths, weighted networks, and
+        # centrality". https://arxiv.org/pdf/physics/0511084
+
+        # Notice that unlike here, their calculation adds 1 to the
+        # betweenness of every node i for every path from i to every
+        # other node. This is exactly what it should be, based on
+        # Eqn. (1) in their paper: the eqn is B(v) = \sum_{s\neq t,
+        # s\neq v}{\frac{\sigma_{st}(v)}{\sigma_{st}}}, therefore,
+        # they allow v to be the target node.
+
+        # We follow Brandes 2001, who follows Freeman 1977 in making
+        # the sum for betweenness of v exclude paths where v is either
+        # the source or target node. To agree with their numbers, we
+        # must additionally remove edge (4,8) from the graph, see the
+        # AC example following (there is a mistake in the figure in
+        # their paper - personal communication).
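+
+        # Load centrality routes a unit of flow from s to t by splitting it
+        # evenly among the shortest-path successors at each node, whereas
+        # betweenness weights every shortest s-t path equally, so the two
+        # measures can disagree, as on the ladder graph below.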
+
+        # A = nx.Graph()
+        # A.add_edges_from([(0,1), (1,2), (1,3), (2,4),
+        #                   (3,5), (4,6), (4,7), (4,8),
+        #                   (5,8), (6,9), (7,9), (8,9)])
+        B = nx.Graph()  # ladder_graph(3)
+        B.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)])
+        c = nx.load_centrality(B, normalized=False)
+        d = {0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 1.750, 5: 1.750}
+        for n in sorted(B):
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_c4_edge_load(self):
+        G = self.C4
+        c = nx.edge_load_centrality(G)
+        d = {(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000}
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_p4_edge_load(self):
+        G = self.P4
+        c = nx.edge_load_centrality(G)
+        d = {(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000}
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_k5_edge_load(self):
+        G = self.K5
+        c = nx.edge_load_centrality(G)
+        d = {
+            (0, 1): 5.000,
+            (0, 2): 5.000,
+            (0, 3): 5.000,
+            (0, 4): 5.000,
+            (1, 2): 5.000,
+            (1, 3): 5.000,
+            (1, 4): 5.000,
+            (2, 3): 5.000,
+            (2, 4): 5.000,
+            (3, 4): 5.000,
+        }
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
+
+    def test_tree_edge_load(self):
+        G = self.T
+        c = nx.edge_load_centrality(G)
+        d = {
+            (0, 1): 24.000,
+            (0, 2): 24.000,
+            (1, 3): 12.000,
+            (1, 4): 12.000,
+            (2, 5): 12.000,
+            (2, 6): 12.000,
+        }
+        for n in G.edges():
+            assert c[n] == pytest.approx(d[n], abs=1e-3)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py
new file mode 100644
index 0000000..0cb8f52
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_percolation_centrality.py
@@ -0,0 +1,87 @@
+import pytest
+
+import networkx as nx
+
+
+def example1a_G():
+    G = nx.Graph()
+    G.add_node(1, percolation=0.1)
+    G.add_node(2, percolation=0.2)
+    G.add_node(3, percolation=0.2)
+    G.add_node(4, percolation=0.2)
+    G.add_node(5, percolation=0.3)
+    G.add_node(6, percolation=0.2)
+    G.add_node(7, percolation=0.5)
+    G.add_node(8, percolation=0.5)
+    G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)])
+    return G
+
+
+def example1b_G():
+    G = nx.Graph()
+    G.add_node(1, percolation=0.3)
+    G.add_node(2, percolation=0.5)
+    G.add_node(3, percolation=0.5)
+    G.add_node(4, percolation=0.2)
+    G.add_node(5, percolation=0.3)
+    G.add_node(6, percolation=0.2)
+    G.add_node(7, percolation=0.1)
+    G.add_node(8, percolation=0.1)
+    G.add_edges_from([(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (6, 7), (6, 8)])
+    return G
+
+
+def test_percolation_example1a():
+    """percolation centrality: example 1a"""
+    G = example1a_G()
+    p = nx.percolation_centrality(G)
+    p_answer = {4: 0.625, 6: 0.667}
+    for n, k in p_answer.items():
+        assert p[n] == pytest.approx(k, abs=1e-3)
+
+
+def test_percolation_example1b():
+    """percolation centrality: example 1b"""
+    G = example1b_G()
+    p = nx.percolation_centrality(G)
+    p_answer = {4: 0.825, 6: 0.4}
+    for n, k in p_answer.items():
+        assert p[n] == pytest.approx(k, abs=1e-3)
+
+
+def test_converge_to_betweenness():
+    """percolation centrality: should converge to betweenness
+    centrality when all nodes are percolated the same"""
+    # taken from betweenness test test_florentine_families_graph
+    G = nx.florentine_families_graph()
+    b_answer = {
+        "Acciaiuoli": 0.000,
+        "Albizzi": 0.212,
+        "Barbadori": 0.093,
+        "Bischeri": 0.104,
+        "Castellani": 0.055,
"Ginori": 0.000, + "Guadagni": 0.255, + "Lamberteschi": 0.000, + "Medici": 0.522, + "Pazzi": 0.000, + "Peruzzi": 0.022, + "Ridolfi": 0.114, + "Salviati": 0.143, + "Strozzi": 0.103, + "Tornabuoni": 0.092, + } + + # If no initial state is provided, state for + # every node defaults to 1 + p_answer = nx.percolation_centrality(G) + assert p_answer == pytest.approx(b_answer, abs=1e-3) + + p_states = {k: 0.3 for k, v in b_answer.items()} + p_answer = nx.percolation_centrality(G, states=p_states) + assert p_answer == pytest.approx(b_answer, abs=1e-3) + + +def test_default_percolation(): + G = nx.erdos_renyi_graph(42, 0.42, seed=42) + assert nx.percolation_centrality(G) == pytest.approx(nx.betweenness_centrality(G)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py new file mode 100644 index 0000000..35d50e7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_reaching.py @@ -0,0 +1,140 @@ +"""Unit tests for the :mod:`networkx.algorithms.centrality.reaching` module.""" + +import pytest + +import networkx as nx + + +class TestGlobalReachingCentrality: + """Unit tests for the global reaching centrality function.""" + + def test_non_positive_weights(self): + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + nx.global_reaching_centrality(G, weight="weight") + + def test_negatively_weighted(self): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) + nx.global_reaching_centrality(G, weight="weight") + + def test_directed_star(self): + G = nx.DiGraph() + G.add_weighted_edges_from([(1, 2, 0.5), (1, 3, 0.5)]) + grc = nx.global_reaching_centrality + assert grc(G, normalized=False, weight="weight") == 0.5 + assert grc(G) == 1 + + def test_undirected_unweighted_star(self): + G = nx.star_graph(2) + grc = nx.global_reaching_centrality + assert grc(G, normalized=False, weight=None) == 0.25 + + def test_undirected_weighted_star(self): + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) + grc = nx.global_reaching_centrality + assert grc(G, normalized=False, weight="weight") == 0.375 + + def test_cycle_directed_unweighted(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(2, 1) + assert nx.global_reaching_centrality(G, weight=None) == 0 + + def test_cycle_undirected_unweighted(self): + G = nx.Graph() + G.add_edge(1, 2) + assert nx.global_reaching_centrality(G, weight=None) == 0 + + def test_cycle_directed_weighted(self): + G = nx.DiGraph() + G.add_weighted_edges_from([(1, 2, 1), (2, 1, 1)]) + assert nx.global_reaching_centrality(G) == 0 + + def test_cycle_undirected_weighted(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + grc = nx.global_reaching_centrality + assert grc(G, normalized=False) == 0 + + def test_directed_weighted(self): + G = nx.DiGraph() + G.add_edge("A", "B", weight=5) + G.add_edge("B", "C", weight=1) + G.add_edge("B", "D", weight=0.25) + G.add_edge("D", "E", weight=1) + + denom = len(G) - 1 + A_local = sum([5, 3, 2.625, 2.0833333333333]) / denom + B_local = sum([1, 0.25, 0.625]) / denom + C_local = 0 + D_local = sum([1]) / denom + E_local = 0 + + local_reach_ctrs = [A_local, C_local, B_local, D_local, E_local] + max_local = max(local_reach_ctrs) + expected = sum(max_local - lrc for lrc in local_reach_ctrs) / denom + grc = nx.global_reaching_centrality + actual = grc(G, normalized=False, weight="weight") + assert 
expected == pytest.approx(actual, abs=1e-7) + + def test_single_node_with_cycle(self): + G = nx.DiGraph([(1, 1)]) + with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"): + nx.global_reaching_centrality(G) + + def test_single_node_with_weighted_cycle(self): + G = nx.DiGraph() + G.add_weighted_edges_from([(1, 1, 2)]) + with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"): + nx.global_reaching_centrality(G, weight="weight") + + +class TestLocalReachingCentrality: + """Unit tests for the local reaching centrality function.""" + + def test_non_positive_weights(self): + with pytest.raises(nx.NetworkXError): + G = nx.DiGraph() + G.add_weighted_edges_from([(0, 1, 0)]) + nx.local_reaching_centrality(G, 0, weight="weight") + + def test_negatively_weighted(self): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, -2), (1, 2, +1)]) + nx.local_reaching_centrality(G, 0, weight="weight") + + def test_undirected_unweighted_star(self): + G = nx.star_graph(2) + grc = nx.local_reaching_centrality + assert grc(G, 1, weight=None, normalized=False) == 0.75 + + def test_undirected_weighted_star(self): + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) + centrality = nx.local_reaching_centrality( + G, 1, normalized=False, weight="weight" + ) + assert centrality == 1.5 + + def test_undirected_weighted_normalized(self): + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2)]) + centrality = nx.local_reaching_centrality( + G, 1, normalized=True, weight="weight" + ) + assert centrality == 1.0 + + def test_single_node_with_cycle(self): + G = nx.DiGraph([(1, 1)]) + with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"): + nx.local_reaching_centrality(G, 1) + + def test_single_node_with_weighted_cycle(self): + G = nx.DiGraph() + G.add_weighted_edges_from([(1, 1, 2)]) + with pytest.raises(nx.NetworkXError, match="local_reaching_centrality"): + nx.local_reaching_centrality(G, 1, weight="weight") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py new file mode 100644 index 0000000..cc30478 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_second_order_centrality.py @@ -0,0 +1,82 @@ +""" +Tests for second order centrality. 
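+
+The second order centrality of a node is the standard deviation of the return
+times to that node of a perpetual random walk on G; smaller values indicate
+more central nodes.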
+""" + +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx + + +def test_empty(): + with pytest.raises(nx.NetworkXException): + G = nx.empty_graph() + nx.second_order_centrality(G) + + +def test_non_connected(): + with pytest.raises(nx.NetworkXException): + G = nx.Graph() + G.add_node(0) + G.add_node(1) + nx.second_order_centrality(G) + + +def test_non_negative_edge_weights(): + with pytest.raises(nx.NetworkXException): + G = nx.path_graph(2) + G.add_edge(0, 1, weight=-1) + nx.second_order_centrality(G) + + +def test_weight_attribute(): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, 1.0), (1, 2, 3.5)], weight="w") + expected = {0: 3.431, 1: 3.082, 2: 5.612} + b = nx.second_order_centrality(G, weight="w") + + for n in sorted(G): + assert b[n] == pytest.approx(expected[n], abs=1e-2) + + +def test_one_node_graph(): + """Second order centrality: single node""" + G = nx.Graph() + G.add_node(0) + G.add_edge(0, 0) + assert nx.second_order_centrality(G)[0] == 0 + + +def test_P3(): + """Second order centrality: line graph, as defined in paper""" + G = nx.path_graph(3) + b_answer = {0: 3.741, 1: 1.414, 2: 3.741} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-2) + + +def test_K3(): + """Second order centrality: complete graph, as defined in paper""" + G = nx.complete_graph(3) + b_answer = {0: 1.414, 1: 1.414, 2: 1.414} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-2) + + +def test_ring_graph(): + """Second order centrality: ring graph, as defined in paper""" + G = nx.cycle_graph(5) + b_answer = {0: 4.472, 1: 4.472, 2: 4.472, 3: 4.472, 4: 4.472} + + b = nx.second_order_centrality(G) + + for n in sorted(G): + assert b[n] == pytest.approx(b_answer[n], abs=1e-2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py new file mode 100644 index 0000000..7109275 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_subgraph.py @@ -0,0 +1,110 @@ +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx +from networkx.algorithms.centrality.subgraph_alg import ( + communicability_betweenness_centrality, + estrada_index, + subgraph_centrality, + subgraph_centrality_exp, +) + + +class TestSubgraph: + def test_subgraph_centrality(self): + answer = {0: 1.5430806348152433, 1: 1.5430806348152433} + result = subgraph_centrality(nx.path_graph(2)) + for k, v in result.items(): + assert answer[k] == pytest.approx(v, abs=1e-7) + + answer1 = { + "1": 1.6445956054135658, + "Albert": 2.4368257358712189, + "Aric": 2.4368257358712193, + "Dan": 3.1306328496328168, + "Franck": 2.3876142275231915, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) + result1 = subgraph_centrality(G1) + for k, v in result1.items(): + assert answer1[k] == pytest.approx(v, abs=1e-7) + result1 = subgraph_centrality_exp(G1) + for k, v in result1.items(): + assert answer1[k] == pytest.approx(v, abs=1e-7) + + def test_subgraph_centrality_big_graph(self): + g199 = nx.complete_graph(199) + g200 = nx.complete_graph(200) + + comm199 = nx.subgraph_centrality(g199) + comm199_exp = nx.subgraph_centrality_exp(g199) + + comm200 = 
nx.subgraph_centrality(g200) + comm200_exp = nx.subgraph_centrality_exp(g200) + + def test_communicability_betweenness_centrality_small(self): + result = communicability_betweenness_centrality(nx.path_graph(2)) + assert result == {0: 0, 1: 0} + + result = communicability_betweenness_centrality(nx.path_graph(1)) + assert result == {0: 0} + + result = communicability_betweenness_centrality(nx.path_graph(0)) + assert result == {} + + answer = {0: 0.1411224421177313, 1: 1.0, 2: 0.1411224421177313} + result = communicability_betweenness_centrality(nx.path_graph(3)) + for k, v in result.items(): + assert answer[k] == pytest.approx(v, abs=1e-7) + + result = communicability_betweenness_centrality(nx.complete_graph(3)) + for k, v in result.items(): + assert 0.49786143366223296 == pytest.approx(v, abs=1e-7) + + def test_communicability_betweenness_centrality(self): + answer = { + 0: 0.07017447951484615, + 1: 0.71565598701107991, + 2: 0.71565598701107991, + 3: 0.07017447951484615, + } + result = communicability_betweenness_centrality(nx.path_graph(4)) + for k, v in result.items(): + assert answer[k] == pytest.approx(v, abs=1e-7) + + answer1 = { + "1": 0.060039074193949521, + "Albert": 0.315470761661372, + "Aric": 0.31547076166137211, + "Dan": 0.68297778678316201, + "Franck": 0.21977926617449497, + } + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) + result1 = communicability_betweenness_centrality(G1) + for k, v in result1.items(): + assert answer1[k] == pytest.approx(v, abs=1e-7) + + def test_estrada_index(self): + answer = 1041.2470334195475 + result = estrada_index(nx.karate_club_graph()) + assert answer == pytest.approx(result, abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py new file mode 100644 index 0000000..8f7d74d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_trophic.py @@ -0,0 +1,302 @@ +"""Test trophic levels, trophic differences and trophic coherence""" + +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +def test_trophic_levels(): + """Trivial example""" + G = nx.DiGraph() + G.add_edge("a", "b") + G.add_edge("b", "c") + + d = nx.trophic_levels(G) + assert d == {"a": 1, "b": 2, "c": 3} + + +def test_trophic_levels_levine(): + """Example from Figure 5 in Stephen Levine (1980) J. theor. Biol. 
83, + 195-207 + """ + S = nx.DiGraph() + S.add_edge(1, 2, weight=1.0) + S.add_edge(1, 3, weight=0.2) + S.add_edge(1, 4, weight=0.8) + S.add_edge(2, 3, weight=0.2) + S.add_edge(2, 5, weight=0.3) + S.add_edge(4, 3, weight=0.6) + S.add_edge(4, 5, weight=0.7) + S.add_edge(5, 4, weight=0.2) + + # save copy for later, test intermediate implementation details first + S2 = S.copy() + + # drop nodes of in-degree zero + z = [nid for nid, d in S.in_degree if d == 0] + for nid in z: + S.remove_node(nid) + + # find adjacency matrix + q = nx.linalg.graphmatrix.adjacency_matrix(S).T + + # fmt: off + expected_q = np.array([ + [0, 0, 0., 0], + [0.2, 0, 0.6, 0], + [0, 0, 0, 0.2], + [0.3, 0, 0.7, 0] + ]) + # fmt: on + assert np.array_equal(q.todense(), expected_q) + + # must be square, size of number of nodes + assert len(q.shape) == 2 + assert q.shape[0] == q.shape[1] + assert q.shape[0] == len(S) + + nn = q.shape[0] + + i = np.eye(nn) + n = np.linalg.inv(i - q) + y = np.asarray(n) @ np.ones(nn) + + expected_y = np.array([1, 2.07906977, 1.46511628, 2.3255814]) + assert np.allclose(y, expected_y) + + expected_d = {1: 1, 2: 2, 3: 3.07906977, 4: 2.46511628, 5: 3.3255814} + + d = nx.trophic_levels(S2) + + for nid, level in d.items(): + expected_level = expected_d[nid] + assert expected_level == pytest.approx(level, abs=1e-7) + + +def test_trophic_levels_simple(): + matrix_a = np.array([[0, 0], [1, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + assert d[0] == pytest.approx(2, abs=1e-7) + assert d[1] == pytest.approx(1, abs=1e-7) + + +def test_trophic_levels_more_complex(): + # fmt: off + matrix = np.array([ + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + expected_result = [1, 2, 3, 4] + for ind in range(4): + assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7) + + # fmt: off + matrix = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + d = nx.trophic_levels(G) + + expected_result = [1, 2, 2.5, 3.25] + for ind in range(4): + assert d[ind] == pytest.approx(expected_result[ind], abs=1e-7) + + +def test_trophic_levels_even_more_complex(): + # fmt: off + # Another, bigger matrix + matrix = np.array([ + [0, 0, 0, 0, 0], + [0, 1, 0, 1, 0], + [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 1, 0] + ]) + # Generated this linear system using pen and paper: + K = np.array([ + [1, 0, -1, 0, 0], + [0, 0.5, 0, -0.5, 0], + [0, 0, 1, 0, 0], + [0, -0.5, 0, 1, -0.5], + [0, 0, 0, 0, 1], + ]) + # fmt: on + result_1 = np.ravel(np.linalg.inv(K) @ np.ones(5)) + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + result_2 = nx.trophic_levels(G) + + for ind in range(5): + assert result_1[ind] == pytest.approx(result_2[ind], abs=1e-7) + + +def test_trophic_levels_singular_matrix(): + """Should raise an error with graphs with only non-basal nodes""" + matrix = np.identity(4) + G = nx.from_numpy_array(matrix, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError, match="no basal nodes"): + nx.trophic_levels(G) + + +def test_trophic_levels_singular_with_basal(): + """Should fail to compute if there are any parts of the graph which are not + reachable from any basal node (with in-degree zero). 
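+
+    Here c and d form a cycle that no basal node feeds, so the (I - P)
+    linear system used to compute the levels is singular.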
+ """ + G = nx.DiGraph() + # a has in-degree zero + G.add_edge("a", "b") + + # b is one level above a, c and d + G.add_edge("c", "b") + G.add_edge("d", "b") + + # c and d form a loop, neither are reachable from a + G.add_edge("c", "d") + G.add_edge("d", "c") + + with pytest.raises(nx.NetworkXError) as e: + nx.trophic_levels(G) + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) + assert msg in str(e.value) + + # if self-loops are allowed, smaller example: + G = nx.DiGraph() + G.add_edge("a", "b") # a has in-degree zero + G.add_edge("c", "b") # b is one level above a and c + G.add_edge("c", "c") # c has a self-loop + with pytest.raises(nx.NetworkXError) as e: + nx.trophic_levels(G) + msg = ( + "Trophic levels are only defined for graphs where every node " + + "has a path from a basal node (basal nodes are nodes with no " + + "incoming edges)." + ) + assert msg in str(e.value) + + +def test_trophic_differences(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + diffs = nx.trophic_differences(G) + assert diffs[(0, 1)] == pytest.approx(1, abs=1e-7) + + # fmt: off + matrix_b = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + diffs = nx.trophic_differences(G) + + assert diffs[(0, 1)] == pytest.approx(1, abs=1e-7) + assert diffs[(0, 2)] == pytest.approx(1.5, abs=1e-7) + assert diffs[(1, 2)] == pytest.approx(0.5, abs=1e-7) + assert diffs[(1, 3)] == pytest.approx(1.25, abs=1e-7) + assert diffs[(2, 3)] == pytest.approx(0.75, abs=1e-7) + + +def test_trophic_incoherence_parameter_no_cannibalism(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + assert q == pytest.approx(0, abs=1e-7) + + # fmt: off + matrix_b = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7) + + # fmt: off + matrix_c = np.array([ + [0, 1, 1, 0], + [0, 1, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 1] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + # Ignore the -link + assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7) + + # no self-loops case + # fmt: off + matrix_d = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_d, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=False) + # Ignore the -link + assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7) + + +def test_trophic_incoherence_parameter_cannibalism(): + matrix_a = np.array([[0, 1], [0, 0]]) + G = nx.from_numpy_array(matrix_a, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=True) + assert q == pytest.approx(0, abs=1e-7) + + # fmt: off + matrix_b = np.array([ + [0, 0, 0, 0, 0], + [0, 1, 0, 1, 0], + [1, 0, 0, 0, 0], + [0, 1, 0, 0, 0], + [0, 0, 0, 1, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_b, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=True) + assert q == 
pytest.approx(2, abs=1e-7) + + # fmt: off + matrix_c = np.array([ + [0, 1, 1, 0], + [0, 0, 1, 1], + [0, 0, 0, 1], + [0, 0, 0, 0] + ]) + # fmt: on + G = nx.from_numpy_array(matrix_c, create_using=nx.DiGraph) + q = nx.trophic_incoherence_parameter(G, cannibalism=True) + # No self-links here, so cannibalism has no effect + assert q == pytest.approx(np.std([1, 1.5, 0.5, 0.75, 1.25]), abs=1e-7) + + +def test_no_basal_node(): + G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) # No basal node, should raise an error + with pytest.raises(nx.NetworkXError, match="no basal node"): + nx.trophic_levels(G) + G.add_node(4) # add basal node, but not connected + with pytest.raises(nx.NetworkXError, match="every node .* path from a basal node"): + nx.trophic_levels(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py new file mode 100644 index 0000000..a5cfb61 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/tests/test_voterank.py @@ -0,0 +1,64 @@ +""" +Unit tests for VoteRank. +""" + +import networkx as nx + + +class TestVoteRankCentrality: + # Example graph from the reference paper + def test_voterank_centrality_1(self): + G = nx.Graph() + G.add_edges_from( + [ + (7, 8), + (7, 5), + (7, 9), + (5, 0), + (0, 1), + (0, 2), + (0, 3), + (0, 4), + (1, 6), + (2, 6), + (3, 6), + (4, 6), + ] + ) + assert [0, 7, 6] == nx.voterank(G) + + def test_voterank_emptygraph(self): + G = nx.Graph() + assert [] == nx.voterank(G) + + # Graph unit test + def test_voterank_centrality_2(self): + G = nx.florentine_families_graph() + d = nx.voterank(G, 4) + exact = ["Medici", "Strozzi", "Guadagni", "Castellani"] + assert exact == d + + # DiGraph unit test + def test_voterank_centrality_3(self): + G = nx.gnc_graph(10, seed=7) + d = nx.voterank(G, 4) + exact = [3, 6, 8] + assert exact == d + + # MultiGraph unit test + def test_voterank_centrality_4(self): + G = nx.MultiGraph() + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) + exact = [2, 1, 5, 4] + assert exact == nx.voterank(G) + + # MultiDiGraph unit test + def test_voterank_centrality_5(self): + G = nx.MultiDiGraph() + G.add_edges_from( + [(0, 1), (0, 1), (1, 2), (2, 5), (2, 5), (5, 6), (5, 6), (2, 4), (4, 3)] + ) + exact = [2, 0, 5, 4] + assert exact == nx.voterank(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py new file mode 100644 index 0000000..608c110 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/trophic.py @@ -0,0 +1,181 @@ +"""Trophic levels""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["trophic_levels", "trophic_differences", "trophic_incoherence_parameter"] + + +@not_implemented_for("undirected") +@nx._dispatchable(edge_attrs="weight") +def trophic_levels(G, weight="weight"): + r"""Compute the trophic levels of nodes. + + The trophic level of a node $i$ is + + .. math:: + + s_i = 1 + \frac{1}{k^{in}_i} \sum_{j} a_{ij} s_j + + where $k^{in}_i$ is the in-degree of node $i$ + + .. math:: + + k^{in}_i = \sum_{j} a_{ij} + + and nodes with $k^{in}_i = 0$ have $s_i = 1$ by convention. + + These are calculated using the method outlined in Levine [1]_.
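+ For graphs where every node is reachable from some basal node (a
+ precondition this function enforces), the defining equations can be
+ restated in matrix form: collect the non-basal levels in a vector $s$,
+ let $P$ be the adjacency matrix restricted to the non-basal nodes with
+ each row divided by that node's total weighted in-degree, and substitute
+ $s_j = 1$ for the basal nodes. The equations then reduce to
+
+ .. math::
+
+ s = (I - P)^{-1} \mathbf{1} + \mathbf{1}
+
+ which is the linear system solved below.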
+ + Parameters + ---------- + G : DiGraph + A directed networkx graph + + Returns + ------- + nodes : dict + Dictionary of nodes with trophic level as the value. + + References + ---------- + .. [1] Stephen Levine (1980) J. theor. Biol. 83, 195-207 + """ + + basal_nodes = [n for n, deg in G.in_degree if deg == 0] + if not basal_nodes: + raise nx.NetworkXError( + "This graph has no basal nodes (nodes with no incoming edges). " + "Trophic levels are not defined without at least one basal node." + ) + + reachable_nodes = { + node for layer in nx.bfs_layers(G, sources=basal_nodes) for node in layer + } + + if len(reachable_nodes) != len(G.nodes): + raise nx.NetworkXError( + "Trophic levels are only defined for graphs where every node has a path " + "from a basal node (basal nodes are nodes with no incoming edges)." + ) + + import numpy as np + + # adjacency matrix, transposed so that row i holds the in-edges of node i + a = nx.adjacency_matrix(G, weight=weight).T.toarray() + + # drop rows/columns where in-degree is zero + rowsum = np.sum(a, axis=1) + p = a[rowsum != 0][:, rowsum != 0] + # normalise so each row of in-edge weights sums to 1 + p = p / rowsum[rowsum != 0][:, np.newaxis] + + # calculate trophic levels + nn = p.shape[0] + i = np.eye(nn) + try: + n = np.linalg.inv(i - p) + except np.linalg.LinAlgError as err: + # Safety net: a LinAlgError here means some node has no path from a basal node + msg = ( + "Trophic levels are only defined for graphs where every " + + "node has a path from a basal node (basal nodes are nodes " + + "with no incoming edges)." + ) + raise nx.NetworkXError(msg) from err + y = n.sum(axis=1) + 1 + + levels = {} + + # all nodes with in-degree zero have trophic level == 1 + zero_node_ids = (node_id for node_id, degree in G.in_degree if degree == 0) + for node_id in zero_node_ids: + levels[node_id] = 1 + + # all other nodes have levels as calculated + nonzero_node_ids = (node_id for node_id, degree in G.in_degree if degree != 0) + for i, node_id in enumerate(nonzero_node_ids): + levels[node_id] = y.item(i) + + return levels + + +@not_implemented_for("undirected") +@nx._dispatchable(edge_attrs="weight") +def trophic_differences(G, weight="weight"): + r"""Compute the trophic differences of the edges of a directed graph. + + The trophic difference $x_{ij}$ for each edge is defined in Johnson et al. + [1]_ as: + + .. math:: + x_{ij} = s_j - s_i + + where $s_i$ is the trophic level of node $i$. + + Parameters + ---------- + G : DiGraph + A directed networkx graph + + Returns + ------- + diffs : dict + Dictionary of edges with trophic differences as the value. + + References + ---------- + .. [1] Samuel Johnson, Virginia Dominguez-Garcia, Luca Donetti, Miguel A. + Munoz (2014) PNAS "Trophic coherence determines food-web stability" + """ + levels = trophic_levels(G, weight=weight) + diffs = {} + for u, v in G.edges: + diffs[(u, v)] = levels[v] - levels[u] + return diffs + + +@not_implemented_for("undirected") +@nx._dispatchable(edge_attrs="weight") +def trophic_incoherence_parameter(G, weight="weight", cannibalism=False): + r"""Compute the trophic incoherence parameter of a graph. + + Trophic coherence is defined as the homogeneity of the distribution of + trophic distances: the more similar, the more coherent. This is measured by + the standard deviation of the trophic differences and referred to as the + trophic incoherence parameter $q$ by [1]_.
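+ For a quick, hand-checked illustration (the value follows from the
+ definitions above rather than from [1]_): in the graph below the trophic
+ differences are 1, 1.5 and 0.5, whose standard deviation is about 0.408.
+
+ >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3)])
+ >>> round(nx.trophic_incoherence_parameter(G), 3)
+ 0.408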
+ + Parameters + ---------- + G : DiGraph + A directed networkx graph + + cannibalism: Boolean + If set to False, self edges are not considered in the calculation + + Returns + ------- + trophic_incoherence_parameter : float + The trophic coherence of a graph + + References + ---------- + .. [1] Samuel Johnson, Virginia Dominguez-Garcia, Luca Donetti, Miguel A. + Munoz (2014) PNAS "Trophic coherence determines food-web stability" + """ + import numpy as np + + if cannibalism: + diffs = trophic_differences(G, weight=weight) + else: + # If no cannibalism, remove self-edges + self_loops = list(nx.selfloop_edges(G)) + if self_loops: + # Make a copy so we do not change G's edges in memory + G_2 = G.copy() + G_2.remove_edges_from(self_loops) + else: + # Avoid copy otherwise + G_2 = G + diffs = trophic_differences(G_2, weight=weight) + return float(np.std(list(diffs.values()))) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py new file mode 100644 index 0000000..9b510b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/centrality/voterank_alg.py @@ -0,0 +1,95 @@ +"""Algorithm to select influential nodes in a graph using VoteRank.""" + +import networkx as nx + +__all__ = ["voterank"] + + +@nx._dispatchable +def voterank(G, number_of_nodes=None): + """Select a list of influential nodes in a graph using VoteRank algorithm + + VoteRank [1]_ computes a ranking of the nodes in a graph G based on a + voting scheme. With VoteRank, all nodes vote for each of its in-neighbors + and the node with the highest votes is elected iteratively. The voting + ability of out-neighbors of elected nodes is decreased in subsequent turns. + + Parameters + ---------- + G : graph + A NetworkX graph. + + number_of_nodes : integer, optional + Number of ranked nodes to extract (default all nodes). + + Returns + ------- + voterank : list + Ordered list of computed seeds. + Only nodes with positive number of votes are returned. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 4)]) + >>> nx.voterank(G) + [0, 1] + + The algorithm can be used both for undirected and directed graphs. + However, the directed version is different in two ways: + (i) nodes only vote for their in-neighbors and + (ii) only the voting ability of elected node and its out-neighbors are updated: + + >>> G = nx.DiGraph([(0, 1), (2, 1), (2, 3), (3, 4)]) + >>> nx.voterank(G) + [2, 3] + + Notes + ----- + Each edge is treated independently in case of multigraphs. + + References + ---------- + .. [1] Zhang, J.-X. et al. (2016). + Identifying a set of influential spreaders in complex networks. + Sci. Rep. 6, 27823; doi: 10.1038/srep27823. + """ + influential_nodes = [] + vote_rank = {} + if len(G) == 0: + return influential_nodes + if number_of_nodes is None or number_of_nodes > len(G): + number_of_nodes = len(G) + if G.is_directed(): + # For directed graphs compute average out-degree + avgDegree = sum(deg for _, deg in G.out_degree()) / len(G) + else: + # For undirected graphs compute average degree + avgDegree = sum(deg for _, deg in G.degree()) / len(G) + # step 1 - initiate all nodes to (0,1) (score, voting ability) + for n in G.nodes(): + vote_rank[n] = [0, 1] + # Repeat steps 1b to 4 until num_seeds are elected. 
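+ # Hand-worked trace of the undirected docstring example, for orientation:
+ # G = Graph([(0, 1), (0, 2), (0, 3), (1, 4)]), avgDegree = 8 / 5 = 1.6,
+ # and every node starts with voting ability 1.
+ # Round 1: scores {0: 3, 1: 2, 2: 1, 3: 1, 4: 1} -> elect 0; the voting
+ # ability of its neighbors 1, 2 and 3 drops by 1/1.6 = 0.625 to 0.375.
+ # Round 2: scores {1: 1.0, 4: 0.375, rest 0 (node 0 is zeroed as already
+ # elected)} -> elect 1; the abilities of its neighbors 0 and 4 are reduced.
+ # Round 3: every remaining score is 0, so the search stops with [0, 1].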
+ for _ in range(number_of_nodes): + # step 1b - reset rank + for n in G.nodes(): + vote_rank[n][0] = 0 + # step 2 - vote + for n, nbr in G.edges(): + # In directed graphs nodes only vote for their in-neighbors + vote_rank[n][0] += vote_rank[nbr][1] + if not G.is_directed(): + vote_rank[nbr][0] += vote_rank[n][1] + for n in influential_nodes: + vote_rank[n][0] = 0 + # step 3 - select top node + n = max(G.nodes, key=lambda x: vote_rank[x][0]) + if vote_rank[n][0] == 0: + return influential_nodes + influential_nodes.append(n) + # weaken the selected node + vote_rank[n] = [0, 0] + # step 4 - update voterank properties + for _, nbr in G.edges(n): + vote_rank[nbr][1] -= 1 / avgDegree + vote_rank[nbr][1] = max(vote_rank[nbr][1], 0) + return influential_nodes diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py new file mode 100644 index 0000000..ae342d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/chains.py @@ -0,0 +1,172 @@ +"""Functions for finding chains in a graph.""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["chain_decomposition"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def chain_decomposition(G, root=None): + """Returns the chain decomposition of a graph. + + The *chain decomposition* of a graph with respect a depth-first + search tree is a set of cycles or paths derived from the set of + fundamental cycles of the tree in the following manner. Consider + each fundamental cycle with respect to the given tree, represented + as a list of edges beginning with the nontree edge oriented away + from the root of the tree. For each fundamental cycle, if it + overlaps with any previous fundamental cycle, just take the initial + non-overlapping segment, which is a path instead of a cycle. Each + cycle or path is called a *chain*. For more information, see [1]_. + + Parameters + ---------- + G : undirected graph + + root : node (optional) + A node in the graph `G`. If specified, only the chain + decomposition for the connected component containing this node + will be returned. This node indicates the root of the depth-first + search tree. + + Yields + ------ + chain : list + A list of edges representing a chain. There is no guarantee on + the orientation of the edges in each chain (for example, if a + chain includes the edge joining nodes 1 and 2, the chain may + include either (1, 2) or (2, 1)). + + Raises + ------ + NodeNotFound + If `root` is not in the graph `G`. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> list(nx.chain_decomposition(G)) + [[(4, 5), (5, 3), (3, 4)]] + + Notes + ----- + The worst-case running time of this implementation is linear in the + number of nodes and number of edges [1]_. + + References + ---------- + .. [1] Jens M. Schmidt (2013). "A simple test on 2-vertex- + and 2-edge-connectivity." *Information Processing Letters*, + 113, 241–244. Elsevier. + + """ + + def _dfs_cycle_forest(G, root=None): + """Builds a directed graph composed of cycles from the given graph. + + `G` is an undirected simple graph. `root` is a node in the graph + from which the depth-first search is started. + + This function returns both the depth-first search cycle graph + (as a :class:`~networkx.DiGraph`) and the list of nodes in + depth-first preorder. 
The depth-first search cycle graph is a + directed graph whose edges are the edges of `G` oriented toward + the root if the edge is a tree edge and away from the root if + the edge is a non-tree edge. If `root` is not specified, this + performs a depth-first search on each connected component of `G` + and returns a directed forest instead. + + If `root` is not in the graph, this raises :exc:`KeyError`. + + """ + # Create a directed graph from the depth-first search tree with + # root node `root` in which tree edges are directed toward the + # root and nontree edges are directed away from the root. For + # each node with an incident nontree edge, this creates a + # directed cycle starting with the nontree edge and returning to + # that node. + # + # The `parent` node attribute stores the parent of each node in + # the DFS tree. The `nontree` edge attribute indicates whether + # the edge is a tree edge or a nontree edge. + # + # We also store the order of the nodes found in the depth-first + # search in the `nodes` list. + H = nx.DiGraph() + nodes = [] + for u, v, d in nx.dfs_labeled_edges(G, source=root): + if d == "forward": + # `dfs_labeled_edges()` yields (root, root, 'forward') + # if it is beginning the search on a new connected + # component. + if u == v: + H.add_node(v, parent=None) + nodes.append(v) + else: + H.add_node(v, parent=u) + H.add_edge(v, u, nontree=False) + nodes.append(v) + # `dfs_labeled_edges` considers nontree edges in both + # orientations, so we need to not add the edge if it its + # other orientation has been added. + elif d == "nontree" and v not in H[u]: + H.add_edge(v, u, nontree=True) + else: + # Do nothing on 'reverse' edges; we only care about + # forward and nontree edges. + pass + return H, nodes + + def _build_chain(G, u, v, visited): + """Generate the chain starting from the given nontree edge. + + `G` is a DFS cycle graph as constructed by + :func:`_dfs_cycle_graph`. The edge (`u`, `v`) is a nontree edge + that begins a chain. `visited` is a set representing the nodes + in `G` that have already been visited. + + This function yields the edges in an initial segment of the + fundamental cycle of `G` starting with the nontree edge (`u`, + `v`) that includes all the edges up until the first node that + appears in `visited`. The tree edges are given by the 'parent' + node attribute. The `visited` set is updated to add each node in + an edge yielded by this function. + + """ + while v not in visited: + yield u, v + visited.add(v) + u, v = v, G.nodes[v]["parent"] + yield u, v + + # Check if the root is in the graph G. If not, raise NodeNotFound + if root is not None and root not in G: + raise nx.NodeNotFound(f"Root node {root} is not in graph") + + # Create a directed version of H that has the DFS edges directed + # toward the root and the nontree edges directed away from the root + # (in each connected component). + H, nodes = _dfs_cycle_forest(G, root) + + # Visit the nodes again in DFS order. For each node, and for each + # nontree edge leaving that node, compute the fundamental cycle for + # that nontree edge starting with that edge. If the fundamental + # cycle overlaps with any visited nodes, just take the prefix of the + # cycle up to the point of visited nodes. + # + # We repeat this process for each connected component (implicitly, + # since `nodes` already has a list of the nodes grouped by connected + # component). + visited = set() + for u in nodes: + visited.add(u) + # For each nontree edge going out of node u... 
+ edges = ((u, v) for u, v, d in H.out_edges(u, data="nontree") if d) + for u, v in edges: + # Create the cycle or cycle prefix starting with the + # nontree edge. + chain = list(_build_chain(H, u, v, visited)) + yield chain diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py new file mode 100644 index 0000000..deb6ed0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/chordal.py @@ -0,0 +1,443 @@ +""" +Algorithms for chordal graphs. + +A graph is chordal if every cycle of length at least 4 has a chord +(an edge joining two nodes not adjacent in the cycle). +https://en.wikipedia.org/wiki/Chordal_graph +""" + +import sys + +import networkx as nx +from networkx.algorithms.components import connected_components +from networkx.utils import arbitrary_element, not_implemented_for + +__all__ = [ + "is_chordal", + "find_induced_nodes", + "chordal_graph_cliques", + "chordal_graph_treewidth", + "NetworkXTreewidthBoundExceeded", + "complete_to_chordal_graph", +] + + +class NetworkXTreewidthBoundExceeded(nx.NetworkXException): + """Exception raised when a treewidth bound has been provided and it has + been exceeded""" + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_chordal(G): + """Checks whether G is a chordal graph. + + A graph is chordal if every cycle of length at least 4 has a chord + (an edge joining two nodes not adjacent in the cycle). + + Parameters + ---------- + G : graph + A NetworkX graph. + + Returns + ------- + chordal : bool + True if G is a chordal graph and False otherwise. + + Raises + ------ + NetworkXNotImplemented + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... ] + >>> G = nx.Graph(e) + >>> nx.is_chordal(G) + True + + Notes + ----- + The routine tries to go through every node following maximum cardinality + search. It returns False when it finds that the separator for any node + is not a clique. Based on the algorithms in [1]_. + + Self loops are ignored. + + References + ---------- + .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms + to test chordality of graphs, test acyclicity of hypergraphs, and + selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984), + pp. 566–579. + """ + if len(G.nodes) <= 3: + return True + return len(_find_chordality_breaker(G)) == 0 + + +@nx._dispatchable +def find_induced_nodes(G, s, t, treewidth_bound=sys.maxsize): + """Returns the set of induced nodes in the path from s to t. + + Parameters + ---------- + G : graph + A chordal NetworkX graph + s : node + Source node to look for induced nodes + t : node + Destination node to look for induced nodes + treewidth_bound: float + Maximum treewidth acceptable for the graph H. The search + for induced nodes will end as soon as the treewidth_bound is exceeded. + + Returns + ------- + induced_nodes : Set of nodes + The set of induced nodes in the path from s to t in G + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + If the input graph is an instance of one of these classes, a + :exc:`NetworkXError` is raised. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. 
+ + Examples + -------- + >>> G = nx.Graph() + >>> G = nx.generators.classic.path_graph(10) + >>> induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) + >>> sorted(induced_nodes) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + + Notes + ----- + G must be a chordal graph and (s,t) an edge that is not in G. + + If a treewidth_bound is provided, the search for induced nodes will end + as soon as the treewidth_bound is exceeded. + + The algorithm is inspired by Algorithm 4 in [1]_. + A formal definition of induced node can also be found on that reference. + + Self Loops are ignored + + References + ---------- + .. [1] Learning Bounded Treewidth Bayesian Networks. + Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008. + http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf + """ + if not is_chordal(G): + raise nx.NetworkXError("Input graph is not chordal.") + + H = nx.Graph(G) + H.add_edge(s, t) + induced_nodes = set() + triplet = _find_chordality_breaker(H, s, treewidth_bound) + while triplet: + (u, v, w) = triplet + induced_nodes.update(triplet) + for n in triplet: + if n != s: + H.add_edge(s, n) + triplet = _find_chordality_breaker(H, s, treewidth_bound) + if induced_nodes: + # Add t and the second node in the induced path from s to t. + induced_nodes.add(t) + for u in G[s]: + if len(induced_nodes & set(G[u])) == 2: + induced_nodes.add(u) + break + return induced_nodes + + +@nx._dispatchable +def chordal_graph_cliques(G): + """Returns all maximal cliques of a chordal graph. + + The algorithm breaks the graph in connected components and performs a + maximum cardinality search in each component to get the cliques. + + Parameters + ---------- + G : graph + A NetworkX graph + + Yields + ------ + frozenset of nodes + Maximal cliques, each of which is a frozenset of + nodes in `G`. The order of cliques is arbitrary. + + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] + >>> G = nx.Graph(e) + >>> G.add_node(9) + >>> cliques = [c for c in chordal_graph_cliques(G)] + >>> cliques[0] + frozenset({1, 2, 3}) + """ + for C in (G.subgraph(c).copy() for c in connected_components(G)): + if C.number_of_nodes() == 1: + if nx.number_of_selfloops(C) > 0: + raise nx.NetworkXError("Input graph is not chordal.") + yield frozenset(C.nodes()) + else: + unnumbered = set(C.nodes()) + v = arbitrary_element(C) + unnumbered.remove(v) + numbered = {v} + clique_wanna_be = {v} + while unnumbered: + v = _max_cardinality_node(C, unnumbered, numbered) + unnumbered.remove(v) + numbered.add(v) + new_clique_wanna_be = set(C.neighbors(v)) & numbered + sg = C.subgraph(clique_wanna_be) + if _is_complete_graph(sg): + new_clique_wanna_be.add(v) + if not new_clique_wanna_be >= clique_wanna_be: + yield frozenset(clique_wanna_be) + clique_wanna_be = new_clique_wanna_be + else: + raise nx.NetworkXError("Input graph is not chordal.") + yield frozenset(clique_wanna_be) + + +@nx._dispatchable +def chordal_graph_treewidth(G): + """Returns the treewidth of the chordal graph G. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + treewidth : int + The size of the largest clique in the graph minus one. 
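+ Isolated nodes form maximal cliques of size one, so they contribute
+ treewidth 0 (as node 9 does in the example below).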
+ + Raises + ------ + NetworkXError + The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. + The algorithm can only be applied to chordal graphs. If the input + graph is found to be non-chordal, a :exc:`NetworkXError` is raised. + + Examples + -------- + >>> e = [ + ... (1, 2), + ... (1, 3), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... (3, 5), + ... (3, 6), + ... (4, 5), + ... (4, 6), + ... (5, 6), + ... (7, 8), + ... ] + >>> G = nx.Graph(e) + >>> G.add_node(9) + >>> nx.chordal_graph_treewidth(G) + 3 + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Tree_decomposition#Treewidth + """ + if not is_chordal(G): + raise nx.NetworkXError("Input graph is not chordal.") + + max_clique = -1 + for clique in nx.chordal_graph_cliques(G): + max_clique = max(max_clique, len(clique)) + return max_clique - 1 + + +def _is_complete_graph(G): + """Returns True if G is a complete graph.""" + if nx.number_of_selfloops(G) > 0: + raise nx.NetworkXError("Self loop found in _is_complete_graph()") + n = G.number_of_nodes() + if n < 2: + return True + e = G.number_of_edges() + max_edges = (n * (n - 1)) / 2 + return e == max_edges + + +def _find_missing_edge(G): + """Given a non-complete graph G, returns a missing edge.""" + nodes = set(G) + for u in G: + missing = nodes - set(list(G[u].keys()) + [u]) + if missing: + return (u, missing.pop()) + + +def _max_cardinality_node(G, choices, wanna_connect): + """Returns a the node in choices that has more connections in G + to nodes in wanna_connect. + """ + max_number = -1 + for x in choices: + number = len([y for y in G[x] if y in wanna_connect]) + if number > max_number: + max_number = number + max_cardinality_node = x + return max_cardinality_node + + +def _find_chordality_breaker(G, s=None, treewidth_bound=sys.maxsize): + """Given a graph G, starts a max cardinality search + (starting from s if s is given and from an arbitrary node otherwise) + trying to find a non-chordal cycle. + + If it does find one, it returns (u,v,w) where u,v,w are the three + nodes that together with s are involved in the cycle. + + It ignores any self loops. + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") + unnumbered = set(G) + if s is None: + s = arbitrary_element(G) + unnumbered.remove(s) + numbered = {s} + current_treewidth = -1 + while unnumbered: # and current_treewidth <= treewidth_bound: + v = _max_cardinality_node(G, unnumbered, numbered) + unnumbered.remove(v) + numbered.add(v) + clique_wanna_be = set(G[v]) & numbered + sg = G.subgraph(clique_wanna_be) + if _is_complete_graph(sg): + # The graph seems to be chordal by now. We update the treewidth + current_treewidth = max(current_treewidth, len(clique_wanna_be)) + if current_treewidth > treewidth_bound: + raise nx.NetworkXTreewidthBoundExceeded( + f"treewidth_bound exceeded: {current_treewidth}" + ) + else: + # sg is not a clique, + # look for an edge that is not included in sg + (u, w) = _find_missing_edge(sg) + return (u, v, w) + return () + + +@not_implemented_for("directed") +@nx._dispatchable(returns_graph=True) +def complete_to_chordal_graph(G): + """Return a copy of G completed to a chordal graph + + Adds edges to a copy of G to create a chordal graph. A graph G=(V,E) is + called chordal if for each cycle with length bigger than 3, there exist + two non-adjacent nodes connected by an edge (called a chord). 
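+ As a small, hand-checked illustration: the 4-cycle needs at least one
+ chord to become chordal, and the minimal triangulation computed here adds
+ exactly one:
+
+ >>> G = nx.cycle_graph(4)
+ >>> H, _ = nx.complete_to_chordal_graph(G)
+ >>> H.number_of_edges() - G.number_of_edges()
+ 1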
+ + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + H : NetworkX graph + The chordal enhancement of G + alpha : Dictionary + The elimination ordering of nodes of G + + Notes + ----- + There are different approaches to calculate the chordal + enhancement of a graph. The algorithm used here is called + MCS-M and gives at least minimal (local) triangulation of graph. Note + that this triangulation is not necessarily a global minimum. + + https://en.wikipedia.org/wiki/Chordal_graph + + References + ---------- + .. [1] Berry, Anne & Blair, Jean & Heggernes, Pinar & Peyton, Barry. (2004) + Maximum Cardinality Search for Computing Minimal Triangulations of + Graphs. Algorithmica. 39. 287-298. 10.1007/s00453-004-1084-3. + + Examples + -------- + >>> from networkx.algorithms.chordal import complete_to_chordal_graph + >>> G = nx.wheel_graph(10) + >>> H, alpha = complete_to_chordal_graph(G) + """ + H = G.copy() + alpha = dict.fromkeys(H, 0) + if nx.is_chordal(H): + return H, alpha + chords = set() + weight = dict.fromkeys(H.nodes(), 0) + unnumbered_nodes = list(H.nodes()) + for i in range(len(H.nodes()), 0, -1): + # get the node in unnumbered_nodes with the maximum weight + z = max(unnumbered_nodes, key=lambda node: weight[node]) + unnumbered_nodes.remove(z) + alpha[z] = i + update_nodes = [] + for y in unnumbered_nodes: + if G.has_edge(y, z): + update_nodes.append(y) + else: + # y_weight will be bigger than node weights between y and z + y_weight = weight[y] + lower_nodes = [ + node for node in unnumbered_nodes if weight[node] < y_weight + ] + if nx.has_path(H.subgraph(lower_nodes + [z, y]), y, z): + update_nodes.append(y) + chords.add((z, y)) + # during calculation of paths the weights should not be updated + for node in update_nodes: + weight[node] += 1 + H.add_edges_from(chords) + return H, alpha diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py new file mode 100644 index 0000000..fe961a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/clique.py @@ -0,0 +1,757 @@ +"""Functions for finding and manipulating cliques. + +Finding the largest clique in a graph is NP-complete problem, so most of +these algorithms have an exponential running time; for more information, +see the Wikipedia article on the clique problem [1]_. + +.. [1] clique problem:: https://en.wikipedia.org/wiki/Clique_problem + +""" + +from collections import Counter, defaultdict, deque +from itertools import chain, combinations, islice + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "find_cliques", + "find_cliques_recursive", + "make_max_clique_graph", + "make_clique_bipartite", + "node_clique_number", + "number_of_cliques", + "enumerate_all_cliques", + "max_weight_clique", +] + + +@not_implemented_for("directed") +@nx._dispatchable +def enumerate_all_cliques(G): + """Returns all cliques in an undirected graph. + + This function returns an iterator over cliques, each of which is a + list of nodes. The iteration is ordered by cardinality of the + cliques: first all cliques of size one, then all cliques of size + two, etc. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + Returns + ------- + iterator + An iterator over cliques, each of which is a list of nodes in + `G`. The cliques are ordered according to size. + + Notes + ----- + To obtain a list of all cliques, use + `list(enumerate_all_cliques(G))`. 
However, be aware that in the + worst-case, the length of this list can be exponential in the number + of nodes in the graph (for example, when the graph is the complete + graph). This function avoids storing all cliques in memory by only + keeping current candidate node lists in memory during its search. + + The implementation is adapted from the algorithm by Zhang, et + al. (2005) [1]_ to output all cliques discovered. + + This algorithm ignores self-loops and parallel edges, since cliques + are not conventionally defined with such edges. + + References + ---------- + .. [1] Yun Zhang, Abu-Khzam, F.N., Baldwin, N.E., Chesler, E.J., + Langston, M.A., Samatova, N.F., + "Genome-Scale Computational Approaches to Memory-Intensive + Applications in Systems Biology". + *Supercomputing*, 2005. Proceedings of the ACM/IEEE SC 2005 + Conference, pp. 12, 12--18 Nov. 2005. + . + + """ + index = {} + nbrs = {} + for u in G: + index[u] = len(index) + # Neighbors of u that appear after u in the iteration order of G. + nbrs[u] = {v for v in G[u] if v not in index} + + queue = deque(([u], sorted(nbrs[u], key=index.__getitem__)) for u in G) + # Loop invariants: + # 1. len(base) is nondecreasing. + # 2. (base + cnbrs) is sorted with respect to the iteration order of G. + # 3. cnbrs is a set of common neighbors of nodes in base. + while queue: + base, cnbrs = map(list, queue.popleft()) + yield base + for i, u in enumerate(cnbrs): + # Use generators to reduce memory consumption. + queue.append( + ( + chain(base, [u]), + filter(nbrs[u].__contains__, islice(cnbrs, i + 1, None)), + ) + ) + + +@not_implemented_for("directed") +@nx._dispatchable +def find_cliques(G, nodes=None): + """Returns all maximal cliques in an undirected graph. + + For each node *n*, a *maximal clique for n* is a largest complete + subgraph containing *n*. The largest maximal clique is sometimes + called the *maximum clique*. + + This function returns an iterator over cliques, each of which is a + list of nodes. It is an iterative implementation, so should not + suffer from recursion depth issues. + + This function accepts a list of `nodes` and only the maximal cliques + containing all of these `nodes` are returned. It can considerably speed up + the running time if some specific cliques are desired. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + nodes : list, optional (default=None) + If provided, only yield *maximal cliques* containing all nodes in `nodes`. + If `nodes` isn't a clique itself, a ValueError is raised. + + Returns + ------- + iterator + An iterator over maximal cliques, each of which is a list of + nodes in `G`. If `nodes` is provided, only the maximal cliques + containing all the nodes in `nodes` are returned. The order of + cliques is arbitrary. + + Raises + ------ + ValueError + If `nodes` is not a clique. + + Examples + -------- + >>> from pprint import pprint # For nice dict formatting + >>> G = nx.karate_club_graph() + >>> sum(1 for c in nx.find_cliques(G)) # The number of maximal cliques in G + 36 + >>> max(nx.find_cliques(G), key=len) # The largest maximal clique in G + [0, 1, 2, 3, 13] + + The size of the largest maximal clique is known as the *clique number* of + the graph, which can be found directly with: + + >>> max(len(c) for c in nx.find_cliques(G)) + 5 + + One can also compute the number of maximal cliques in `G` that contain a given + node. 
The following produces a dictionary keyed by node whose + values are the number of maximal cliques in `G` that contain the node: + + >>> from collections import Counter + >>> from itertools import chain + >>> counts = Counter(chain.from_iterable(nx.find_cliques(G))) + >>> pprint(dict(counts)) + {0: 13, + 1: 6, + 2: 7, + 3: 3, + 4: 2, + 5: 3, + 6: 3, + 7: 1, + 8: 3, + 9: 2, + 10: 2, + 11: 1, + 12: 1, + 13: 2, + 14: 1, + 15: 1, + 16: 1, + 17: 1, + 18: 1, + 19: 2, + 20: 1, + 21: 1, + 22: 1, + 23: 3, + 24: 2, + 25: 2, + 26: 1, + 27: 3, + 28: 2, + 29: 2, + 30: 2, + 31: 4, + 32: 9, + 33: 14} + + Or, similarly, the maximal cliques in `G` that contain a given node. + For example, the 4 maximal cliques that contain node 31: + + >>> [c for c in nx.find_cliques(G) if 31 in c] + [[0, 31], [33, 32, 31], [33, 28, 31], [24, 25, 31]] + + See Also + -------- + find_cliques_recursive + A recursive version of the same algorithm. + + Notes + ----- + To obtain a list of all maximal cliques, use + `list(find_cliques(G))`. However, be aware that in the worst-case, + the length of this list can be exponential in the number of nodes in + the graph. This function avoids storing all cliques in memory by + only keeping current candidate node lists in memory during its search. + + This implementation is based on the algorithm published by Bron and + Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi + (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. It + essentially unrolls the recursion used in the references to avoid + issues of recursion stack depth (for a recursive implementation, see + :func:`find_cliques_recursive`). + + This algorithm ignores self-loops and parallel edges, since cliques + are not conventionally defined with such edges. + + References + ---------- + .. [1] Bron, C. and Kerbosch, J. + "Algorithm 457: finding all cliques of an undirected graph". + *Communications of the ACM* 16, 9 (Sep. 1973), 575--577. + + + .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi, + "The worst-case time complexity for generating all maximal + cliques and computational experiments", + *Theoretical Computer Science*, Volume 363, Issue 1, + Computing and Combinatorics, + 10th Annual International Conference on + Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42 + + + .. [3] F. Cazals, C. Karande, + "A note on the problem of reporting maximal cliques", + *Theoretical Computer Science*, + Volume 407, Issues 1--3, 6 November 2008, Pages 564--568, + + + """ + if len(G) == 0: + return + + adj = {u: {v for v in G[u] if v != u} for u in G} + + # Initialize Q with the given nodes and subg, cand with their nbrs + Q = nodes[:] if nodes is not None else [] + cand = set(G) + for node in Q: + if node not in cand: + raise ValueError(f"The given `nodes` {nodes} do not form a clique") + cand &= adj[node] + + if not cand: + yield Q[:] + return + + subg = cand.copy() + stack = [] + Q.append(None) + + u = max(subg, key=lambda u: len(cand & adj[u])) + ext_u = cand - adj[u] + + try: + while True: + if ext_u: + q = ext_u.pop() + cand.remove(q) + Q[-1] = q + adj_q = adj[q] + subg_q = subg & adj_q + if not subg_q: + yield Q[:] + else: + cand_q = cand & adj_q + if cand_q: + stack.append((subg, cand, ext_u)) + Q.append(None) + subg = subg_q + cand = cand_q + u = max(subg, key=lambda u: len(cand & adj[u])) + ext_u = cand - adj[u] + else: + Q.pop() + subg, cand, ext_u = stack.pop() + except IndexError: + pass + + +# TODO Should this also be not implemented for directed graphs? 
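+# A small, hand-checked illustration of `find_cliques` (exposition only, not
+# part of the library API): in K_4 plus one pendant edge, the maximal cliques
+# are the 4-clique and the new edge:
+#
+#     >>> G = nx.complete_graph(4)
+#     >>> G.add_edge(3, 4)
+#     >>> sorted(sorted(c) for c in nx.find_cliques(G))
+#     [[0, 1, 2, 3], [3, 4]]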
+@nx._dispatchable +def find_cliques_recursive(G, nodes=None): + """Returns all maximal cliques in a graph. + + For each node *v*, a *maximal clique for v* is a largest complete + subgraph containing *v*. The largest maximal clique is sometimes + called the *maximum clique*. + + This function returns an iterator over cliques, each of which is a + list of nodes. It is a recursive implementation, so may suffer from + recursion depth issues, but is included for pedagogical reasons. + For a non-recursive implementation, see :func:`find_cliques`. + + This function accepts a list of `nodes` and only the maximal cliques + containing all of these `nodes` are returned. It can considerably speed up + the running time if some specific cliques are desired. + + Parameters + ---------- + G : NetworkX graph + + nodes : list, optional (default=None) + If provided, only yield *maximal cliques* containing all nodes in `nodes`. + If `nodes` isn't a clique itself, a ValueError is raised. + + Returns + ------- + iterator + An iterator over maximal cliques, each of which is a list of + nodes in `G`. If `nodes` is provided, only the maximal cliques + containing all the nodes in `nodes` are yielded. The order of + cliques is arbitrary. + + Raises + ------ + ValueError + If `nodes` is not a clique. + + See Also + -------- + find_cliques + An iterative version of the same algorithm. See docstring for examples. + + Notes + ----- + To obtain a list of all maximal cliques, use + `list(find_cliques_recursive(G))`. However, be aware that in the + worst-case, the length of this list can be exponential in the number + of nodes in the graph. This function avoids storing all cliques in memory + by only keeping current candidate node lists in memory during its search. + + This implementation is based on the algorithm published by Bron and + Kerbosch (1973) [1]_, as adapted by Tomita, Tanaka and Takahashi + (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. For a + non-recursive implementation, see :func:`find_cliques`. + + This algorithm ignores self-loops and parallel edges, since cliques + are not conventionally defined with such edges. + + References + ---------- + .. [1] Bron, C. and Kerbosch, J. + "Algorithm 457: finding all cliques of an undirected graph". + *Communications of the ACM* 16, 9 (Sep. 1973), 575--577. + + + .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi, + "The worst-case time complexity for generating all maximal + cliques and computational experiments", + *Theoretical Computer Science*, Volume 363, Issue 1, + Computing and Combinatorics, + 10th Annual International Conference on + Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28--42 + + + .. [3] F. Cazals, C. 
Karande, + "A note on the problem of reporting maximal cliques", + *Theoretical Computer Science*, + Volume 407, Issues 1--3, 6 November 2008, Pages 564--568, + + + """ + if len(G) == 0: + return iter([]) + + adj = {u: {v for v in G[u] if v != u} for u in G} + + # Initialize Q with the given nodes and subg, cand with their nbrs + Q = nodes[:] if nodes is not None else [] + cand_init = set(G) + for node in Q: + if node not in cand_init: + raise ValueError(f"The given `nodes` {nodes} do not form a clique") + cand_init &= adj[node] + + if not cand_init: + return iter([Q]) + + subg_init = cand_init.copy() + + def expand(subg, cand): + u = max(subg, key=lambda u: len(cand & adj[u])) + for q in cand - adj[u]: + cand.remove(q) + Q.append(q) + adj_q = adj[q] + subg_q = subg & adj_q + if not subg_q: + yield Q[:] + else: + cand_q = cand & adj_q + if cand_q: + yield from expand(subg_q, cand_q) + Q.pop() + + return expand(subg_init, cand_init) + + +@nx._dispatchable(returns_graph=True) +def make_max_clique_graph(G, create_using=None): + """Returns the maximal clique graph of the given graph. + + The nodes of the maximal clique graph of `G` are the cliques of + `G` and an edge joins two cliques if the cliques are not disjoint. + + Parameters + ---------- + G : NetworkX graph + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + NetworkX graph + A graph whose nodes are the cliques of `G` and whose edges + join two cliques if they are not disjoint. + + Notes + ----- + This function behaves like the following code:: + + import networkx as nx + + G = nx.make_clique_bipartite(G) + cliques = [v for v in G.nodes() if G.nodes[v]["bipartite"] == 0] + G = nx.bipartite.projected_graph(G, cliques) + G = nx.relabel_nodes(G, {-v: v - 1 for v in G}) + + It should be faster, though, since it skips all the intermediate + steps. + + """ + if create_using is None: + B = G.__class__() + else: + B = nx.empty_graph(0, create_using) + cliques = list(enumerate(set(c) for c in find_cliques(G))) + # Add a numbered node for each clique. + B.add_nodes_from(i for i, c in cliques) + # Join cliques by an edge if they share a node. + clique_pairs = combinations(cliques, 2) + B.add_edges_from((i, j) for (i, c1), (j, c2) in clique_pairs if c1 & c2) + return B + + +@nx._dispatchable(returns_graph=True) +def make_clique_bipartite(G, fpos=None, create_using=None, name=None): + """Returns the bipartite clique graph corresponding to `G`. + + In the returned bipartite graph, the "bottom" nodes are the nodes of + `G` and the "top" nodes represent the maximal cliques of `G`. + There is an edge from node *v* to clique *C* in the returned graph + if and only if *v* is an element of *C*. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + fpos : bool + If True or not None, the returned graph will have an + additional attribute, `pos`, a dictionary mapping node to + position in the Euclidean plane. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + NetworkX graph + A bipartite graph whose "bottom" set is the nodes of the graph + `G`, whose "top" set is the cliques of `G`, and whose edges + join nodes of `G` to the cliques that contain them. 
+ + The nodes of the graph `G` have the node attribute + 'bipartite' set to 1 and the nodes representing cliques + have the node attribute 'bipartite' set to 0, as is the + convention for bipartite graphs in NetworkX. + + """ + B = nx.empty_graph(0, create_using) + B.clear() + # The "bottom" nodes in the bipartite graph are the nodes of the + # original graph, G. + B.add_nodes_from(G, bipartite=1) + for i, cl in enumerate(find_cliques(G)): + # The "top" nodes in the bipartite graph are the cliques. These + # nodes get negative numbers as labels. + name = -i - 1 + B.add_node(name, bipartite=0) + B.add_edges_from((v, name) for v in cl) + return B + + +@nx._dispatchable +def node_clique_number(G, nodes=None, cliques=None, separate_nodes=False): + """Returns the size of the largest maximal clique containing each given node. + + Returns a single or list depending on input nodes. + An optional list of cliques can be input if already computed. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + cliques : list, optional (default=None) + A list of cliques, each of which is itself a list of nodes. + If not specified, the list of all cliques will be computed + using :func:`find_cliques`. + + Returns + ------- + int or dict + If `nodes` is a single node, returns the size of the + largest maximal clique in `G` containing that node. + Otherwise return a dict keyed by node to the size + of the largest maximal clique containing that node. + + See Also + -------- + find_cliques + find_cliques yields the maximal cliques of G. + It accepts a `nodes` argument which restricts consideration to + maximal cliques containing all the given `nodes`. + The search for the cliques is optimized for `nodes`. + """ + if cliques is None: + if nodes is not None: + # Use ego_graph to decrease size of graph + # check for single node + if nodes in G: + return max(len(c) for c in find_cliques(nx.ego_graph(G, nodes))) + # handle multiple nodes + return { + n: max(len(c) for c in find_cliques(nx.ego_graph(G, n))) for n in nodes + } + + # nodes is None--find all cliques + cliques = list(find_cliques(G)) + + # single node requested + if nodes in G: + return max(len(c) for c in cliques if nodes in c) + + # multiple nodes requested + # preprocess all nodes (faster than one at a time for even 2 nodes) + size_for_n = defaultdict(int) + for c in cliques: + size_of_c = len(c) + for n in c: + if size_for_n[n] < size_of_c: + size_for_n[n] = size_of_c + if nodes is None: + return size_for_n + return {n: size_for_n[n] for n in nodes} + + +def number_of_cliques(G, nodes=None, cliques=None): + """Returns the number of maximal cliques for each node. + + Returns a single or list depending on input nodes. + Optional list of cliques can be input if already computed. + """ + if cliques is None: + cliques = find_cliques(G) + + if nodes is None: + nodes = list(G.nodes()) # none, get entire graph + + if not isinstance(nodes, list): # check for a list + v = nodes + # assume it is a single value + numcliq = sum(1 for c in cliques if v in c) + else: + numcliq = Counter(chain.from_iterable(cliques)) + numcliq = {v: numcliq[v] for v in nodes} # return a dict + return numcliq + + +class MaxWeightClique: + """A class for the maximum weight clique algorithm. + + This class is a helper for the `max_weight_clique` function. The class + should not normally be used directly. 
+ + Parameters + ---------- + G : NetworkX graph + The undirected graph for which a maximum weight clique is sought + weight : string or None, optional (default='weight') + The node attribute that holds the integer value used as a weight. + If None, then each node has weight 1. + + Attributes + ---------- + G : NetworkX graph + The undirected graph for which a maximum weight clique is sought + node_weights: dict + The weight of each node + incumbent_nodes : list + The nodes of the incumbent clique (the best clique found so far) + incumbent_weight: int + The weight of the incumbent clique + """ + + def __init__(self, G, weight): + self.G = G + self.incumbent_nodes = [] + self.incumbent_weight = 0 + + if weight is None: + self.node_weights = dict.fromkeys(G.nodes(), 1) + else: + for v in G.nodes(): + if weight not in G.nodes[v]: + errmsg = f"Node {v!r} does not have the requested weight field." + raise KeyError(errmsg) + if not isinstance(G.nodes[v][weight], int): + errmsg = f"The {weight!r} field of node {v!r} is not an integer." + raise ValueError(errmsg) + self.node_weights = {v: G.nodes[v][weight] for v in G.nodes()} + + def update_incumbent_if_improved(self, C, C_weight): + """Update the incumbent if the node set C has greater weight. + + C is assumed to be a clique. + """ + if C_weight > self.incumbent_weight: + self.incumbent_nodes = C[:] + self.incumbent_weight = C_weight + + def greedily_find_independent_set(self, P): + """Greedily find an independent set of nodes from a set of + nodes P.""" + independent_set = [] + P = P[:] + while P: + v = P[0] + independent_set.append(v) + P = [w for w in P if v != w and not self.G.has_edge(v, w)] + return independent_set + + def find_branching_nodes(self, P, target): + """Find a set of nodes to branch on.""" + residual_wt = {v: self.node_weights[v] for v in P} + total_wt = 0 + P = P[:] + while P: + independent_set = self.greedily_find_independent_set(P) + min_wt_in_class = min(residual_wt[v] for v in independent_set) + total_wt += min_wt_in_class + if total_wt > target: + break + for v in independent_set: + residual_wt[v] -= min_wt_in_class + P = [v for v in P if residual_wt[v] != 0] + return P + + def expand(self, C, C_weight, P): + """Look for the best clique that contains all the nodes in C and zero or + more of the nodes in P, backtracking if it can be shown that no such + clique has greater weight than the incumbent. + """ + self.update_incumbent_if_improved(C, C_weight) + branching_nodes = self.find_branching_nodes(P, self.incumbent_weight - C_weight) + while branching_nodes: + v = branching_nodes.pop() + P.remove(v) + new_C = C + [v] + new_C_weight = C_weight + self.node_weights[v] + new_P = [w for w in P if self.G.has_edge(v, w)] + self.expand(new_C, new_C_weight, new_P) + + def find_max_weight_clique(self): + """Find a maximum weight clique.""" + # Sort nodes in reverse order of degree for speed + nodes = sorted(self.G.nodes(), key=lambda v: self.G.degree(v), reverse=True) + nodes = [v for v in nodes if self.node_weights[v] > 0] + self.expand([], 0, nodes) + + +@not_implemented_for("directed") +@nx._dispatchable(node_attrs="weight") +def max_weight_clique(G, weight="weight"): + """Find a maximum weight clique in G. + + A *clique* in a graph is a set of nodes such that every two distinct nodes + are adjacent. The *weight* of a clique is the sum of the weights of its + nodes. A *maximum weight clique* of graph G is a clique C in G such that + no clique in G has weight greater than the weight of C. 
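+ A small, hand-checked example (node weights must be integers): a triangle
+ of weight-2 nodes loses to the heavier pendant edge hanging off it:
+
+ >>> G = nx.Graph([(0, 1), (1, 2), (0, 2), (2, 3)])
+ >>> nx.set_node_attributes(G, {0: 2, 1: 2, 2: 2, 3: 7}, "weight")
+ >>> clique, weight = nx.max_weight_clique(G)
+ >>> sorted(clique), weight
+ ([2, 3], 9)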
+ + Parameters + ---------- + G : NetworkX graph + Undirected graph + weight : string or None, optional (default='weight') + The node attribute that holds the integer value used as a weight. + If None, then each node has weight 1. + + Returns + ------- + clique : list + the nodes of a maximum weight clique + weight : int + the weight of a maximum weight clique + + Notes + ----- + The implementation is recursive, and therefore it may run into recursion + depth issues if G contains a clique whose number of nodes is close to the + recursion depth limit. + + At each search node, the algorithm greedily constructs a weighted + independent set cover of part of the graph in order to find a small set of + nodes on which to branch. The algorithm is very similar to the algorithm + of Tavares et al. [1]_, other than the fact that the NetworkX version does + not use bitsets. This style of algorithm for maximum weight clique (and + maximum weight independent set, which is the same problem but on the + complement graph) has a decades-long history. See Algorithm B of Warren + and Hicks [2]_ and the references in that paper. + + References + ---------- + .. [1] Tavares, W.A., Neto, M.B.C., Rodrigues, C.D., Michelon, P.: Um + algoritmo de branch and bound para o problema da clique máxima + ponderada. Proceedings of XLVII SBPO 1 (2015). + + .. [2] Warren, Jeffrey S, Hicks, Illya V.: Combinatorial Branch-and-Bound + for the Maximum Weight Independent Set Problem. Technical Report, + Texas A&M University (2016). + """ + + mwc = MaxWeightClique(G, weight) + mwc.find_max_weight_clique() + return mwc.incumbent_nodes, mwc.incumbent_weight diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py new file mode 100644 index 0000000..ceb0c4e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cluster.py @@ -0,0 +1,658 @@ +"""Algorithms to characterize the number of triangles in a graph.""" + +from collections import Counter +from itertools import chain, combinations + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "triangles", + "average_clustering", + "clustering", + "transitivity", + "square_clustering", + "generalized_degree", +] + + +@not_implemented_for("directed") +@nx._dispatchable +def triangles(G, nodes=None): + """Compute the number of triangles. + + Finds the number of triangles that include a node as one vertex. + + Parameters + ---------- + G : graph + A networkx graph + + nodes : node, iterable of nodes, or None (default=None) + If a singleton node, return the number of triangles for that node. + If an iterable, compute the number of triangles for each of those nodes. + If `None` (the default) compute the number of triangles for all nodes in `G`. + + Returns + ------- + out : dict or int + If `nodes` is a container of nodes, returns number of triangles keyed by node (dict). + If `nodes` is a specific node, returns number of triangles for the node (int). + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> print(nx.triangles(G, 0)) + 6 + >>> print(nx.triangles(G)) + {0: 6, 1: 6, 2: 6, 3: 6, 4: 6} + >>> print(list(nx.triangles(G, [0, 1]).values())) + [6, 6] + + Notes + ----- + Self loops are ignored. 
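+ In a complete graph on ``n`` nodes every node lies in
+ ``(n - 1) * (n - 2) / 2`` triangles, which is where the value 6 for every
+ node of the complete graph on five nodes comes from in the examples above.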
+ + """ + if nodes is not None: + # If `nodes` represents a single node, return only its number of triangles + if nodes in G: + return next(_triangles_and_degree_iter(G, nodes))[2] // 2 + + # if `nodes` is a container of nodes, then return a + # dictionary mapping node to number of triangles. + return {v: t // 2 for v, d, t, _ in _triangles_and_degree_iter(G, nodes)} + + # if nodes is None, then compute triangles for the complete graph + + # dict used to avoid visiting the same nodes twice + # this allows calculating/counting each triangle only once + later_nbrs = {} + + # iterate over the nodes in a graph + for node, neighbors in G.adjacency(): + later_nbrs[node] = {n for n in neighbors if n not in later_nbrs and n != node} + + # instantiate Counter for each node to include isolated nodes + # add 1 to the count if a nodes neighbor's neighbor is also a neighbor + triangle_counts = Counter(dict.fromkeys(G, 0)) + for node1, neighbors in later_nbrs.items(): + for node2 in neighbors: + third_nodes = neighbors & later_nbrs[node2] + m = len(third_nodes) + triangle_counts[node1] += m + triangle_counts[node2] += m + triangle_counts.update(third_nodes) + + return dict(triangle_counts) + + +@not_implemented_for("multigraph") +def _triangles_and_degree_iter(G, nodes=None): + """Return an iterator of (node, degree, triangles, generalized degree). + + This double counts triangles so you may want to divide by 2. + See degree(), triangles() and generalized_degree() for definitions + and details. + + """ + if nodes is None: + nodes_nbrs = G.adj.items() + else: + nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes)) + + for v, v_nbrs in nodes_nbrs: + vs = set(v_nbrs) - {v} + gen_degree = Counter(len(vs & (set(G[w]) - {w})) for w in vs) + ntriangles = sum(k * val for k, val in gen_degree.items()) + yield (v, len(vs), ntriangles, gen_degree) + + +@not_implemented_for("multigraph") +def _weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): + """Return an iterator of (node, degree, weighted_triangles). + + Used for weighted clustering. + Note: this returns the geometric average weight of edges in the triangle. + Also, each triangle is counted twice (each direction). + So you may want to divide by 2. + + """ + import numpy as np + + if weight is None or G.number_of_edges() == 0: + max_weight = 1 + else: + max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True)) + if nodes is None: + nodes_nbrs = G.adj.items() + else: + nodes_nbrs = ((n, G[n]) for n in G.nbunch_iter(nodes)) + + def wt(u, v): + return G[u][v].get(weight, 1) / max_weight + + for i, nbrs in nodes_nbrs: + inbrs = set(nbrs) - {i} + weighted_triangles = 0 + seen = set() + for j in inbrs: + seen.add(j) + # This avoids counting twice -- we double at the end. + jnbrs = set(G[j]) - seen + # Only compute the edge weight once, before the inner inner + # loop. + wij = wt(i, j) + weighted_triangles += np.cbrt( + [(wij * wt(j, k) * wt(k, i)) for k in inbrs & jnbrs] + ).sum() + yield (i, len(inbrs), 2 * float(weighted_triangles)) + + +@not_implemented_for("multigraph") +def _directed_triangles_and_degree_iter(G, nodes=None): + """Return an iterator of + (node, total_degree, reciprocal_degree, directed_triangles). + + Used for directed clustering. + Note that unlike `_triangles_and_degree_iter()`, this function counts + directed triangles so does not count triangles twice. 
+ + """ + nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes)) + + for i, preds, succs in nodes_nbrs: + ipreds = set(preds) - {i} + isuccs = set(succs) - {i} + + directed_triangles = 0 + for j in chain(ipreds, isuccs): + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += sum( + 1 + for k in chain( + (ipreds & jpreds), + (ipreds & jsuccs), + (isuccs & jpreds), + (isuccs & jsuccs), + ) + ) + dtotal = len(ipreds) + len(isuccs) + dbidirectional = len(ipreds & isuccs) + yield (i, dtotal, dbidirectional, directed_triangles) + + +@not_implemented_for("multigraph") +def _directed_weighted_triangles_and_degree_iter(G, nodes=None, weight="weight"): + """Return an iterator of + (node, total_degree, reciprocal_degree, directed_weighted_triangles). + + Used for directed weighted clustering. + Note that unlike `_weighted_triangles_and_degree_iter()`, this function counts + directed triangles so does not count triangles twice. + + """ + import numpy as np + + if weight is None or G.number_of_edges() == 0: + max_weight = 1 + else: + max_weight = max(d.get(weight, 1) for u, v, d in G.edges(data=True)) + + nodes_nbrs = ((n, G._pred[n], G._succ[n]) for n in G.nbunch_iter(nodes)) + + def wt(u, v): + return G[u][v].get(weight, 1) / max_weight + + for i, preds, succs in nodes_nbrs: + ipreds = set(preds) - {i} + isuccs = set(succs) - {i} + + directed_triangles = 0 + for j in ipreds: + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(j, i) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() + + for j in isuccs: + jpreds = set(G._pred[j]) - {j} + jsuccs = set(G._succ[j]) - {j} + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(k, j)) for k in ipreds & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(k, i) * wt(j, k)) for k in ipreds & jsuccs] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(k, j)) for k in isuccs & jpreds] + ).sum() + directed_triangles += np.cbrt( + [(wt(i, j) * wt(i, k) * wt(j, k)) for k in isuccs & jsuccs] + ).sum() + + dtotal = len(ipreds) + len(isuccs) + dbidirectional = len(ipreds & isuccs) + yield (i, dtotal, dbidirectional, float(directed_triangles)) + + +@nx._dispatchable(edge_attrs="weight") +def average_clustering(G, nodes=None, weight=None, count_zeros=True): + r"""Compute the average clustering coefficient for the graph G. + + The clustering coefficient for the graph is the average, + + .. math:: + + C = \frac{1}{n}\sum_{v \in G} c_v, + + where :math:`n` is the number of nodes in `G`. + + Parameters + ---------- + G : graph + + nodes : container of nodes, optional (default=all nodes in G) + Compute average clustering for nodes in this container. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used as a weight. + If None, then each edge has weight 1. + + count_zeros : bool + If False include only the nodes with nonzero clustering in the average. 
+
+    Returns
+    -------
+    avg : float
+        Average clustering
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> print(nx.average_clustering(G))
+    1.0
+
+    Notes
+    -----
+    This is a space saving routine; it might be faster
+    to use the clustering function to get a list and then take the average.
+
+    Self loops are ignored.
+
+    References
+    ----------
+    .. [1] Generalizations of the clustering coefficient to weighted
+       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
+       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
+       http://jponnela.com/web_documents/a9.pdf
+    .. [2] Marcus Kaiser, Mean clustering coefficients: the role of isolated
+       nodes and leafs on clustering measures for small-world networks.
+       https://arxiv.org/abs/0802.2512
+    """
+    c = clustering(G, nodes, weight=weight).values()
+    if not count_zeros:
+        c = [v for v in c if abs(v) > 0]
+    return sum(c) / len(c)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def clustering(G, nodes=None, weight=None):
+    r"""Compute the clustering coefficient for nodes.
+
+    For unweighted graphs, the clustering of a node :math:`u`
+    is the fraction of possible triangles through that node that exist,
+
+    .. math::
+
+      c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)},
+
+    where :math:`T(u)` is the number of triangles through node :math:`u` and
+    :math:`deg(u)` is the degree of :math:`u`.
+
+    For weighted graphs, there are several ways to define clustering [1]_.
+    The one used here is defined
+    as the geometric average of the subgraph edge weights [2]_,
+
+    .. math::
+
+       c_u = \frac{1}{deg(u)(deg(u)-1)}
+             \sum_{vw} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}.
+
+    The edge weights :math:`\hat{w}_{uv}` are normalized by the maximum weight
+    in the network :math:`\hat{w}_{uv} = w_{uv}/\max(w)`.
+
+    The value of :math:`c_u` is assigned to 0 if :math:`deg(u) < 2`.
+
+    Additionally, this weighted definition has been generalized to support
+    negative edge weights [3]_.
+
+    For directed graphs, the clustering is similarly defined as the fraction
+    of all possible directed triangles or geometric average of the subgraph
+    edge weights for unweighted and weighted directed graphs, respectively [4]_.
+
+    .. math::
+
+       c_u = \frac{T(u)}{2(deg^{tot}(u)(deg^{tot}(u)-1) - 2deg^{\leftrightarrow}(u))},
+
+    where :math:`T(u)` is the number of directed triangles through node
+    :math:`u`, :math:`deg^{tot}(u)` is the sum of in degree and out degree of
+    :math:`u` and :math:`deg^{\leftrightarrow}(u)` is the reciprocal degree of
+    :math:`u`.
+
+
+    Parameters
+    ----------
+    G : graph
+
+    nodes : node, iterable of nodes, or None (default=None)
+        If a singleton node, return the clustering coefficient for that node.
+        If an iterable, compute the clustering coefficient for each of those nodes.
+        If `None` (the default) compute it for all nodes in `G`.
+
+    weight : string or None, optional (default=None)
+        The edge attribute that holds the numerical value used as a weight.
+        If None, then each edge has weight 1.
+
+    Returns
+    -------
+    out : float, or dictionary
+        Clustering coefficient at specified nodes
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> print(nx.clustering(G, 0))
+    1.0
+    >>> print(nx.clustering(G))
+    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
+
+    Notes
+    -----
+    Self loops are ignored.
+
+    References
+    ----------
+    .. [1] Generalizations of the clustering coefficient to weighted
+       complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela,
+       K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007).
+       http://jponnela.com/web_documents/a9.pdf
+    .. [2] Intensity and coherence of motifs in weighted complex
+       networks by J. P. Onnela, J. Saramäki, J. Kertész, and K. Kaski,
+       Physical Review E, 71(6), 065103 (2005).
+    .. [3] Generalization of Clustering Coefficients to Signed Correlation Networks
+       by G. Costantini and M. Perugini, PloS one, 9(2), e88669 (2014).
+    .. [4] Clustering in complex directed networks by G. Fagiolo,
+       Physical Review E, 76(2), 026107 (2007).
+    """
+    if G.is_directed():
+        if weight is not None:
+            td_iter = _directed_weighted_triangles_and_degree_iter(G, nodes, weight)
+            clusterc = {
+                v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
+                for v, dt, db, t in td_iter
+            }
+        else:
+            td_iter = _directed_triangles_and_degree_iter(G, nodes)
+            clusterc = {
+                v: 0 if t == 0 else t / ((dt * (dt - 1) - 2 * db) * 2)
+                for v, dt, db, t in td_iter
+            }
+    else:
+        # The formula 2*T/(d*(d-1)) from docs is t/(d*(d-1)) here b/c t==2*T
+        if weight is not None:
+            td_iter = _weighted_triangles_and_degree_iter(G, nodes, weight)
+            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t in td_iter}
+        else:
+            td_iter = _triangles_and_degree_iter(G, nodes)
+            clusterc = {v: 0 if t == 0 else t / (d * (d - 1)) for v, d, t, _ in td_iter}
+    if nodes in G:
+        # Return the value of the sole entry in the dictionary.
+        return clusterc[nodes]
+    return clusterc
+
+
+@nx._dispatchable
+def transitivity(G):
+    r"""Compute graph transitivity, the fraction of all possible triangles
+    present in G.
+
+    Possible triangles are identified by the number of "triads"
+    (two edges with a shared vertex).
+
+    The transitivity is
+
+    .. math::
+
+        T = 3\frac{\#triangles}{\#triads}.
+
+    Parameters
+    ----------
+    G : graph
+
+    Returns
+    -------
+    out : float
+       Transitivity
+
+    Notes
+    -----
+    Self loops are ignored.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> print(nx.transitivity(G))
+    1.0
+    """
+    triangles_contri = [
+        (t, d * (d - 1)) for v, d, t, _ in _triangles_and_degree_iter(G)
+    ]
+    # If the graph is empty
+    if len(triangles_contri) == 0:
+        return 0
+    triangles, contri = map(sum, zip(*triangles_contri))
+    return 0 if triangles == 0 else triangles / contri
+
+
+@nx._dispatchable
+def square_clustering(G, nodes=None):
+    r"""Compute the squares clustering coefficient for nodes.
+
+    For each node, return the fraction of possible squares that exist at
+    the node [1]_
+
+    .. math::
+       C_4(v) = \frac{ \sum_{u=1}^{k_v}
+       \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v}
+       \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]},
+
+    where :math:`q_v(u,w)` are the number of common neighbors of :math:`u` and
+    :math:`w` other than :math:`v` (i.e., squares), and :math:`a_v(u,w) = (k_u -
+    (1+q_v(u,w)+\theta_{uw})) + (k_w - (1+q_v(u,w)+\theta_{uw}))`, where
+    :math:`\theta_{uw} = 1` if :math:`u` and :math:`w` are connected and 0
+    otherwise. [2]_
+
+    Parameters
+    ----------
+    G : graph
+
+    nodes : container of nodes, optional (default=all nodes in G)
+       Compute clustering for nodes in this container.
+
+    Returns
+    -------
+    c4 : dictionary
+       A dictionary keyed by node with the square clustering coefficient value.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> print(nx.square_clustering(G, 0))
+    1.0
+    >>> print(nx.square_clustering(G))
+    {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0}
+
+    Notes
+    -----
+    Self loops are ignored.
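+
+    For example, a 4-cycle is one square, so every node attains the maximum
+    value:
+
+    >>> nx.square_clustering(nx.cycle_graph(4), 0)
+    1.0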
+ + While :math:`C_3(v)` (triangle clustering) gives the probability that + two neighbors of node v are connected with each other, :math:`C_4(v)` is + the probability that two neighbors of node v share a common + neighbor different from v. This algorithm can be applied to both + bipartite and unipartite networks. + + References + ---------- + .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005 + Cycles and clustering in bipartite networks. + Physical Review E (72) 056127. + .. [2] Zhang, Peng et al. Clustering Coefficient and Community Structure of + Bipartite Networks. Physica A: Statistical Mechanics and its Applications 387.27 (2008): 6869–6875. + https://arxiv.org/abs/0710.0117v1 + """ + if nodes is None: + node_iter = G + else: + node_iter = G.nbunch_iter(nodes) + clustering = {} + _G_adj = G._adj + + class GAdj(dict): + """Calculate (and cache) node neighbor sets excluding self-loops.""" + + def __missing__(self, v): + v_neighbors = self[v] = set(_G_adj[v]) + v_neighbors.discard(v) # Ignore self-loops + return v_neighbors + + G_adj = GAdj() # Values are sets of neighbors (no self-loops) + + for v in node_iter: + v_neighbors = G_adj[v] + v_degrees_m1 = len(v_neighbors) - 1 # degrees[v] - 1 (used below) + if v_degrees_m1 <= 0: + # Can't form a square without at least two neighbors + clustering[v] = 0 + continue + + # Count squares with nodes u-v-w-x from the current node v. + # Terms of the denominator: potential = uw_degrees - uw_count - triangles - squares + # uw_degrees: degrees[u] + degrees[w] for each u-w combo + uw_degrees = 0 + # uw_count: 1 for each u and 1 for each w for all combos (degrees * (degrees - 1)) + uw_count = len(v_neighbors) * v_degrees_m1 + # triangles: 1 for each edge where u-w or w-u are connected (i.e. triangles) + triangles = 0 + # squares: the number of squares (also the numerator) + squares = 0 + + # Iterate over all neighbors + for u in v_neighbors: + u_neighbors = G_adj[u] + uw_degrees += len(u_neighbors) * v_degrees_m1 + # P2 from https://arxiv.org/abs/2007.11111 + p2 = len(u_neighbors & v_neighbors) + # triangles is C_3, sigma_4 from https://arxiv.org/abs/2007.11111 + # This double-counts triangles compared to `triangles` function + triangles += p2 + # squares is C_4, sigma_12 from https://arxiv.org/abs/2007.11111 + # Include this term, b/c a neighbor u can also be a neighbor of neighbor x + squares += p2 * (p2 - 1) # Will divide by 2 later + + # And iterate over all neighbors of neighbors. + # These nodes x may be the corners opposite v in squares u-v-w-x. + two_hop_neighbors = set.union(*(G_adj[u] for u in v_neighbors)) + two_hop_neighbors -= v_neighbors # Neighbors already counted above + two_hop_neighbors.discard(v) + for x in two_hop_neighbors: + p2 = len(v_neighbors & G_adj[x]) + squares += p2 * (p2 - 1) # Will divide by 2 later + + squares //= 2 + potential = uw_degrees - uw_count - triangles - squares + if potential > 0: + clustering[v] = squares / potential + else: + clustering[v] = 0 + if nodes in G: + # Return the value of the sole entry in the dictionary. + return clustering[nodes] + return clustering + + +@not_implemented_for("directed") +@nx._dispatchable +def generalized_degree(G, nodes=None): + r"""Compute the generalized degree for nodes. + + For each node, the generalized degree shows how many edges of given + triangle multiplicity the node is connected to. The triangle multiplicity + of an edge is the number of triangles an edge participates in. 
The + generalized degree of node :math:`i` can be written as a vector + :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, k_i^{(N-2)})` where + :math:`k_i^{(j)}` is the number of edges attached to node :math:`i` that + participate in :math:`j` triangles. + + Parameters + ---------- + G : graph + + nodes : container of nodes, optional (default=all nodes in G) + Compute the generalized degree for nodes in this container. + + Returns + ------- + out : Counter, or dictionary of Counters + Generalized degree of specified nodes. The Counter is keyed by edge + triangle multiplicity. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> print(nx.generalized_degree(G, 0)) + Counter({3: 4}) + >>> print(nx.generalized_degree(G)) + {0: Counter({3: 4}), 1: Counter({3: 4}), 2: Counter({3: 4}), 3: Counter({3: 4}), 4: Counter({3: 4})} + + To recover the number of triangles attached to a node: + + >>> k1 = nx.generalized_degree(G, 0) + >>> sum([k * v for k, v in k1.items()]) / 2 == nx.triangles(G, 0) + True + + Notes + ----- + Self loops are ignored. + + In a network of N nodes, the highest triangle multiplicity an edge can have + is N-2. + + The return value does not include a `zero` entry if no edges of a + particular triangle multiplicity are present. + + The number of triangles node :math:`i` is attached to can be recovered from + the generalized degree :math:`\mathbf{k}_i=(k_i^{(0)}, \dotsc, + k_i^{(N-2)})` by :math:`(k_i^{(1)}+2k_i^{(2)}+\dotsc +(N-2)k_i^{(N-2)})/2`. + + References + ---------- + .. [1] Networks with arbitrary edge multiplicities by V. Zlatić, + D. Garlaschelli and G. Caldarelli, EPL (Europhysics Letters), + Volume 97, Number 2 (2012). + https://iopscience.iop.org/article/10.1209/0295-5075/97/28005 + """ + if nodes in G: + return next(_triangles_and_degree_iter(G, nodes))[3] + return {v: gd for v, d, t, gd in _triangles_and_degree_iter(G, nodes)} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py new file mode 100644 index 0000000..39381d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__init__.py @@ -0,0 +1,4 @@ +from networkx.algorithms.coloring.greedy_coloring import * +from networkx.algorithms.coloring.equitable_coloring import equitable_color + +__all__ = ["greedy_color", "equitable_color"] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b1f482b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-312.pyc new file mode 100644 index 0000000..004ec5b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/equitable_coloring.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-312.pyc new file mode 100644 index 0000000..c387513 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/__pycache__/greedy_coloring.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py new file mode 100644 index 0000000..e464a07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/equitable_coloring.py @@ -0,0 +1,505 @@ +""" +Equitable coloring of graphs with bounded degree. +""" + +from collections import defaultdict + +import networkx as nx + +__all__ = ["equitable_color"] + + +@nx._dispatchable +def is_coloring(G, coloring): + """Determine if the coloring is a valid coloring for the graph G.""" + # Verify that the coloring is valid. + return all(coloring[s] != coloring[d] for s, d in G.edges) + + +@nx._dispatchable +def is_equitable(G, coloring, num_colors=None): + """Determines if the coloring is valid and equitable for the graph G.""" + + if not is_coloring(G, coloring): + return False + + # Verify whether it is equitable. + color_set_size = defaultdict(int) + for color in coloring.values(): + color_set_size[color] += 1 + + if num_colors is not None: + for color in range(num_colors): + if color not in color_set_size: + # These colors do not have any vertices attached to them. + color_set_size[color] = 0 + + # If there are more than 2 distinct values, the coloring cannot be equitable + all_set_sizes = set(color_set_size.values()) + if len(all_set_sizes) == 0 and num_colors is None: # Was an empty graph + return True + elif len(all_set_sizes) == 1: + return True + elif len(all_set_sizes) == 2: + a, b = list(all_set_sizes) + return abs(a - b) <= 1 + else: # len(all_set_sizes) > 2: + return False + + +def make_C_from_F(F): + C = defaultdict(list) + for node, color in F.items(): + C[color].append(node) + + return C + + +def make_N_from_L_C(L, C): + nodes = L.keys() + colors = C.keys() + return { + (node, color): sum(1 for v in L[node] if v in C[color]) + for node in nodes + for color in colors + } + + +def make_H_from_C_N(C, N): + return { + (c1, c2): sum(1 for node in C[c1] if N[(node, c2)] == 0) for c1 in C for c2 in C + } + + +def change_color(u, X, Y, N, H, F, C, L): + """Change the color of 'u' from X to Y and update N, H, F, C.""" + assert F[u] == X and X != Y + + # Change the class of 'u' from X to Y + F[u] = Y + + for k in C: + # 'u' witnesses an edge from k -> Y instead of from k -> X now. + if N[u, k] == 0: + H[(X, k)] -= 1 + H[(Y, k)] += 1 + + for v in L[u]: + # 'v' has lost a neighbor in X and gained one in Y + N[(v, X)] -= 1 + N[(v, Y)] += 1 + + if N[(v, X)] == 0: + # 'v' witnesses F[v] -> X + H[(F[v], X)] += 1 + + if N[(v, Y)] == 1: + # 'v' no longer witnesses F[v] -> Y + H[(F[v], Y)] -= 1 + + C[X].remove(u) + C[Y].append(u) + + +def move_witnesses(src_color, dst_color, N, H, F, C, T_cal, L): + """Move witness along a path from src_color to dst_color.""" + X = src_color + while X != dst_color: + Y = T_cal[X] + # Move _any_ witness from X to Y = T_cal[X] + w = next(x for x in C[X] if N[(x, Y)] == 0) + change_color(w, X, Y, N=N, H=H, F=F, C=C, L=L) + X = Y + + +@nx._dispatchable(mutates_input=True) +def pad_graph(G, num_colors): + """Add a disconnected complete clique K_p such that the number of nodes in + the graph becomes a multiple of `num_colors`. + + Assumes that the graph's nodes are labelled using integers. + + Returns the number of nodes with each color. 
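+
+    For example (illustrative numbers): padding a 4-node graph for
+    ``num_colors = 3`` adds a disconnected K_2 on two new integer nodes,
+    giving 6 = 3 * 2 nodes in total, and returns ``s = 2``.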
+
+    """
+
+    n_ = len(G)
+    r = num_colors - 1
+
+    # Ensure that the number of nodes in G is a multiple of (r + 1)
+    s = n_ // (r + 1)
+    if n_ != s * (r + 1):
+        p = (r + 1) - n_ % (r + 1)
+        s += 1
+
+        # Complete graph K_p between (imaginary) nodes [n_, ... , n_ + p]
+        K = nx.relabel_nodes(nx.complete_graph(p), {idx: idx + n_ for idx in range(p)})
+        G.add_edges_from(K.edges)
+
+    return s
+
+
+def procedure_P(V_minus, V_plus, N, H, F, C, L, excluded_colors=None):
+    """Procedure P as described in the paper."""
+
+    if excluded_colors is None:
+        excluded_colors = set()
+
+    A_cal = set()
+    T_cal = {}
+    R_cal = []
+
+    # BFS to determine A_cal, i.e. colors reachable from V-
+    reachable = [V_minus]
+    marked = set(reachable)
+    idx = 0
+
+    while idx < len(reachable):
+        pop = reachable[idx]
+        idx += 1
+
+        A_cal.add(pop)
+        R_cal.append(pop)
+
+        # TODO: Checking whether a color has been visited can be made faster by
+        # using a look-up table instead of testing for membership in a set by a
+        # logarithmic factor.
+        next_layer = []
+        for k in C:
+            if (
+                H[(k, pop)] > 0
+                and k not in A_cal
+                and k not in excluded_colors
+                and k not in marked
+            ):
+                next_layer.append(k)
+
+        for dst in next_layer:
+            # Record that `dst` can reach `pop`
+            T_cal[dst] = pop
+
+        marked.update(next_layer)
+        reachable.extend(next_layer)
+
+    # Variables for the algorithm
+    b = len(C) - len(A_cal)
+
+    if V_plus in A_cal:
+        # Easy case: V+ is in A_cal
+        # Move one node from V+ to V- using T_cal to find the parents.
+        move_witnesses(V_plus, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L)
+    else:
+        # If there is a solo edge, we can resolve the situation by
+        # moving witnesses from B to A, making G[A] equitable and then
+        # recursively balancing G[B - w] with a different V_minus but
+        # the same V_plus.
+
+        A_0 = set()
+        A_cal_0 = set()
+        num_terminal_sets_found = 0
+        made_equitable = False
+
+        for W_1 in R_cal[::-1]:
+            for v in C[W_1]:
+                X = None
+
+                for U in C:
+                    if N[(v, U)] == 0 and U in A_cal and U != W_1:
+                        X = U
+
+                # v does not witness an edge in H[A_cal]
+                if X is None:
+                    continue
+
+                for U in C:
+                    # Note: Departing from the paper here.
+                    if N[(v, U)] >= 1 and U not in A_cal:
+                        X_prime = U
+                        w = v
+
+                        try:
+                            # Finding the solo neighbor of w in X_prime
+                            y = next(
+                                node
+                                for node in L[w]
+                                if F[node] == X_prime and N[(node, W_1)] == 1
+                            )
+                        except StopIteration:
+                            pass
+                        else:
+                            W = W_1
+
+                            # Move w from W to X, now X has one extra node.
+                            change_color(w, W, X, N=N, H=H, F=F, C=C, L=L)
+
+                            # Move witness from X to V_minus, making the coloring
+                            # equitable.
+                            move_witnesses(
+                                src_color=X,
+                                dst_color=V_minus,
+                                N=N,
+                                H=H,
+                                F=F,
+                                C=C,
+                                T_cal=T_cal,
+                                L=L,
+                            )
+
+                            # Move y from X_prime to W, making W the correct size.
+                            change_color(y, X_prime, W, N=N, H=H, F=F, C=C, L=L)
+
+                            # Then call the procedure on G[B - y]
+                            procedure_P(
+                                V_minus=X_prime,
+                                V_plus=V_plus,
+                                N=N,
+                                H=H,
+                                C=C,
+                                F=F,
+                                L=L,
+                                excluded_colors=excluded_colors.union(A_cal),
+                            )
+                            made_equitable = True
+                            break
+
+                if made_equitable:
+                    break
+            else:
+                # No node in W_1 was found such that
+                # it had a solo-neighbor.
+                A_cal_0.add(W_1)
+                A_0.update(C[W_1])
+                num_terminal_sets_found += 1
+
+            if num_terminal_sets_found == b:
+                # Otherwise, construct the maximal independent set and find
+                # a pair of z_1, z_2 as in Case II.
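+                # (Case II, as implemented below: build an independent set I
+                # over the color classes reachable from V+; as soon as two
+                # members of I share a solo neighbor w in a terminal class W,
+                # w can be swapped out and the witness paths rebalanced.)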
+
+                # BFS to determine B_cal': the set of colors reachable from V+
+                B_cal_prime = set()
+                T_cal_prime = {}
+
+                reachable = [V_plus]
+                marked = set(reachable)
+                idx = 0
+                while idx < len(reachable):
+                    pop = reachable[idx]
+                    idx += 1
+
+                    B_cal_prime.add(pop)
+
+                    # No need to check for excluded_colors here because
+                    # they only exclude colors from A_cal
+                    next_layer = [
+                        k
+                        for k in C
+                        if H[(pop, k)] > 0 and k not in B_cal_prime and k not in marked
+                    ]
+
+                    for dst in next_layer:
+                        T_cal_prime[pop] = dst
+
+                    marked.update(next_layer)
+                    reachable.extend(next_layer)
+
+                # Construct the independent set of G[B']
+                I_set = set()
+                I_covered = set()
+                W_covering = {}
+
+                B_prime = [node for k in B_cal_prime for node in C[k]]
+
+                # Add the nodes in V_plus to I first.
+                for z in C[V_plus] + B_prime:
+                    if z in I_covered or F[z] not in B_cal_prime:
+                        continue
+
+                    I_set.add(z)
+                    I_covered.add(z)
+                    I_covered.update(list(L[z]))
+
+                    for w in L[z]:
+                        if F[w] in A_cal_0 and N[(z, F[w])] == 1:
+                            if w not in W_covering:
+                                W_covering[w] = z
+                            else:
+                                # Found z1, z2 which have the same solo
+                                # neighbor in some W
+                                z_1 = W_covering[w]
+                                # z_2 = z
+
+                                Z = F[z_1]
+                                W = F[w]
+
+                                # shift nodes along W, V-
+                                move_witnesses(
+                                    W, V_minus, N=N, H=H, F=F, C=C, T_cal=T_cal, L=L
+                                )
+
+                                # shift nodes along V+ to Z
+                                move_witnesses(
+                                    V_plus,
+                                    Z,
+                                    N=N,
+                                    H=H,
+                                    F=F,
+                                    C=C,
+                                    T_cal=T_cal_prime,
+                                    L=L,
+                                )
+
+                                # change color of z_1 to W
+                                change_color(z_1, Z, W, N=N, H=H, F=F, C=C, L=L)
+
+                                # change color of w to some color in B_cal
+                                W_plus = next(
+                                    k for k in C if N[(w, k)] == 0 and k not in A_cal
+                                )
+                                change_color(w, W, W_plus, N=N, H=H, F=F, C=C, L=L)
+
+                                # recurse with G[B \cup W*]
+                                excluded_colors.update(
+                                    [k for k in C if k != W and k not in B_cal_prime]
+                                )
+                                procedure_P(
+                                    V_minus=W,
+                                    V_plus=W_plus,
+                                    N=N,
+                                    H=H,
+                                    C=C,
+                                    F=F,
+                                    L=L,
+                                    excluded_colors=excluded_colors,
+                                )
+
+                                made_equitable = True
+                                break
+
+                    if made_equitable:
+                        break
+                else:
+                    assert False, (
+                        "Must find a w which is the solo neighbor "
+                        "of two vertices in B_cal_prime."
+                    )
+
+            if made_equitable:
+                break
+
+
+@nx._dispatchable
+def equitable_color(G, num_colors):
+    """Provides an equitable coloring for nodes of `G`.
+
+    Attempts to color a graph using `num_colors` colors, where no neighbors
+    of a node can have the same color as the node itself and the number of
+    nodes with each color differs by at most 1. `num_colors` must be greater
+    than the maximum degree of `G`. The algorithm is described in [1]_ and
+    has complexity O(num_colors * n**2).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The nodes of this graph will be colored.
+
+    num_colors : int
+        Number of colors to use. This number must be at least one more than
+        the maximum degree of nodes in the graph.
+
+    Returns
+    -------
+    A dictionary with keys representing nodes and values representing
+    corresponding coloring.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> nx.coloring.equitable_color(G, num_colors=3)  # doctest: +SKIP
+    {0: 2, 1: 1, 2: 2, 3: 0}
+
+    Raises
+    ------
+    NetworkXAlgorithmError
+        If `num_colors` is not greater than the maximum degree of `G`
+
+    References
+    ----------
+    .. [1] Kierstead, H. A., Kostochka, A. V., Mydlarz, M., & Szemerédi, E.
+        (2010). A fast algorithm for equitable coloring. Combinatorica, 30(2),
+        217-224.
+    """
+
+    # Map nodes to integers for simplicity later.
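+    # (The integer labels 0 .. n-1 also satisfy pad_graph's assumption that
+    # nodes are labelled with integers, so imaginary clique nodes can be
+    # appended as n, n+1, ...)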
+ nodes_to_int = {} + int_to_nodes = {} + + for idx, node in enumerate(G.nodes): + nodes_to_int[node] = idx + int_to_nodes[idx] = node + + G = nx.relabel_nodes(G, nodes_to_int, copy=True) + + # Basic graph statistics and sanity check. + if len(G.nodes) > 0: + r_ = max(G.degree(node) for node in G.nodes) + else: + r_ = 0 + + if r_ >= num_colors: + raise nx.NetworkXAlgorithmError( + f"Graph has maximum degree {r_}, needs " + f"{r_ + 1} (> {num_colors}) colors for guaranteed coloring." + ) + + # Ensure that the number of nodes in G is a multiple of (r + 1) + pad_graph(G, num_colors) + + # Starting the algorithm. + # L = {node: list(G.neighbors(node)) for node in G.nodes} + L_ = {node: [] for node in G.nodes} + + # Arbitrary equitable allocation of colors to nodes. + F = {node: idx % num_colors for idx, node in enumerate(G.nodes)} + + C = make_C_from_F(F) + + # The neighborhood is empty initially. + N = make_N_from_L_C(L_, C) + + # Currently all nodes witness all edges. + H = make_H_from_C_N(C, N) + + # Start of algorithm. + edges_seen = set() + + for u in sorted(G.nodes): + for v in sorted(G.neighbors(u)): + # Do not double count edges if (v, u) has already been seen. + if (v, u) in edges_seen: + continue + + edges_seen.add((u, v)) + + L_[u].append(v) + L_[v].append(u) + + N[(u, F[v])] += 1 + N[(v, F[u])] += 1 + + if F[u] != F[v]: + # Were 'u' and 'v' witnesses for F[u] -> F[v] or F[v] -> F[u]? + if N[(u, F[v])] == 1: + H[F[u], F[v]] -= 1 # u cannot witness an edge between F[u], F[v] + + if N[(v, F[u])] == 1: + H[F[v], F[u]] -= 1 # v cannot witness an edge between F[v], F[u] + + if N[(u, F[u])] != 0: + # Find the first color where 'u' does not have any neighbors. + Y = next(k for k in C if N[(u, k)] == 0) + X = F[u] + change_color(u, X, Y, N=N, H=H, F=F, C=C, L=L_) + + # Procedure P + procedure_P(V_minus=X, V_plus=Y, N=N, H=H, F=F, C=C, L=L_) + + return {int_to_nodes[x]: F[x] for x in int_to_nodes} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py new file mode 100644 index 0000000..311bc3a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/greedy_coloring.py @@ -0,0 +1,565 @@ +""" +Greedy graph coloring using various strategies. +""" + +import itertools +from collections import defaultdict, deque + +import networkx as nx +from networkx.utils import arbitrary_element, py_random_state + +__all__ = [ + "greedy_color", + "strategy_connected_sequential", + "strategy_connected_sequential_bfs", + "strategy_connected_sequential_dfs", + "strategy_independent_set", + "strategy_largest_first", + "strategy_random_sequential", + "strategy_saturation_largest_first", + "strategy_smallest_last", +] + + +def strategy_largest_first(G, colors): + """Returns a list of the nodes of ``G`` in decreasing order by + degree. + + ``G`` is a NetworkX graph. ``colors`` is ignored. + + """ + return sorted(G, key=G.degree, reverse=True) + + +@py_random_state(2) +def strategy_random_sequential(G, colors, seed=None): + """Returns a random permutation of the nodes of ``G`` as a list. + + ``G`` is a NetworkX graph. ``colors`` is ignored. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + """ + nodes = list(G) + seed.shuffle(nodes) + return nodes + + +def strategy_smallest_last(G, colors): + """Returns a deque of the nodes of ``G``, "smallest" last. 
+
+    Specifically, the degrees of each node are tracked in a bucket queue.
+    From this, the node of minimum degree is repeatedly popped from the
+    graph, updating its neighbors' degrees.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    This implementation of the strategy runs in $O(n + m)$ time
+    (ignoring polylogarithmic factors), where $n$ is the number of nodes
+    and $m$ is the number of edges.
+
+    This strategy is related to :func:`strategy_independent_set`: if we
+    interpret each node removed as an independent set of size one, then
+    this strategy chooses an independent set of size one instead of a
+    maximal independent set.
+
+    """
+    H = G.copy()
+    result = deque()
+
+    # Build initial degree list (i.e. the bucket queue data structure)
+    degrees = defaultdict(set)  # set(), for fast random-access removals
+    lbound = float("inf")
+    for node, d in H.degree():
+        degrees[d].add(node)
+        lbound = min(lbound, d)  # Lower bound on min-degree.
+
+    def find_min_degree():
+        # Save time by starting the iterator at `lbound`, not 0.
+        # The value that we find will be our new `lbound`, which we set later.
+        return next(d for d in itertools.count(lbound) if d in degrees)
+
+    for _ in G:
+        # Pop a min-degree node and add it to the list.
+        min_degree = find_min_degree()
+        u = degrees[min_degree].pop()
+        if not degrees[min_degree]:  # Clean up the degree list.
+            del degrees[min_degree]
+        result.appendleft(u)
+
+        # Update degrees of removed node's neighbors.
+        for v in H[u]:
+            degree = H.degree(v)
+            degrees[degree].remove(v)
+            if not degrees[degree]:  # Clean up the degree list.
+                del degrees[degree]
+            degrees[degree - 1].add(v)
+
+        # Finally, remove the node.
+        H.remove_node(u)
+        lbound = min_degree - 1  # Subtract 1 in case of tied neighbors.
+
+    return result
+
+
+def _maximal_independent_set(G):
+    """Returns a maximal independent set of nodes in ``G`` by repeatedly
+    choosing an independent node of minimum degree (with respect to the
+    subgraph of unchosen nodes).
+
+    """
+    result = set()
+    remaining = set(G)
+    while remaining:
+        G = G.subgraph(remaining)
+        v = min(remaining, key=G.degree)
+        result.add(v)
+        remaining -= set(G[v]) | {v}
+    return result
+
+
+def strategy_independent_set(G, colors):
+    """Uses a greedy independent set removal strategy to determine the
+    colors.
+
+    Unlike the strategy functions in this module that return a completed
+    ordering, this one is a generator: it yields nodes one maximal
+    independent set at a time.
+
+    This algorithm repeatedly finds and removes a maximal independent
+    set, assigning each node in the set an unused color.
+
+    ``G`` is a NetworkX graph.
+
+    This strategy is related to :func:`strategy_smallest_last`: in that
+    strategy, an independent set of size one is chosen at each step
+    instead of a maximal independent set.
+
+    """
+    remaining_nodes = set(G)
+    while len(remaining_nodes) > 0:
+        nodes = _maximal_independent_set(G.subgraph(remaining_nodes))
+        remaining_nodes -= nodes
+        yield from nodes
+
+
+def strategy_connected_sequential_bfs(G, colors):
+    """Returns an iterable over nodes in ``G`` in the order given by a
+    breadth-first traversal.
+
+    The generated sequence has the property that for each node except
+    the first, at least one neighbor appeared earlier in the sequence.
+
+    ``G`` is a NetworkX graph. ``colors`` is ignored.
+
+    """
+    return strategy_connected_sequential(G, colors, "bfs")
+
+
+def strategy_connected_sequential_dfs(G, colors):
+    """Returns an iterable over nodes in ``G`` in the order given by a
+    depth-first traversal.
+ + The generated sequence has the property that for each node except + the first, at least one neighbor appeared earlier in the sequence. + + ``G`` is a NetworkX graph. ``colors`` is ignored. + + """ + return strategy_connected_sequential(G, colors, "dfs") + + +def strategy_connected_sequential(G, colors, traversal="bfs"): + """Returns an iterable over nodes in ``G`` in the order given by a + breadth-first or depth-first traversal. + + ``traversal`` must be one of the strings ``'dfs'`` or ``'bfs'``, + representing depth-first traversal or breadth-first traversal, + respectively. + + The generated sequence has the property that for each node except + the first, at least one neighbor appeared earlier in the sequence. + + ``G`` is a NetworkX graph. ``colors`` is ignored. + + """ + if traversal == "bfs": + traverse = nx.bfs_edges + elif traversal == "dfs": + traverse = nx.dfs_edges + else: + raise nx.NetworkXError( + "Please specify one of the strings 'bfs' or" + " 'dfs' for connected sequential ordering" + ) + for component in nx.connected_components(G): + source = arbitrary_element(component) + # Yield the source node, then all the nodes in the specified + # traversal order. + yield source + for _, end in traverse(G.subgraph(component), source): + yield end + + +def strategy_saturation_largest_first(G, colors): + """Iterates over all the nodes of ``G`` in "saturation order" (also + known as "DSATUR"). + + ``G`` is a NetworkX graph. ``colors`` is a dictionary mapping nodes of + ``G`` to colors, for those nodes that have already been colored. + + """ + distinct_colors = {v: set() for v in G} + + # Add the node color assignments given in colors to the + # distinct colors set for each neighbor of that node + for node, color in colors.items(): + for neighbor in G[node]: + distinct_colors[neighbor].add(color) + + # Check that the color assignments in colors are valid + # i.e. no neighboring nodes have the same color + if len(colors) >= 2: + for node, color in colors.items(): + if color in distinct_colors[node]: + raise nx.NetworkXError("Neighboring nodes must have different colors") + + # If 0 nodes have been colored, simply choose the node of highest degree. + if not colors: + node = max(G, key=G.degree) + yield node + # Add the color 0 to the distinct colors set for each + # neighbor of that node. + for v in G[node]: + distinct_colors[v].add(0) + + while len(G) != len(colors): + # Update the distinct color sets for the neighbors. + for node, color in colors.items(): + for neighbor in G[node]: + distinct_colors[neighbor].add(color) + + # Compute the maximum saturation and the set of nodes that + # achieve that saturation. + saturation = {v: len(c) for v, c in distinct_colors.items() if v not in colors} + # Yield the node with the highest saturation, and break ties by + # degree. + node = max(saturation, key=lambda v: (saturation[v], G.degree(v))) + yield node + + +#: Dictionary mapping name of a strategy as a string to the strategy function. 
+STRATEGIES = {
+    "largest_first": strategy_largest_first,
+    "random_sequential": strategy_random_sequential,
+    "smallest_last": strategy_smallest_last,
+    "independent_set": strategy_independent_set,
+    "connected_sequential_bfs": strategy_connected_sequential_bfs,
+    "connected_sequential_dfs": strategy_connected_sequential_dfs,
+    "connected_sequential": strategy_connected_sequential,
+    "saturation_largest_first": strategy_saturation_largest_first,
+    "DSATUR": strategy_saturation_largest_first,
+}
+
+
+@nx._dispatchable
+def greedy_color(G, strategy="largest_first", interchange=False):
+    """Color a graph using various strategies of greedy graph coloring.
+
+    Attempts to color a graph using as few colors as possible, where no
+    neighbors of a node can have the same color as the node itself. The
+    given strategy determines the order in which nodes are colored.
+
+    The strategies are described in [1]_, and smallest-last is based on
+    [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    strategy : string or function(G, colors)
+       A function (or a string representing a function) that provides
+       the coloring strategy, by returning nodes in the ordering they
+       should be colored. ``G`` is the graph, and ``colors`` is a
+       dictionary of the currently assigned colors, keyed by nodes. The
+       function must return an iterable over all the nodes in ``G``.
+
+       If the strategy function is an iterator generator (that is, a
+       function with ``yield`` statements), keep in mind that the
+       ``colors`` dictionary will be updated after each ``yield``, since
+       this function chooses colors greedily.
+
+       If ``strategy`` is a string, it must be one of the following,
+       each of which represents one of the built-in strategy functions.
+
+       * ``'largest_first'``
+       * ``'random_sequential'``
+       * ``'smallest_last'``
+       * ``'independent_set'``
+       * ``'connected_sequential_bfs'``
+       * ``'connected_sequential_dfs'``
+       * ``'connected_sequential'`` (alias for the previous strategy)
+       * ``'saturation_largest_first'``
+       * ``'DSATUR'`` (alias for the previous strategy)
+
+    interchange : bool
+       Will use the color interchange algorithm described by [3]_ if set
+       to ``True``.
+
+       Note that ``saturation_largest_first`` and ``independent_set``
+       do not work with interchange. Furthermore, if you use
+       interchange with your own strategy function, you cannot rely
+       on the values in the ``colors`` argument.
+
+    Returns
+    -------
+    A dictionary with keys representing nodes and values representing
+    corresponding coloring.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> d = nx.coloring.greedy_color(G, strategy="largest_first")
+    >>> d in [{0: 0, 1: 1, 2: 0, 3: 1}, {0: 1, 1: 0, 2: 1, 3: 0}]
+    True
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If ``strategy`` is ``saturation_largest_first`` or
+        ``independent_set`` and ``interchange`` is ``True``.
+
+    References
+    ----------
+    .. [1] Adrian Kosowski, and Krzysztof Manuszewski,
+       Classical Coloring of Graphs, Graph Colorings, 2-19, 2004.
+       ISBN 0-8218-3458-4.
+    .. [2] David W. Matula, and Leland L. Beck, "Smallest-last
+       ordering and clustering and graph coloring algorithms." *J. ACM* 30,
+       3 (July 1983), 417–427.
+    .. [3] Maciej M. Sysło, Narsingh Deo, Janusz S. Kowalik,
+       Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983.
+       ISBN 0-486-45353-7.
+
+    """
+    if len(G) == 0:
+        return {}
+    # Determine the strategy provided by the caller.
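+    # (A string is looked up in STRATEGIES; a callable is passed through
+    # unchanged, which is why only callability is validated below.)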
+    strategy = STRATEGIES.get(strategy, strategy)
+    if not callable(strategy):
+        raise nx.NetworkXError(
+            f"strategy must be callable or a valid string. {strategy} not valid."
+        )
+    # Perform some validation on the arguments before executing any
+    # strategy functions.
+    if interchange:
+        if strategy is strategy_independent_set:
+            msg = "interchange cannot be used with independent_set"
+            raise nx.NetworkXPointlessConcept(msg)
+        if strategy is strategy_saturation_largest_first:
+            msg = "interchange cannot be used with saturation_largest_first"
+            raise nx.NetworkXPointlessConcept(msg)
+    colors = {}
+    nodes = strategy(G, colors)
+    if interchange:
+        return _greedy_coloring_with_interchange(G, nodes)
+    for u in nodes:
+        # Set to keep track of colors of neighbors
+        nbr_colors = {colors[v] for v in G[u] if v in colors}
+        # Find the first unused color.
+        for color in itertools.count():
+            if color not in nbr_colors:
+                break
+        # Assign the new color to the current node.
+        colors[u] = color
+    return colors
+
+
+# Tools for coloring with interchanges
+class _Node:
+    __slots__ = ["node_id", "color", "adj_list", "adj_color"]
+
+    def __init__(self, node_id, n):
+        self.node_id = node_id
+        self.color = -1
+        self.adj_list = None
+        self.adj_color = [None for _ in range(n)]
+
+    def __repr__(self):
+        return (
+            f"Node_id: {self.node_id}, Color: {self.color}, "
+            f"Adj_list: ({self.adj_list}), adj_color: ({self.adj_color})"
+        )
+
+    def assign_color(self, adj_entry, color):
+        adj_entry.col_prev = None
+        adj_entry.col_next = self.adj_color[color]
+        self.adj_color[color] = adj_entry
+        if adj_entry.col_next is not None:
+            adj_entry.col_next.col_prev = adj_entry
+
+    def clear_color(self, adj_entry, color):
+        if adj_entry.col_prev is None:
+            self.adj_color[color] = adj_entry.col_next
+        else:
+            adj_entry.col_prev.col_next = adj_entry.col_next
+        if adj_entry.col_next is not None:
+            adj_entry.col_next.col_prev = adj_entry.col_prev
+
+    def iter_neighbors(self):
+        adj_node = self.adj_list
+        while adj_node is not None:
+            yield adj_node
+            adj_node = adj_node.next
+
+    def iter_neighbors_color(self, color):
+        adj_color_node = self.adj_color[color]
+        while adj_color_node is not None:
+            yield adj_color_node.node_id
+            adj_color_node = adj_color_node.col_next
+
+
+class _AdjEntry:
+    __slots__ = ["node_id", "next", "mate", "col_next", "col_prev"]
+
+    def __init__(self, node_id):
+        self.node_id = node_id
+        self.next = None
+        self.mate = None
+        self.col_next = None
+        self.col_prev = None
+
+    def __repr__(self):
+        col_next = None if self.col_next is None else self.col_next.node_id
+        col_prev = None if self.col_prev is None else self.col_prev.node_id
+        return (
+            f"Node_id: {self.node_id}, Next: ({self.next}), "
+            f"Mate: ({self.mate.node_id}), "
+            f"col_next: ({col_next}), col_prev: ({col_prev})"
+        )
+
+
+def _greedy_coloring_with_interchange(G, nodes):
+    """Return a coloring for ``G`` using the interchange approach.
+
+    This procedure is an adaptation of the algorithm described by [1]_,
+    and is an implementation of coloring with interchange. Please be
+    advised that the data structures used are rather complex because
+    they are optimized to minimize the time spent identifying
+    subcomponents of the graph, which are possible candidates for color
+    interchange.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph to be colored
+
+    nodes : list
+        nodes ordered using the strategy of choice
+
+    Returns
+    -------
+    dict :
+        A dictionary keyed by node to a color value
+
+    References
+    ----------
+    .. [1] Maciej M.
Syslo, Narsingh Deo, Janusz S. Kowalik, + Discrete Optimization Algorithms with Pascal Programs, 415-424, 1983. + ISBN 0-486-45353-7. + """ + n = len(G) + + graph = {node: _Node(node, n) for node in G} + + for node1, node2 in G.edges(): + adj_entry1 = _AdjEntry(node2) + adj_entry2 = _AdjEntry(node1) + adj_entry1.mate = adj_entry2 + adj_entry2.mate = adj_entry1 + node1_head = graph[node1].adj_list + adj_entry1.next = node1_head + graph[node1].adj_list = adj_entry1 + node2_head = graph[node2].adj_list + adj_entry2.next = node2_head + graph[node2].adj_list = adj_entry2 + + k = 0 + for node in nodes: + # Find the smallest possible, unused color + neighbors = graph[node].iter_neighbors() + col_used = {graph[adj_node.node_id].color for adj_node in neighbors} + col_used.discard(-1) + k1 = next(itertools.dropwhile(lambda x: x in col_used, itertools.count())) + + # k1 is now the lowest available color + if k1 > k: + connected = True + visited = set() + col1 = -1 + col2 = -1 + while connected and col1 < k: + col1 += 1 + neighbor_cols = graph[node].iter_neighbors_color(col1) + col1_adj = list(neighbor_cols) + + col2 = col1 + while connected and col2 < k: + col2 += 1 + visited = set(col1_adj) + frontier = list(col1_adj) + i = 0 + while i < len(frontier): + search_node = frontier[i] + i += 1 + col_opp = col2 if graph[search_node].color == col1 else col1 + neighbor_cols = graph[search_node].iter_neighbors_color(col_opp) + + for neighbor in neighbor_cols: + if neighbor not in visited: + visited.add(neighbor) + frontier.append(neighbor) + + # Search if node is not adj to any col2 vertex + connected = ( + len( + visited.intersection(graph[node].iter_neighbors_color(col2)) + ) + > 0 + ) + + # If connected is false then we can swap !!! + if not connected: + # Update all the nodes in the component + for search_node in visited: + graph[search_node].color = ( + col2 if graph[search_node].color == col1 else col1 + ) + col2_adj = graph[search_node].adj_color[col2] + graph[search_node].adj_color[col2] = graph[search_node].adj_color[ + col1 + ] + graph[search_node].adj_color[col1] = col2_adj + + # Update all the neighboring nodes + for search_node in visited: + col = graph[search_node].color + col_opp = col1 if col == col2 else col2 + for adj_node in graph[search_node].iter_neighbors(): + if graph[adj_node.node_id].color != col_opp: + # Direct reference to entry + adj_mate = adj_node.mate + graph[adj_node.node_id].clear_color(adj_mate, col_opp) + graph[adj_node.node_id].assign_color(adj_mate, col) + k1 = col1 + + # We can color this node color k1 + graph[node].color = k1 + k = max(k1, k) + + # Update the neighbors of this node + for adj_node in graph[node].iter_neighbors(): + adj_mate = adj_node.mate + graph[adj_node.node_id].assign_color(adj_mate, k1) + + return {node.node_id: node.color for node in graph.values()} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..df32ca1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/__init__.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-312.pyc new file mode 100644 index 0000000..2bb34f0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/__pycache__/test_coloring.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py new file mode 100644 index 0000000..1e5a913 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/coloring/tests/test_coloring.py @@ -0,0 +1,863 @@ +"""Greedy coloring test suite.""" + +import itertools + +import pytest + +import networkx as nx + +is_coloring = nx.algorithms.coloring.equitable_coloring.is_coloring +is_equitable = nx.algorithms.coloring.equitable_coloring.is_equitable + + +ALL_STRATEGIES = [ + "largest_first", + "random_sequential", + "smallest_last", + "independent_set", + "connected_sequential_bfs", + "connected_sequential_dfs", + "connected_sequential", + "saturation_largest_first", + "DSATUR", +] + +# List of strategies where interchange=True results in an error +INTERCHANGE_INVALID = ["independent_set", "saturation_largest_first", "DSATUR"] + + +class TestColoring: + def test_basic_cases(self): + def check_basic_case(graph_func, n_nodes, strategy, interchange): + graph = graph_func() + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) + assert verify_length(coloring, n_nodes) + assert verify_coloring(graph, coloring) + + for graph_func, n_nodes in BASIC_TEST_CASES.items(): + for interchange in [True, False]: + for strategy in ALL_STRATEGIES: + check_basic_case(graph_func, n_nodes, strategy, False) + if strategy not in INTERCHANGE_INVALID: + check_basic_case(graph_func, n_nodes, strategy, True) + + def test_special_cases(self): + def check_special_case(strategy, graph_func, interchange, colors): + graph = graph_func() + coloring = nx.coloring.greedy_color( + graph, strategy=strategy, interchange=interchange + ) + if not hasattr(colors, "__len__"): + colors = [colors] + assert any(verify_length(coloring, n_colors) for n_colors in colors) + assert verify_coloring(graph, coloring) + + for strategy, arglist in SPECIAL_TEST_CASES.items(): + for args in arglist: + check_special_case(strategy, args[0], args[1], args[2]) + + def test_interchange_invalid(self): + graph = one_node_graph() + for strategy in INTERCHANGE_INVALID: + pytest.raises( + nx.NetworkXPointlessConcept, + nx.coloring.greedy_color, + graph, + strategy=strategy, + interchange=True, + ) + + def test_bad_inputs(self): + graph = one_node_graph() + pytest.raises( + nx.NetworkXError, + nx.coloring.greedy_color, + graph, + strategy="invalid strategy", + ) + + def test_strategy_as_function(self): + graph = lf_shc() + colors_1 = nx.coloring.greedy_color(graph, "largest_first") + colors_2 = nx.coloring.greedy_color(graph, nx.coloring.strategy_largest_first) + assert colors_1 == colors_2 + + def test_seed_argument(self): + graph = lf_shc() + rs = nx.coloring.strategy_random_sequential + c1 = nx.coloring.greedy_color(graph, lambda g, c: rs(g, c, seed=1)) + for u, v in graph.edges: + assert c1[u] != c1[v] + + def test_is_coloring(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2)]) + coloring = {0: 0, 1: 1, 2: 0} + assert is_coloring(G, coloring) + + 
coloring[0] = 1 + assert not is_coloring(G, coloring) + assert not is_equitable(G, coloring) + + def test_is_equitable(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2)]) + coloring = {0: 0, 1: 1, 2: 0} + assert is_equitable(G, coloring) + + G.add_edges_from([(2, 3), (2, 4), (2, 5)]) + coloring[3] = 1 + coloring[4] = 1 + coloring[5] = 1 + assert is_coloring(G, coloring) + assert not is_equitable(G, coloring) + + def test_num_colors(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (0, 3)]) + pytest.raises(nx.NetworkXAlgorithmError, nx.coloring.equitable_color, G, 2) + + def test_equitable_color(self): + G = nx.fast_gnp_random_graph(n=10, p=0.2, seed=42) + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring) + + def test_equitable_color_empty(self): + G = nx.empty_graph() + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring) + + def test_equitable_color_large(self): + G = nx.fast_gnp_random_graph(100, 0.1, seed=42) + coloring = nx.coloring.equitable_color(G, max_degree(G) + 1) + assert is_equitable(G, coloring, num_colors=max_degree(G) + 1) + + def test_case_V_plus_not_in_A_cal(self): + # Hand crafted case to avoid the easy case. + L = { + 0: [2, 5], + 1: [3, 4], + 2: [0, 8], + 3: [1, 7], + 4: [1, 6], + 5: [0, 6], + 6: [4, 5], + 7: [3], + 8: [2], + } + + F = { + # Color 0 + 0: 0, + 1: 0, + # Color 1 + 2: 1, + 3: 1, + 4: 1, + 5: 1, + # Color 2 + 6: 2, + 7: 2, + 8: 2, + } + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L + ) + check_state(L=L, N=N, H=H, F=F, C=C) + + def test_cast_no_solo(self): + L = { + 0: [8, 9], + 1: [10, 11], + 2: [8], + 3: [9], + 4: [10, 11], + 5: [8], + 6: [9], + 7: [10, 11], + 8: [0, 2, 5], + 9: [0, 3, 6], + 10: [1, 4, 7], + 11: [1, 4, 7], + } + + F = {0: 0, 1: 0, 2: 2, 3: 2, 4: 2, 5: 3, 6: 3, 7: 3, 8: 1, 9: 1, 10: 1, 11: 1} + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=1, N=N, H=H, F=F, C=C, L=L + ) + check_state(L=L, N=N, H=H, F=F, C=C) + + def test_hard_prob(self): + # Tests for two levels of recursion. + num_colors, s = 5, 5 + + G = nx.Graph() + G.add_edges_from( + [ + (0, 10), + (0, 11), + (0, 12), + (0, 23), + (10, 4), + (10, 9), + (10, 20), + (11, 4), + (11, 8), + (11, 16), + (12, 9), + (12, 22), + (12, 23), + (23, 7), + (1, 17), + (1, 18), + (1, 19), + (1, 24), + (17, 5), + (17, 13), + (17, 22), + (18, 5), + (19, 5), + (19, 6), + (19, 8), + (24, 7), + (24, 16), + (2, 4), + (2, 13), + (2, 14), + (2, 15), + (4, 6), + (13, 5), + (13, 21), + (14, 6), + (14, 15), + (15, 6), + (15, 21), + (3, 16), + (3, 20), + (3, 21), + (3, 22), + (16, 8), + (20, 8), + (21, 9), + (22, 7), + ] + ) + F = {node: node // s for node in range(num_colors * s)} + F[s - 1] = num_colors - 1 + + params = make_params_from_graph(G=G, F=F) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=num_colors - 1, **params + ) + check_state(**params) + + def test_hardest_prob(self): + # Tests for two levels of recursion. 
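+        # (s = 4 nodes per color class; F below moves node s - 1 from color 0
+        # to color 9, creating the V- = 0 / V+ = 9 imbalance to repair.)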
+ num_colors, s = 10, 4 + + G = nx.Graph() + G.add_edges_from( + [ + (0, 19), + (0, 24), + (0, 29), + (0, 30), + (0, 35), + (19, 3), + (19, 7), + (19, 9), + (19, 15), + (19, 21), + (19, 24), + (19, 30), + (19, 38), + (24, 5), + (24, 11), + (24, 13), + (24, 20), + (24, 30), + (24, 37), + (24, 38), + (29, 6), + (29, 10), + (29, 13), + (29, 15), + (29, 16), + (29, 17), + (29, 20), + (29, 26), + (30, 6), + (30, 10), + (30, 15), + (30, 22), + (30, 23), + (30, 39), + (35, 6), + (35, 9), + (35, 14), + (35, 18), + (35, 22), + (35, 23), + (35, 25), + (35, 27), + (1, 20), + (1, 26), + (1, 31), + (1, 34), + (1, 38), + (20, 4), + (20, 8), + (20, 14), + (20, 18), + (20, 28), + (20, 33), + (26, 7), + (26, 10), + (26, 14), + (26, 18), + (26, 21), + (26, 32), + (26, 39), + (31, 5), + (31, 8), + (31, 13), + (31, 16), + (31, 17), + (31, 21), + (31, 25), + (31, 27), + (34, 7), + (34, 8), + (34, 13), + (34, 18), + (34, 22), + (34, 23), + (34, 25), + (34, 27), + (38, 4), + (38, 9), + (38, 12), + (38, 14), + (38, 21), + (38, 27), + (2, 3), + (2, 18), + (2, 21), + (2, 28), + (2, 32), + (2, 33), + (2, 36), + (2, 37), + (2, 39), + (3, 5), + (3, 9), + (3, 13), + (3, 22), + (3, 23), + (3, 25), + (3, 27), + (18, 6), + (18, 11), + (18, 15), + (18, 39), + (21, 4), + (21, 10), + (21, 14), + (21, 36), + (28, 6), + (28, 10), + (28, 14), + (28, 16), + (28, 17), + (28, 25), + (28, 27), + (32, 5), + (32, 10), + (32, 12), + (32, 16), + (32, 17), + (32, 22), + (32, 23), + (33, 7), + (33, 10), + (33, 12), + (33, 16), + (33, 17), + (33, 25), + (33, 27), + (36, 5), + (36, 8), + (36, 15), + (36, 16), + (36, 17), + (36, 25), + (36, 27), + (37, 5), + (37, 11), + (37, 15), + (37, 16), + (37, 17), + (37, 22), + (37, 23), + (39, 7), + (39, 8), + (39, 15), + (39, 22), + (39, 23), + ] + ) + F = {node: node // s for node in range(num_colors * s)} + F[s - 1] = num_colors - 1 # V- = 0, V+ = num_colors - 1 + + params = make_params_from_graph(G=G, F=F) + + nx.algorithms.coloring.equitable_coloring.procedure_P( + V_minus=0, V_plus=num_colors - 1, **params + ) + check_state(**params) + + def test_strategy_saturation_largest_first(self): + def color_remaining_nodes( + G, + colored_nodes, + full_color_assignment=None, + nodes_to_add_between_calls=1, + ): + color_assignments = [] + aux_colored_nodes = colored_nodes.copy() + + node_iterator = nx.algorithms.coloring.greedy_coloring.strategy_saturation_largest_first( + G, aux_colored_nodes + ) + + for u in node_iterator: + # Set to keep track of colors of neighbors + nbr_colors = { + aux_colored_nodes[v] for v in G[u] if v in aux_colored_nodes + } + # Find the first unused color. + for color in itertools.count(): + if color not in nbr_colors: + break + aux_colored_nodes[u] = color + color_assignments.append((u, color)) + + # Color nodes between iterations + for i in range(nodes_to_add_between_calls - 1): + if not len(color_assignments) + len(colored_nodes) >= len( + full_color_assignment + ): + full_color_assignment_node, color = full_color_assignment[ + len(color_assignments) + len(colored_nodes) + ] + + # Assign the new color to the current node. 
+ aux_colored_nodes[full_color_assignment_node] = color + color_assignments.append((full_color_assignment_node, color)) + + return color_assignments, aux_colored_nodes + + for G, _, _ in SPECIAL_TEST_CASES["saturation_largest_first"]: + G = G() + + # Check that function still works when nodes are colored between iterations + for nodes_to_add_between_calls in range(1, 5): + # Get a full color assignment, (including the order in which nodes were colored) + colored_nodes = {} + full_color_assignment, full_colored_nodes = color_remaining_nodes( + G, colored_nodes + ) + + # For each node in the color assignment, add it to colored_nodes and re-run the function + for ind, (node, color) in enumerate(full_color_assignment): + colored_nodes[node] = color + + ( + partial_color_assignment, + partial_colored_nodes, + ) = color_remaining_nodes( + G, + colored_nodes, + full_color_assignment=full_color_assignment, + nodes_to_add_between_calls=nodes_to_add_between_calls, + ) + + # Check that the color assignment and order of remaining nodes are the same + assert full_color_assignment[ind + 1 :] == partial_color_assignment + assert full_colored_nodes == partial_colored_nodes + + +# ############################ Utility functions ############################ +def verify_coloring(graph, coloring): + for node in graph.nodes(): + if node not in coloring: + return False + + color = coloring[node] + for neighbor in graph.neighbors(node): + if coloring[neighbor] == color: + return False + + return True + + +def verify_length(coloring, expected): + coloring = dict_to_sets(coloring) + return len(coloring) == expected + + +def dict_to_sets(colors): + if len(colors) == 0: + return [] + + k = max(colors.values()) + 1 + sets = [set() for _ in range(k)] + + for node, color in colors.items(): + sets[color].add(node) + + return sets + + +# ############################ Graph Generation ############################ + + +def empty_graph(): + return nx.Graph() + + +def one_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1]) + return graph + + +def two_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1, 2]) + graph.add_edges_from([(1, 2)]) + return graph + + +def three_node_clique(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3]) + graph.add_edges_from([(1, 2), (1, 3), (2, 3)]) + return graph + + +def disconnected(): + graph = nx.Graph() + graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)]) + return graph + + +def rs_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) + return graph + + +def slf_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) + return graph + + +def slf_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 4), + (2, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 8), + (7, 8), + ] + ) + return graph + + +def lf_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6]) + graph.add_edges_from([(6, 1), (1, 4), (4, 3), (3, 2), (2, 5)]) + return graph + + +def lf_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from( + [ + (1, 7), + (1, 6), + (1, 3), + (1, 4), + (7, 2), + (2, 6), + (2, 3), + (2, 5), + (5, 3), + (5, 4), + (4, 3), + ] + ) + return graph + + +def sl_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6]) + 
graph.add_edges_from( + [(1, 2), (1, 3), (2, 3), (1, 4), (2, 5), (3, 6), (4, 5), (4, 6), (5, 6)] + ) + return graph + + +def sl_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 4), + (2, 8), + (8, 4), + (8, 6), + (8, 7), + (7, 5), + (7, 6), + (3, 4), + (4, 6), + (6, 5), + (5, 3), + ] + ) + return graph + + +def gis_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4]) + graph.add_edges_from([(1, 2), (2, 3), (3, 4)]) + return graph + + +def gis_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6]) + graph.add_edges_from([(1, 5), (2, 5), (3, 6), (4, 6), (5, 6)]) + return graph + + +def cs_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5]) + graph.add_edges_from([(1, 2), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (4, 5)]) + return graph + + +def rsi_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (3, 4), (4, 5), (4, 6), (5, 6)] + ) + return graph + + +def lfi_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from( + [(1, 2), (1, 5), (1, 6), (2, 3), (2, 7), (3, 4), (3, 7), (4, 5), (4, 6), (5, 6)] + ) + return graph + + +def lfi_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) + graph.add_edges_from( + [ + (1, 2), + (1, 5), + (1, 6), + (1, 7), + (2, 3), + (2, 8), + (2, 9), + (3, 4), + (3, 8), + (3, 9), + (4, 5), + (4, 6), + (4, 7), + (5, 6), + ] + ) + return graph + + +def sli_shc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 5), + (1, 7), + (2, 3), + (2, 6), + (3, 4), + (4, 5), + (4, 6), + (5, 7), + (6, 7), + ] + ) + return graph + + +def sli_hc(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) + graph.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 7), + (2, 8), + (2, 9), + (3, 6), + (3, 7), + (3, 9), + (4, 5), + (4, 6), + (4, 8), + (4, 9), + (5, 6), + (5, 7), + (5, 8), + (6, 7), + (6, 9), + (7, 8), + (8, 9), + ] + ) + return graph + + +# -------------------------------------------------------------------------- +# Basic tests for all strategies +# For each basic graph function, specify the number of expected colors. +BASIC_TEST_CASES = { + empty_graph: 0, + one_node_graph: 1, + two_node_graph: 2, + disconnected: 2, + three_node_clique: 3, +} + + +# -------------------------------------------------------------------------- +# Special test cases. 
Each strategy has a list of tuples of the form +# (graph function, interchange, valid # of colors) +SPECIAL_TEST_CASES = { + "random_sequential": [ + (rs_shc, False, (2, 3)), + (rs_shc, True, 2), + (rsi_shc, True, (3, 4)), + ], + "saturation_largest_first": [(slf_shc, False, (3, 4)), (slf_hc, False, 4)], + "largest_first": [ + (lf_shc, False, (2, 3)), + (lf_hc, False, 4), + (lf_shc, True, 2), + (lf_hc, True, 3), + (lfi_shc, True, (3, 4)), + (lfi_hc, True, 4), + ], + "smallest_last": [ + (sl_shc, False, (3, 4)), + (sl_hc, False, 5), + (sl_shc, True, 3), + (sl_hc, True, 4), + (sli_shc, True, (3, 4)), + (sli_hc, True, 5), + ], + "independent_set": [(gis_shc, False, (2, 3)), (gis_hc, False, 3)], + "connected_sequential": [(cs_shc, False, (3, 4)), (cs_shc, True, 3)], + "connected_sequential_dfs": [(cs_shc, False, (3, 4))], +} + + +# -------------------------------------------------------------------------- +# Helper functions to test +# (graph function, interchange, valid # of colors) + + +def check_state(L, N, H, F, C): + s = len(C[0]) + num_colors = len(C.keys()) + + assert all(u in L[v] for u in L for v in L[u]) + assert all(F[u] != F[v] for u in L for v in L[u]) + assert all(len(L[u]) < num_colors for u in L) + assert all(len(C[x]) == s for x in C) + assert all(H[(c1, c2)] >= 0 for c1 in C for c2 in C) + assert all(N[(u, F[u])] == 0 for u in F) + + +def max_degree(G): + """Get the maximum degree of any node in G.""" + return max(G.degree(node) for node in G.nodes) if len(G.nodes) > 0 else 0 + + +def make_params_from_graph(G, F): + """Returns {N, L, H, C} from the given graph.""" + num_nodes = len(G) + L = {u: [] for u in range(num_nodes)} + for u, v in G.edges: + L[u].append(v) + L[v].append(u) + + C = nx.algorithms.coloring.equitable_coloring.make_C_from_F(F) + N = nx.algorithms.coloring.equitable_coloring.make_N_from_L_C(L, C) + H = nx.algorithms.coloring.equitable_coloring.make_H_from_C_N(C, N) + + return {"N": N, "F": F, "C": C, "H": H, "L": L} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py new file mode 100644 index 0000000..dea156b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/communicability_alg.py @@ -0,0 +1,163 @@ +""" +Communicability. +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["communicability", "communicability_exp"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def communicability(G): + r"""Returns communicability between all pairs of nodes in G. + + The communicability between pairs of nodes in G is the sum of + walks of different lengths starting at node u and ending at node v. + + Parameters + ---------- + G: graph + + Returns + ------- + comm: dictionary of dictionaries + Dictionary of dictionaries keyed by nodes with communicability + as the value. + + Raises + ------ + NetworkXError + If the graph is not undirected and simple. + + See Also + -------- + communicability_exp: + Communicability between all pairs of nodes in G using spectral + decomposition. + communicability_betweenness_centrality: + Communicability betweenness centrality for each node in G. + + Notes + ----- + This algorithm uses a spectral decomposition of the adjacency matrix. + Let G=(V,E) be a simple undirected graph. 
Using the connection between
+    the powers of the adjacency matrix and the number of walks in the graph,
+    the communicability between nodes `u` and `v` based on the graph spectrum
+    is [1]_
+
+    .. math::
+        C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}},
+
+    where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal
+    eigenvector of the adjacency matrix associated with the eigenvalue
+    `\lambda_{j}`.
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Naomichi Hatano,
+       "Communicability in complex networks",
+       Phys. Rev. E 77, 036111 (2008).
+       https://arxiv.org/abs/0707.0756
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
+    >>> c = nx.communicability(G)
+    """
+    import numpy as np
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix
+    A[A != 0.0] = 1
+    w, vec = np.linalg.eigh(A)
+    expw = np.exp(w)
+    mapping = dict(zip(nodelist, range(len(nodelist))))
+    c = {}
+    # computing communicabilities
+    for u in G:
+        c[u] = {}
+        for v in G:
+            s = 0
+            p = mapping[u]
+            q = mapping[v]
+            for j in range(len(nodelist)):
+                s += vec[:, j][p] * vec[:, j][q] * expw[j]
+            c[u][v] = float(s)
+    return c
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def communicability_exp(G):
+    r"""Returns communicability between all pairs of nodes in G.
+
+    Communicability between a pair of nodes (u, v) in G is the sum of
+    walks of different lengths starting at node u and ending at node v.
+
+    Parameters
+    ----------
+    G: graph
+
+    Returns
+    -------
+    comm: dictionary of dictionaries
+        Dictionary of dictionaries keyed by nodes with communicability
+        as the value.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not undirected and simple.
+
+    See Also
+    --------
+    communicability:
+        Communicability between pairs of nodes in G.
+    communicability_betweenness_centrality:
+        Communicability betweenness centrality for each node in G.
+
+    Notes
+    -----
+    This algorithm uses matrix exponentiation of the adjacency matrix.
+
+    Let G=(V,E) be a simple undirected graph. Using the connection between
+    the powers of the adjacency matrix and the number of walks in the graph,
+    the communicability between nodes u and v is [1]_,
+
+    .. math::
+        C(u,v) = (e^A)_{uv},
+
+    where `A` is the adjacency matrix of G.
+
+    References
+    ----------
+    .. [1] Ernesto Estrada, Naomichi Hatano,
+       "Communicability in complex networks",
+       Phys. Rev. E 77, 036111 (2008).
+       https://arxiv.org/abs/0707.0756
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
+    >>> c = nx.communicability_exp(G)
+    """
+    import scipy as sp
+
+    nodelist = list(G)  # ordering of nodes in matrix
+    A = nx.to_numpy_array(G, nodelist)
+    # convert to 0-1 matrix
+    A[A != 0.0] = 1
+    # communicability matrix
+    expA = sp.linalg.expm(A)
+    mapping = dict(zip(nodelist, range(len(nodelist))))
+    c = {}
+    for u in G:
+        c[u] = {}
+        for v in G:
+            c[u][v] = float(expA[mapping[u], mapping[v]])
+    return c
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py
new file mode 100644
index 0000000..494a8e1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__init__.py
@@ -0,0 +1,28 @@
+"""Functions for computing and measuring community structure.
+ +The ``community`` subpackage can be accessed by using :mod:`networkx.community`, then accessing the +functions as attributes of ``community``. For example:: + + >>> import networkx as nx + >>> G = nx.barbell_graph(5, 1) + >>> communities_generator = nx.community.girvan_newman(G) + >>> top_level_communities = next(communities_generator) + >>> next_level_communities = next(communities_generator) + >>> sorted(map(sorted, next_level_communities)) + [[0, 1, 2, 3, 4], [5], [6, 7, 8, 9, 10]] + +""" + +from networkx.algorithms.community.asyn_fluid import * +from networkx.algorithms.community.centrality import * +from networkx.algorithms.community.divisive import * +from networkx.algorithms.community.kclique import * +from networkx.algorithms.community.kernighan_lin import * +from networkx.algorithms.community.label_propagation import * +from networkx.algorithms.community.lukes import * +from networkx.algorithms.community.modularity_max import * +from networkx.algorithms.community.quality import * +from networkx.algorithms.community.community_utils import * +from networkx.algorithms.community.louvain import * +from networkx.algorithms.community.leiden import * +from networkx.algorithms.community.local import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..44a3729 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-312.pyc new file mode 100644 index 0000000..1f02db1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/asyn_fluid.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-312.pyc new file mode 100644 index 0000000..1861b95 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-312.pyc new file mode 100644 index 0000000..20ac92d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/community_utils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-312.pyc new file mode 100644 index 0000000..879caeb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/divisive.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-312.pyc new file mode 100644 index 0000000..1f5b123 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kclique.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-312.pyc new file mode 100644 index 0000000..9e23c05 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/kernighan_lin.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-312.pyc new file mode 100644 index 0000000..b5dca23 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/label_propagation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/leiden.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/leiden.cpython-312.pyc new file mode 100644 index 0000000..18422f6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/leiden.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/local.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/local.cpython-312.pyc new file mode 100644 index 0000000..b007766 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/local.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-312.pyc new file mode 100644 index 0000000..9becd97 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/louvain.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-312.pyc new file mode 100644 index 0000000..e92cdb8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/lukes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-312.pyc new file mode 100644 index 0000000..4f1485e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/modularity_max.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-312.pyc new file mode 100644 index 0000000..b12fe19 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/__pycache__/quality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py new file mode 100644 index 
0000000..fea72c1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/asyn_fluid.py
@@ -0,0 +1,151 @@
+"""Asynchronous Fluid Communities algorithm for community detection."""
+
+from collections import Counter
+
+import networkx as nx
+from networkx.algorithms.components import is_connected
+from networkx.exception import NetworkXError
+from networkx.utils import groups, not_implemented_for, py_random_state
+
+__all__ = ["asyn_fluidc"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(3)
+@nx._dispatchable
+def asyn_fluidc(G, k, max_iter=100, seed=None):
+    """Returns communities in `G` as detected by Fluid Communities algorithm.
+
+    The asynchronous fluid communities algorithm is described in
+    [1]_. The algorithm is based on the simple idea of fluids interacting
+    in an environment, expanding and pushing each other. Its initialization is
+    random, so the communities found may vary across executions.
+
+    The algorithm proceeds as follows. First, each of the initial k communities
+    is initialized at a random vertex in the graph. Then the algorithm iterates
+    over all vertices in a random order, updating the community of each vertex
+    based on its own community and the communities of its neighbors. This
+    process is performed several times until convergence.
+    At all times, each community has a total density of 1, which is equally
+    distributed among the vertices it contains. If a vertex changes community,
+    the densities of the affected communities are adjusted immediately. When a
+    complete iteration over all vertices is done, such that no vertex changes
+    the community it belongs to, the algorithm has converged and returns.
+
+    This is the original version of the algorithm described in [1]_.
+    Unfortunately, it does not support weighted graphs yet.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be simple and undirected.
+
+    k : integer
+        The number of communities to be found.
+
+    max_iter : integer
+        The maximum number of iterations allowed. By default 100.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    communities : iterable
+        Iterable of communities given as sets of nodes.
+
+    Notes
+    -----
+    The `k` argument is required; it is not optional.
+
+    References
+    ----------
+    .. [1] Parés F., Garcia-Gasulla D. et al. "Fluid Communities: A
+       Competitive and Highly Scalable Community Detection Algorithm".
+       [https://arxiv.org/pdf/1703.09307.pdf].
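+
+    Examples
+    --------
+    An illustrative sketch; the graph and seed below are arbitrary choices,
+    and the communities found depend on the random initialization.
+
+    >>> G = nx.barbell_graph(5, 0)
+    >>> communities = list(nx.community.asyn_fluidc(G, k=2, seed=10))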
+ """ + # Initial checks + if not isinstance(k, int): + raise NetworkXError("k must be an integer.") + if not k > 0: + raise NetworkXError("k must be greater than 0.") + if not is_connected(G): + raise NetworkXError("Fluid Communities require connected Graphs.") + if len(G) < k: + raise NetworkXError("k cannot be bigger than the number of nodes.") + # Initialization + max_density = 1.0 + vertices = list(G) + seed.shuffle(vertices) + communities = {n: i for i, n in enumerate(vertices[:k])} + density = {} + com_to_numvertices = {} + for vertex in communities: + com_to_numvertices[communities[vertex]] = 1 + density[communities[vertex]] = max_density + # Set up control variables and start iterating + iter_count = 0 + cont = True + while cont: + cont = False + iter_count += 1 + # Loop over all vertices in graph in a random order + vertices = list(G) + seed.shuffle(vertices) + for vertex in vertices: + # Updating rule + com_counter = Counter() + # Take into account self vertex community + try: + com_counter.update({communities[vertex]: density[communities[vertex]]}) + except KeyError: + pass + # Gather neighbor vertex communities + for v in G[vertex]: + try: + com_counter.update({communities[v]: density[communities[v]]}) + except KeyError: + continue + # Check which is the community with highest density + new_com = -1 + if len(com_counter.keys()) > 0: + max_freq = max(com_counter.values()) + best_communities = [ + com + for com, freq in com_counter.items() + if (max_freq - freq) < 0.0001 + ] + # If actual vertex com in best communities, it is preserved + try: + if communities[vertex] in best_communities: + new_com = communities[vertex] + except KeyError: + pass + # If vertex community changes... + if new_com == -1: + # Set flag of non-convergence + cont = True + # Randomly chose a new community from candidates + new_com = seed.choice(best_communities) + # Update previous community status + try: + com_to_numvertices[communities[vertex]] -= 1 + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) + except KeyError: + pass + # Update new community status + communities[vertex] = new_com + com_to_numvertices[communities[vertex]] += 1 + density[communities[vertex]] = ( + max_density / com_to_numvertices[communities[vertex]] + ) + # If maximum iterations reached --> output actual results + if iter_count > max_iter: + break + # Return results by grouping communities as list of vertices + return iter(groups(communities).values()) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py new file mode 100644 index 0000000..4328170 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/centrality.py @@ -0,0 +1,171 @@ +"""Functions for computing communities based on centrality notions.""" + +import networkx as nx + +__all__ = ["girvan_newman"] + + +@nx._dispatchable(preserve_edge_attrs="most_valuable_edge") +def girvan_newman(G, most_valuable_edge=None): + """Finds communities in a graph using the Girvan–Newman method. + + Parameters + ---------- + G : NetworkX graph + + most_valuable_edge : function + Function that takes a graph as input and outputs an edge. The + edge returned by this function will be recomputed and removed at + each iteration of the algorithm. + + If not specified, the edge with the highest + :func:`networkx.edge_betweenness_centrality` will be used. 
+ + Returns + ------- + iterator + Iterator over tuples of sets of nodes in `G`. Each set of node + is a community, each tuple is a sequence of communities at a + particular level of the algorithm. + + Examples + -------- + To get the first pair of communities:: + + >>> G = nx.path_graph(10) + >>> comp = nx.community.girvan_newman(G) + >>> tuple(sorted(c) for c in next(comp)) + ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9]) + + To get only the first *k* tuples of communities, use + :func:`itertools.islice`:: + + >>> import itertools + >>> G = nx.path_graph(8) + >>> k = 2 + >>> comp = nx.community.girvan_newman(G) + >>> for communities in itertools.islice(comp, k): + ... print(tuple(sorted(c) for c in communities)) + ... + ([0, 1, 2, 3], [4, 5, 6, 7]) + ([0, 1], [2, 3], [4, 5, 6, 7]) + + To stop getting tuples of communities once the number of communities + is greater than *k*, use :func:`itertools.takewhile`:: + + >>> import itertools + >>> G = nx.path_graph(8) + >>> k = 4 + >>> comp = nx.community.girvan_newman(G) + >>> limited = itertools.takewhile(lambda c: len(c) <= k, comp) + >>> for communities in limited: + ... print(tuple(sorted(c) for c in communities)) + ... + ([0, 1, 2, 3], [4, 5, 6, 7]) + ([0, 1], [2, 3], [4, 5, 6, 7]) + ([0, 1], [2, 3], [4, 5], [6, 7]) + + To just choose an edge to remove based on the weight:: + + >>> from operator import itemgetter + >>> G = nx.path_graph(10) + >>> edges = G.edges() + >>> nx.set_edge_attributes(G, {(u, v): v for u, v in edges}, "weight") + >>> def heaviest(G): + ... u, v, w = max(G.edges(data="weight"), key=itemgetter(2)) + ... return (u, v) + ... + >>> comp = nx.community.girvan_newman(G, most_valuable_edge=heaviest) + >>> tuple(sorted(c) for c in next(comp)) + ([0, 1, 2, 3, 4, 5, 6, 7, 8], [9]) + + To utilize edge weights when choosing an edge with, for example, the + highest betweenness centrality:: + + >>> from networkx import edge_betweenness_centrality as betweenness + >>> def most_central_edge(G): + ... centrality = betweenness(G, weight="weight") + ... return max(centrality, key=centrality.get) + ... + >>> G = nx.path_graph(10) + >>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge) + >>> tuple(sorted(c) for c in next(comp)) + ([0, 1, 2, 3, 4], [5, 6, 7, 8, 9]) + + To specify a different ranking algorithm for edges, use the + `most_valuable_edge` keyword argument:: + + >>> from networkx import edge_betweenness_centrality + >>> from random import random + >>> def most_central_edge(G): + ... centrality = edge_betweenness_centrality(G) + ... max_cent = max(centrality.values()) + ... # Scale the centrality values so they are between 0 and 1, + ... # and add some random noise. + ... centrality = {e: c / max_cent for e, c in centrality.items()} + ... # Add some random noise. + ... centrality = {e: c + random() for e, c in centrality.items()} + ... return max(centrality, key=centrality.get) + ... + >>> G = nx.path_graph(10) + >>> comp = nx.community.girvan_newman(G, most_valuable_edge=most_central_edge) + + Notes + ----- + The Girvan–Newman algorithm detects communities by progressively + removing edges from the original graph. The algorithm removes the + "most valuable" edge, traditionally the edge with the highest + betweenness centrality, at each step. As the graph breaks down into + pieces, the tightly knit community structure is exposed and the + result can be depicted as a dendrogram. + + """ + # If the graph is already empty, simply return its connected + # components. 
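+    # ("Empty" here means no edges: every node is then its own connected
+    # component, so each singleton is yielded as a community.)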
+ if G.number_of_edges() == 0: + yield tuple(nx.connected_components(G)) + return + # If no function is provided for computing the most valuable edge, + # use the edge betweenness centrality. + if most_valuable_edge is None: + + def most_valuable_edge(G): + """Returns the edge with the highest betweenness centrality + in the graph `G`. + + """ + # We have guaranteed that the graph is non-empty, so this + # dictionary will never be empty. + betweenness = nx.edge_betweenness_centrality(G) + return max(betweenness, key=betweenness.get) + + # The copy of G here must include the edge weight data. + g = G.copy().to_undirected() + # Self-loops must be removed because their removal has no effect on + # the connected components of the graph. + g.remove_edges_from(nx.selfloop_edges(g)) + while g.number_of_edges() > 0: + yield _without_most_central_edges(g, most_valuable_edge) + + +def _without_most_central_edges(G, most_valuable_edge): + """Returns the connected components of the graph that results from + repeatedly removing the most "valuable" edge in the graph. + + `G` must be a non-empty graph. This function modifies the graph `G` + in-place; that is, it removes edges on the graph `G`. + + `most_valuable_edge` is a function that takes the graph `G` as input + (or a subgraph with one or more edges of `G` removed) and returns an + edge. That edge will be removed and this process will be repeated + until the number of connected components in the graph increases. + + """ + original_num_components = nx.number_connected_components(G) + num_new_components = original_num_components + while num_new_components <= original_num_components: + edge = most_valuable_edge(G) + G.remove_edge(*edge) + new_components = tuple(nx.connected_components(G)) + num_new_components = len(new_components) + return new_components diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py new file mode 100644 index 0000000..ba73a6b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/community_utils.py @@ -0,0 +1,30 @@ +"""Helper functions for community-finding algorithms.""" + +import networkx as nx + +__all__ = ["is_partition"] + + +@nx._dispatchable +def is_partition(G, communities): + """Returns *True* if `communities` is a partition of the nodes of `G`. + + A partition of a universe set is a family of pairwise disjoint sets + whose union is the entire universe set. + + Parameters + ---------- + G : NetworkX graph. + + communities : list or iterable of sets of nodes + If not a list, the iterable is converted internally to a list. + If it is an iterator it is exhausted. 
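+
+    Examples
+    --------
+    A minimal sanity check; the sets below are arbitrary illustrations.
+
+    >>> G = nx.path_graph(4)
+    >>> nx.community.is_partition(G, [{0, 1}, {2, 3}])
+    True
+    >>> nx.community.is_partition(G, [{0, 1}, {1, 2, 3}])
+    False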
+ + """ + # Alternate implementation: + # return all(sum(1 if v in c else 0 for c in communities) == 1 for v in G) + if not isinstance(communities, list): + communities = list(communities) + nodes = {n for c in communities for n in c if n in G} + + return len(G) == len(nodes) == sum(len(c) for c in communities) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py new file mode 100644 index 0000000..be3c7d8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/divisive.py @@ -0,0 +1,216 @@ +import functools + +import networkx as nx + +__all__ = [ + "edge_betweenness_partition", + "edge_current_flow_betweenness_partition", +] + + +@nx._dispatchable(edge_attrs="weight") +def edge_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by iteratively removing the highest edge betweenness edge. + + This algorithm works by calculating the edge betweenness for all + edges and removing the edge with the highest value. It is then + determined whether the graph has been broken into at least + `number_of_sets` connected components. + If not the process is repeated. + + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional, default=None + The key to use if using weights for edge betweenness calculation + + Returns + ------- + C : list of sets + Partition of the nodes of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or if number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_betweenness_partition(G, 2) + >>> {0, 1, 3, 4, 5, 6, 7, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> { + ... 2, + ... 8, + ... 9, + ... 14, + ... 15, + ... 18, + ... 20, + ... 22, + ... 23, + ... 24, + ... 25, + ... 26, + ... 27, + ... 28, + ... 29, + ... 30, + ... 31, + ... 32, + ... 33, + ... } in part + True + + See Also + -------- + edge_current_flow_betweenness_partition + + Notes + ----- + This algorithm is fairly slow, as both the calculation of connected + components and edge betweenness relies on all pairs shortest + path algorithms. They could potentially be combined to cut down + on overall computation time. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + if number_of_sets == 1: + return [set(G)] + if number_of_sets == len(G): + return [{n} for n in G] + if number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + H = G.copy() + partition = list(nx.connected_components(H)) + while len(partition) < number_of_sets: + ranking = nx.edge_betweenness_centrality(H, weight=weight) + edge = max(ranking, key=ranking.get) + H.remove_edge(*edge) + partition = list(nx.connected_components(H)) + return partition + + +@nx._dispatchable(edge_attrs="weight") +def edge_current_flow_betweenness_partition(G, number_of_sets, *, weight=None): + """Partition created by removing the highest edge current flow betweenness edge. + + This algorithm works by calculating the edge current flow + betweenness for all edges and removing the edge with the + highest value. 
It is then determined whether the graph has + been broken into at least `number_of_sets` connected + components. If not the process is repeated. + + Parameters + ---------- + G : NetworkX Graph, DiGraph or MultiGraph + Graph to be partitioned + + number_of_sets : int + Number of sets in the desired partition of the graph + + weight : key, optional (default=None) + The edge attribute key to use as weights for + edge current flow betweenness calculations + + Returns + ------- + C : list of sets + Partition of G + + Raises + ------ + NetworkXError + If number_of_sets is <= 0 or number_of_sets > len(G) + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> part = nx.community.edge_current_flow_betweenness_partition(G, 2) + >>> {0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 11, 12, 13, 16, 17, 19, 21} in part + True + >>> {8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33} in part + True + + + See Also + -------- + edge_betweenness_partition + + Notes + ----- + This algorithm is extremely slow, as the recalculation of the edge + current flow betweenness is extremely slow. + + References + ---------- + .. [1] Santo Fortunato 'Community Detection in Graphs' Physical Reports + Volume 486, Issue 3-5 p. 75-174 + http://arxiv.org/abs/0906.0612 + """ + if number_of_sets <= 0: + raise nx.NetworkXError("number_of_sets must be >0") + elif number_of_sets == 1: + return [set(G)] + elif number_of_sets == len(G): + return [{n} for n in G] + elif number_of_sets > len(G): + raise nx.NetworkXError("number_of_sets must be <= len(G)") + + rank = functools.partial( + nx.edge_current_flow_betweenness_centrality, normalized=False, weight=weight + ) + + # current flow requires a connected network so we track the components explicitly + H = G.copy() + partition = list(nx.connected_components(H)) + if len(partition) > 1: + Hcc_subgraphs = [H.subgraph(cc).copy() for cc in partition] + else: + Hcc_subgraphs = [H] + + ranking = {} + for Hcc in Hcc_subgraphs: + ranking.update(rank(Hcc)) + + while len(partition) < number_of_sets: + edge = max(ranking, key=ranking.get) + for cc, Hcc in zip(partition, Hcc_subgraphs): + if edge[0] in cc: + Hcc.remove_edge(*edge) + del ranking[edge] + splitcc_list = list(nx.connected_components(Hcc)) + if len(splitcc_list) > 1: + # there are 2 connected components. split off smaller one + cc_new = min(splitcc_list, key=len) + Hcc_new = Hcc.subgraph(cc_new).copy() + # update edge rankings for Hcc_new + newranks = rank(Hcc_new) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + # append new cc and Hcc to their lists. + partition.append(cc_new) + Hcc_subgraphs.append(Hcc_new) + + # leave existing cc and Hcc in their lists, but shrink them + Hcc.remove_nodes_from(cc_new) + cc.difference_update(cc_new) + # update edge rankings for Hcc whether it was split or not + newranks = rank(Hcc) + for e, r in newranks.items(): + ranking[e if e in ranking else e[::-1]] = r + break + return partition diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py new file mode 100644 index 0000000..c724910 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kclique.py @@ -0,0 +1,79 @@ +from collections import defaultdict + +import networkx as nx + +__all__ = ["k_clique_communities"] + + +@nx._dispatchable +def k_clique_communities(G, k, cliques=None): + """Find k-clique communities in graph using the percolation method. 
+ + A k-clique community is the union of all cliques of size k that + can be reached through adjacent (sharing k-1 nodes) k-cliques. + + Parameters + ---------- + G : NetworkX graph + + k : int + Size of smallest clique + + cliques: list or generator + Precomputed cliques (use networkx.find_cliques(G)) + + Returns + ------- + Yields sets of nodes, one for each k-clique community. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> K5 = nx.convert_node_labels_to_integers(G, first_label=2) + >>> G.add_edges_from(K5.edges()) + >>> c = list(nx.community.k_clique_communities(G, 4)) + >>> sorted(list(c[0])) + [0, 1, 2, 3, 4, 5, 6] + >>> list(nx.community.k_clique_communities(G, 6)) + [] + + References + ---------- + .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek, + Uncovering the overlapping community structure of complex networks + in nature and society Nature 435, 814-818, 2005, + doi:10.1038/nature03607 + """ + if k < 2: + raise nx.NetworkXError(f"k={k}, k must be greater than 1.") + if cliques is None: + cliques = nx.find_cliques(G) + cliques = [frozenset(c) for c in cliques if len(c) >= k] + + # First index which nodes are in which cliques + membership_dict = defaultdict(list) + for clique in cliques: + for node in clique: + membership_dict[node].append(clique) + + # For each clique, see which adjacent cliques percolate + perc_graph = nx.Graph() + perc_graph.add_nodes_from(cliques) + for clique in cliques: + for adj_clique in _get_adjacent_cliques(clique, membership_dict): + if len(clique.intersection(adj_clique)) >= (k - 1): + perc_graph.add_edge(clique, adj_clique) + + # Connected components of clique graph with perc edges + # are the percolated cliques + for component in nx.connected_components(perc_graph): + yield (frozenset.union(*component)) + + +def _get_adjacent_cliques(clique, membership_dict): + adjacent_cliques = set() + for n in clique: + for adj_clique in membership_dict[n]: + if clique != adj_clique: + adjacent_cliques.add(adj_clique) + return adjacent_cliques diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py new file mode 100644 index 0000000..f6397d8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/kernighan_lin.py @@ -0,0 +1,139 @@ +"""Functions for computing the Kernighan–Lin bipartition algorithm.""" + +from itertools import count + +import networkx as nx +from networkx.algorithms.community.community_utils import is_partition +from networkx.utils import BinaryHeap, not_implemented_for, py_random_state + +__all__ = ["kernighan_lin_bisection"] + + +def _kernighan_lin_sweep(edges, side): + """ + This is a modified form of Kernighan-Lin, which moves single nodes at a + time, alternating between sides to keep the bisection balanced. We keep + two min-heaps of swap costs to make optimal-next-move selection fast. 
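+
+    Yields tuples ``(total_cost, i, (u, v))``, one per balanced pair of
+    moves, so the caller can apply the prefix of moves whose running
+    total cost is minimal.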
+ """ + costs0, costs1 = costs = BinaryHeap(), BinaryHeap() + for u, side_u, edges_u in zip(count(), side, edges): + cost_u = sum(w if side[v] else -w for v, w in edges_u) + costs[side_u].insert(u, cost_u if side_u else -cost_u) + + def _update_costs(costs_x, x): + for y, w in edges[x]: + costs_y = costs[side[y]] + cost_y = costs_y.get(y) + if cost_y is not None: + cost_y += 2 * (-w if costs_x is costs_y else w) + costs_y.insert(y, cost_y, True) + + i = 0 + totcost = 0 + while costs0 and costs1: + u, cost_u = costs0.pop() + _update_costs(costs0, u) + v, cost_v = costs1.pop() + _update_costs(costs1, v) + totcost += cost_u + cost_v + i += 1 + yield totcost, i, (u, v) + + +@not_implemented_for("directed") +@py_random_state(4) +@nx._dispatchable(edge_attrs="weight") +def kernighan_lin_bisection(G, partition=None, max_iter=10, weight="weight", seed=None): + """Partition a graph into two blocks using the Kernighan–Lin + algorithm. + + This algorithm partitions a network into two sets by iteratively + swapping pairs of nodes to reduce the edge cut between the two sets. The + pairs are chosen according to a modified form of Kernighan-Lin [1]_, which + moves node individually, alternating between sides to keep the bisection + balanced. + + Parameters + ---------- + G : NetworkX graph + Graph must be undirected. + + partition : tuple + Pair of iterables containing an initial partition. If not + specified, a random balanced partition is used. + + max_iter : int + Maximum number of times to attempt swaps to find an + improvement before giving up. + + weight : key + Edge data key to use as weight. If None, the weights are all + set to one. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + Only used if partition is None + + Returns + ------- + partition : tuple + A pair of sets of nodes representing the bipartition. + + Raises + ------ + NetworkXError + If partition is not a valid partition of the nodes of the graph. + + References + ---------- + .. [1] Kernighan, B. W.; Lin, Shen (1970). + "An efficient heuristic procedure for partitioning graphs." + *Bell Systems Technical Journal* 49: 291--307. + Oxford University Press 2011. 
+ + """ + n = len(G) + labels = list(G) + seed.shuffle(labels) + index = {v: i for i, v in enumerate(labels)} + + if partition is None: + side = [0] * (n // 2) + [1] * ((n + 1) // 2) + else: + try: + A, B = partition + except (TypeError, ValueError) as err: + raise nx.NetworkXError("partition must be two sets") from err + if not is_partition(G, (A, B)): + raise nx.NetworkXError("partition invalid") + side = [0] * n + for a in A: + side[index[a]] = 1 + + if G.is_multigraph(): + edges = [ + [ + (index[u], sum(e.get(weight, 1) for e in d.values())) + for u, d in G[v].items() + ] + for v in labels + ] + else: + edges = [ + [(index[u], e.get(weight, 1)) for u, e in G[v].items()] for v in labels + ] + + for i in range(max_iter): + costs = list(_kernighan_lin_sweep(edges, side)) + min_cost, min_i, _ = min(costs) + if min_cost >= 0: + break + + for _, _, (u, v) in costs[:min_i]: + side[u] = 1 + side[v] = 0 + + A = {u for u, s in zip(labels, side) if s == 0} + B = {u for u, s in zip(labels, side) if s == 1} + return A, B diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py new file mode 100644 index 0000000..7488028 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/label_propagation.py @@ -0,0 +1,338 @@ +""" +Label propagation community detection algorithms. +""" + +from collections import Counter, defaultdict, deque + +import networkx as nx +from networkx.utils import groups, not_implemented_for, py_random_state + +__all__ = [ + "label_propagation_communities", + "asyn_lpa_communities", + "fast_label_propagation_communities", +] + + +@py_random_state("seed") +@nx._dispatchable(edge_attrs="weight") +def fast_label_propagation_communities(G, *, weight=None, seed=None): + """Returns communities in `G` as detected by fast label propagation. + + The fast label propagation algorithm is described in [1]_. The algorithm is + probabilistic and the found communities may vary in different executions. + + The algorithm operates as follows. First, the community label of each node is + set to a unique label. The algorithm then repeatedly updates the labels of + the nodes to the most frequent label in their neighborhood. In case of ties, + a random label is chosen from the most frequent labels. + + The algorithm maintains a queue of nodes that still need to be processed. + Initially, all nodes are added to the queue in a random order. Then the nodes + are removed from the queue one by one and processed. If a node updates its label, + all its neighbors that have a different label are added to the queue (if not + already in the queue). The algorithm stops when the queue is empty. + + Parameters + ---------- + G : Graph, DiGraph, MultiGraph, or MultiDiGraph + Any NetworkX graph. + + weight : string, or None (default) + The edge attribute representing a non-negative weight of an edge. If None, + each edge is assumed to have weight one. The weight of an edge is used in + determining the frequency with which a label appears among the neighbors of + a node (edge with weight `w` is equivalent to `w` unweighted edges). + + seed : integer, random_state, or None (default) + Indicator of random number generation state. See :ref:`Randomness`. + + Returns + ------- + communities : iterable + Iterable of communities given as sets of nodes. + + Notes + ----- + Edge directions are ignored for directed graphs. + Edge weights must be non-negative numbers. 
+ + References + ---------- + .. [1] Vincent A. Traag & Lovro Šubelj. "Large network community detection by + fast label propagation." Scientific Reports 13 (2023): 2701. + https://doi.org/10.1038/s41598-023-29610-z + """ + + # Queue of nodes to be processed. + nodes_queue = deque(G) + seed.shuffle(nodes_queue) + + # Set of nodes in the queue. + nodes_set = set(G) + + # Assign unique label to each node. + comms = {node: i for i, node in enumerate(G)} + + while nodes_queue: + # Remove next node from the queue to process. + node = nodes_queue.popleft() + nodes_set.remove(node) + + # Isolated nodes retain their initial label. + if G.degree(node) > 0: + # Compute frequency of labels in node's neighborhood. + label_freqs = _fast_label_count(G, comms, node, weight) + max_freq = max(label_freqs.values()) + + # Always sample new label from most frequent labels. + comm = seed.choice( + [comm for comm in label_freqs if label_freqs[comm] == max_freq] + ) + + if comms[node] != comm: + comms[node] = comm + + # Add neighbors that have different label to the queue. + for nbr in nx.all_neighbors(G, node): + if comms[nbr] != comm and nbr not in nodes_set: + nodes_queue.append(nbr) + nodes_set.add(nbr) + + yield from groups(comms).values() + + +def _fast_label_count(G, comms, node, weight=None): + """Computes the frequency of labels in the neighborhood of a node. + + Returns a dictionary keyed by label to the frequency of that label. + """ + + if weight is None: + # Unweighted (un)directed simple graph. + if not G.is_multigraph(): + label_freqs = Counter(map(comms.get, nx.all_neighbors(G, node))) + + # Unweighted (un)directed multigraph. + else: + label_freqs = defaultdict(int) + for nbr in G[node]: + label_freqs[comms[nbr]] += len(G[node][nbr]) + + if G.is_directed(): + for nbr in G.pred[node]: + label_freqs[comms[nbr]] += len(G.pred[node][nbr]) + + else: + # Weighted undirected simple/multigraph. + label_freqs = defaultdict(float) + for _, nbr, w in G.edges(node, data=weight, default=1): + label_freqs[comms[nbr]] += w + + # Weighted directed simple/multigraph. + if G.is_directed(): + for nbr, _, w in G.in_edges(node, data=weight, default=1): + label_freqs[comms[nbr]] += w + + return label_freqs + + +@py_random_state(2) +@nx._dispatchable(edge_attrs="weight") +def asyn_lpa_communities(G, weight=None, seed=None): + """Returns communities in `G` as detected by asynchronous label + propagation. + + The asynchronous label propagation algorithm is described in + [1]_. The algorithm is probabilistic and the found communities may + vary on different executions. + + The algorithm proceeds as follows. After initializing each node with + a unique label, the algorithm repeatedly sets the label of a node to + be the label that appears most frequently among that nodes + neighbors. The algorithm halts when each node has the label that + appears most frequently among its neighbors. The algorithm is + asynchronous because each node is updated without waiting for + updates on the remaining nodes. + + This generalized version of the algorithm in [1]_ accepts edge + weights. + + Parameters + ---------- + G : Graph + + weight : string + The edge attribute representing the weight of an edge. + If None, each edge is assumed to have weight one. In this + algorithm, the weight of an edge is used in determining the + frequency with which a label appears among the neighbors of a + node: a higher weight means the label appears more often. 
+ + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + communities : iterable + Iterable of communities given as sets of nodes. + + Notes + ----- + Edge weight attributes must be numerical. + + References + ---------- + .. [1] Raghavan, Usha Nandini, Réka Albert, and Soundar Kumara. "Near + linear time algorithm to detect community structures in large-scale + networks." Physical Review E 76.3 (2007): 036106. + """ + + labels = {n: i for i, n in enumerate(G)} + cont = True + + while cont: + cont = False + nodes = list(G) + seed.shuffle(nodes) + + for node in nodes: + if not G[node]: + continue + + # Get label frequencies among adjacent nodes. + # Depending on the order they are processed in, + # some nodes will be in iteration t and others in t-1, + # making the algorithm asynchronous. + if weight is None: + # initialising a Counter from an iterator of labels is + # faster for getting unweighted label frequencies + label_freq = Counter(map(labels.get, G[node])) + else: + # updating a defaultdict is substantially faster + # for getting weighted label frequencies + label_freq = defaultdict(float) + for _, v, wt in G.edges(node, data=weight, default=1): + label_freq[labels[v]] += wt + + # Get the labels that appear with maximum frequency. + max_freq = max(label_freq.values()) + best_labels = [ + label for label, freq in label_freq.items() if freq == max_freq + ] + + # If the node does not have one of the maximum frequency labels, + # randomly choose one of them and update the node's label. + # Continue the iteration as long as at least one node + # doesn't have a maximum frequency label. + if labels[node] not in best_labels: + labels[node] = seed.choice(best_labels) + cont = True + + yield from groups(labels).values() + + +@not_implemented_for("directed") +@nx._dispatchable +def label_propagation_communities(G): + """Generates community sets determined by label propagation + + Finds communities in `G` using a semi-synchronous label propagation + method [1]_. This method combines the advantages of both the synchronous + and asynchronous models. Not implemented for directed graphs. + + Parameters + ---------- + G : graph + An undirected NetworkX graph. + + Returns + ------- + communities : iterable + A dict_values object that contains a set of nodes for each community. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed + + References + ---------- + .. [1] Cordasco, G., & Gargano, L. (2010, December). Community detection + via semi-synchronous label propagation algorithms. In Business + Applications of Social Network Analysis (BASNA), 2010 IEEE International + Workshop on (pp. 1-8). IEEE. + """ + coloring = _color_network(G) + # Create a unique label for each node in the graph + labeling = {v: k for k, v in enumerate(G)} + while not _labeling_complete(labeling, G): + # Update the labels of every node with the same color. + for color, nodes in coloring.items(): + for n in nodes: + _update_label(n, labeling, G) + + clusters = defaultdict(set) + for node, label in labeling.items(): + clusters[label].add(node) + return clusters.values() + + +def _color_network(G): + """Colors the network so that neighboring nodes all have distinct colors. + + Returns a dict keyed by color to a set of nodes with that color. 
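+
+    The coloring itself is computed with ``nx.coloring.greedy_color``.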
+ """ + coloring = {} # color => set(node) + colors = nx.coloring.greedy_color(G) + for node, color in colors.items(): + if color in coloring: + coloring[color].add(node) + else: + coloring[color] = {node} + return coloring + + +def _labeling_complete(labeling, G): + """Determines whether or not LPA is done. + + Label propagation is complete when all nodes have a label that is + in the set of highest frequency labels amongst its neighbors. + + Nodes with no neighbors are considered complete. + """ + return all( + labeling[v] in _most_frequent_labels(v, labeling, G) for v in G if len(G[v]) > 0 + ) + + +def _most_frequent_labels(node, labeling, G): + """Returns a set of all labels with maximum frequency in `labeling`. + + Input `labeling` should be a dict keyed by node to labels. + """ + if not G[node]: + # Nodes with no neighbors are themselves a community and are labeled + # accordingly, hence the immediate if statement. + return {labeling[node]} + + # Compute the frequencies of all neighbors of node + freqs = Counter(labeling[q] for q in G[node]) + max_freq = max(freqs.values()) + return {label for label, freq in freqs.items() if freq == max_freq} + + +def _update_label(node, labeling, G): + """Updates the label of a node using the Prec-Max tie breaking algorithm + + The algorithm is explained in: 'Community Detection via Semi-Synchronous + Label Propagation Algorithms' Cordasco and Gargano, 2011 + """ + high_labels = _most_frequent_labels(node, labeling, G) + if len(high_labels) == 1: + labeling[node] = high_labels.pop() + elif len(high_labels) > 1: + # Prec-Max + if labeling[node] not in high_labels: + labeling[node] = max(high_labels) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/leiden.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/leiden.py new file mode 100644 index 0000000..f79c012 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/leiden.py @@ -0,0 +1,162 @@ +"""Functions for detecting communities based on Leiden Community Detection +algorithm. + +These functions do not have NetworkX implementations. +They may only be run with an installable :doc:`backend ` +that supports them. +""" + +import itertools +from collections import deque + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["leiden_communities", "leiden_partitions"] + + +@not_implemented_for("directed") +@py_random_state("seed") +@nx._dispatchable(edge_attrs="weight", implemented_by_nx=False) +def leiden_communities(G, weight="weight", resolution=1, max_level=None, seed=None): + r"""Find a best partition of `G` using Leiden Community Detection (backend required) + + Leiden Community Detection is an algorithm to extract the community structure + of a network based on modularity optimization. It is an improvement upon the + Louvain Community Detection algorithm. See :any:`louvain_communities`. + + Unlike the Louvain algorithm, it guarantees that communities are well connected in addition + to being faster and uncovering better partitions. [1]_ + + The algorithm works in 3 phases. On the first phase, it adds the nodes to a queue randomly + and assigns every node to be in its own community. For each node it tries to find the + maximum positive modularity gain by moving each node to all of its neighbor communities. 
+    If a node is moved from its community, it adds to the rear of the queue all neighbors of
+    the node that do not belong to the node’s new community and that are not in the queue.
+
+    The first phase continues until the queue is empty.
+
+    The second phase consists of refining the partition $P$ obtained from the first phase. It starts
+    with a singleton partition $P_{refined}$. Then it merges nodes locally in $P_{refined}$ within
+    each community of the partition $P$. Nodes are merged with a community in $P_{refined}$ only if
+    both are sufficiently well connected to their community in $P$. This means that after the
+    refinement phase is concluded, communities in $P$ will sometimes have been split into multiple
+    communities.
+
+    The third phase consists of aggregating the network by building a new network whose nodes are
+    now the communities found in the second phase. However, the non-refined partition is used to create
+    an initial partition for the aggregate network.
+
+    Once this phase is complete, it is possible to reapply the first and second phases, creating bigger
+    communities with increased modularity.
+
+    The above three phases are executed until no modularity gain is achieved or `max_level`
+    iterations have been performed.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities.
+    max_level : int or None, optional (default=None)
+        The maximum number of levels (steps of the algorithm) to compute.
+        Must be a positive integer or None. If None, then there is no max
+        level and the algorithm will run until converged.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    list
+        A list of disjoint sets (partition of `G`). Each set represents one community.
+        All communities together contain all the nodes in `G`.
+
+    Examples
+    --------
+    >>> import networkx as nx
+    >>> G = nx.petersen_graph()
+    >>> nx.community.leiden_communities(G, backend="example_backend")  # doctest: +SKIP
+    [{2, 3, 5, 7, 8}, {0, 1, 4, 6, 9}]
+
+    Notes
+    -----
+    The order in which the nodes are considered can affect the final output. In the algorithm
+    the ordering happens using a random shuffle.
+
+    References
+    ----------
+    .. [1] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
+       well-connected communities. Sci Rep 9, 5233 (2019).
+       https://doi.org/10.1038/s41598-019-41695-z
+
+    See Also
+    --------
+    leiden_partitions
+    :any:`louvain_communities`
+    """
+    partitions = leiden_partitions(G, weight, resolution, seed)
+    if max_level is not None:
+        if max_level <= 0:
+            raise ValueError("max_level argument must be a positive integer or None")
+        partitions = itertools.islice(partitions, max_level)
+    final_partition = deque(partitions, maxlen=1)
+    return final_partition.pop()
+
+
+@not_implemented_for("directed")
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight", implemented_by_nx=False)
+def leiden_partitions(G, weight="weight", resolution=1, seed=None):
+    """Yield partitions for each level of Leiden Community Detection (backend required)
+
+    Leiden Community Detection is an algorithm to extract the community
+    structure of a network based on modularity optimization.
+
+    The partitions across levels (steps of the algorithm) form a dendrogram
+    of communities. A dendrogram is a diagram representing a tree in which each
+    level represents a partition of the graph `G`. The top level contains the
+    smallest communities, and as you traverse to the bottom of the tree the
+    communities get bigger and the overall modularity increases, making
+    the partition better.
+
+    Each level is generated by executing the three phases of the Leiden Community
+    Detection algorithm. See :any:`leiden_communities`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Yields
+    ------
+    list
+        A list of disjoint sets (partition of `G`). Each set represents one community.
+        All communities together contain all the nodes in `G`. The yielded partitions
+        increase modularity with each iteration.
+
+    References
+    ----------
+    .. [1] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
+       well-connected communities. Sci Rep 9, 5233 (2019). https://doi.org/10.1038/s41598-019-41695-z
+
+    See Also
+    --------
+    leiden_communities
+    :any:`louvain_partitions`
+    """
+    raise NotImplementedError(
+        "'leiden_partitions' is not implemented by networkx. "
+        "Please try a different backend."
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/local.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/local.py
new file mode 100644
index 0000000..68fc06a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/local.py
@@ -0,0 +1,220 @@
+"""
+Local Community Detection Algorithms
+
+Local Community Detection (LCD) aims to detect one or a few communities
+starting from certain source nodes in the network. This differs from Global
+Community Detection (GCD), which aims to partition an entire network into
+communities.
+
+LCD is often useful when only a portion of the graph is known or the
+graph is large enough that GCD is infeasible.
+
+[1]_ gives a good introduction and overview of LCD.
+
+References
+----------
+.. [1] Baltsou, Georgia, Konstantinos Christopoulos, and Konstantinos Tsichlas.
+   Local community detection: A survey. IEEE Access 10 (2022): 110701-110726.
+ https://doi.org/10.1109/ACCESS.2022.3213980 + + +""" + +__all__ = ["greedy_source_expansion"] + + +def _clauset_greedy_source_expansion(G, *, source, cutoff=None): + if cutoff is None: + cutoff = float("inf") + C = {source} + B = {source} + U = G[source].keys() - C + T = {frozenset([node, nbr]) for node in B for nbr in G.neighbors(node)} + I = {edge for edge in T if all(node in C for node in edge)} + + R_value = 0 + while len(C) < cutoff: + if not U: + break + + max_R = 0 + best_node = None + best_node_B = best_node_T = best_node_I = set() + + for v in U: + R_tmp, B_tmp, T_tmp, I_tmp = _calculate_local_modularity_for_candidate( + G, v, C, B, T, I + ) + if R_tmp > max_R: + max_R = R_tmp + best_node = v + best_node_B = B_tmp + best_node_T = T_tmp + best_node_I = I_tmp + + C = C | {best_node} + U.update(G[best_node].keys() - C) + U.remove(best_node) + B = best_node_B + T = best_node_T + I = best_node_I + if max_R < R_value: + break + R_value = max_R + + return C + + +def _calculate_local_modularity_for_candidate(G, v, C, B, T, I): + """ + Compute the local modularity R and updated variables when adding node v to the community. + + Parameters + ---------- + G : NetworkX graph + The input graph. + v : node + The candidate node to add to the community. + C : set + The current set of community nodes. + B : set + The current set of boundary nodes. + T : set of frozenset + The current set of boundary edges. + I : set of frozenset + The current set of internal boundary edges. + + Returns + ------- + R_tmp : float + The local modularity after adding node v. + B_tmp : set + The updated set of boundary nodes. + T_tmp : set of frozenset + The updated set of boundary edges. + I_tmp : set of frozenset + The updated set of internal boundary edges. + """ + C_tmp = C | {v} + B_tmp = B.copy() + T_tmp = T.copy() + I_tmp = I.copy() + removed_B_nodes = set() + + # Update boundary nodes and edges + for nbr in G[v]: + if nbr not in C_tmp: + # v has nbrs not in the community, so it remains a boundary node + B_tmp.add(v) + # Add edge between v and nbr to boundary edges + T_tmp.add(frozenset([v, nbr])) + + if nbr in B: + # Check if nbr should be removed from boundary nodes + # Go through nbrs nbrs to see if it is still a boundary node + nbr_still_in_B = any(nbr_nbr not in C_tmp for nbr_nbr in G[nbr]) + if not nbr_still_in_B: + B_tmp.remove(nbr) + removed_B_nodes.add(nbr) + + if nbr in C_tmp: + # Add edge between v and nbr to internal edges + I_tmp.add(frozenset([v, nbr])) + + # Remove edges no longer in the boundary + for removed_node in removed_B_nodes: + for removed_node_nbr in G[removed_node]: + if removed_node_nbr not in B_tmp: + T_tmp.discard(frozenset([removed_node_nbr, removed_node])) + I_tmp.discard(frozenset([removed_node_nbr, removed_node])) + + R_tmp = len(I_tmp) / len(T_tmp) if len(T_tmp) > 0 else 1 + return R_tmp, B_tmp, T_tmp, I_tmp + + +ALGORITHMS = { + "clauset": _clauset_greedy_source_expansion, +} + + +def greedy_source_expansion(G, *, source, cutoff=None, method="clauset"): + r"""Find the local community around a source node. + + Find the local community around a source node using Greedy Source + Expansion. Greedy Source Expansion generally identifies a local community + starting from the source node and expands it based on the criteria of the + chosen algorithm. + + The algorithm is specified with the `method` keyword argument. + + * `"clauset"` [1]_ uses local modularity gain to determine local communities. 
+      The algorithm adds neighboring nodes that maximize local modularity to the
+      community iteratively, stopping when no additional nodes improve the modularity
+      or when a predefined cutoff is reached.
+
+      Local modularity measures the density of edges within a community relative
+      to the total graph. By focusing on local modularity, the algorithm efficiently
+      uncovers communities around a specific node without requiring global
+      optimization over the entire graph.
+
+      The algorithm assumes that the graph $G$ consists of a known community $C$ and
+      an unknown set of nodes $U$, which are adjacent to $C$. The boundary of the
+      community, $B$, consists of nodes in $C$ that have at least one neighbor in $U$.
+
+      Mathematically, the local modularity is expressed as:
+
+      .. math::
+          R = \frac{I}{T}
+
+      where $T$ is the number of edges with one or more endpoints in $B$, and $I$ is the
+      number of those edges with neither endpoint in $U$.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The input graph.
+
+    source : node
+        The source node from which the community expansion begins.
+
+    cutoff : int, optional (default=None)
+        The maximum number of nodes to include in the community. If None, the algorithm
+        expands until no further modularity gain can be made.
+
+    method : string, optional (default='clauset')
+        The algorithm to use to carry out greedy source expansion.
+        Supported options: 'clauset'. Other inputs produce a ValueError.
+
+    Returns
+    -------
+    set
+        A set of nodes representing the local community around the source node.
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> nx.community.greedy_source_expansion(G, source=16)
+    {16, 0, 4, 5, 6, 10}
+
+    Notes
+    -----
+    This algorithm is designed for detecting local communities around a specific node,
+    which is useful for large networks where global community detection is computationally
+    expensive.
+
+    The result of the algorithm may vary based on the structure of the graph, the choice of
+    the source node, and the presence of ties between nodes during the greedy expansion process.
+
+    References
+    ----------
+    .. [1] Clauset, Aaron. Finding local community structure in networks.
+       Physical Review E—Statistical, Nonlinear, and Soft Matter Physics 72, no. 2 (2005): 026132.
+       https://arxiv.org/pdf/physics/0503036
+
+    """
+    try:
+        algo = ALGORITHMS[method]
+    except KeyError as e:
+        raise ValueError(f"{method} is not a valid choice for an algorithm.") from e
+
+    return algo(G, source=source, cutoff=cutoff)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py
new file mode 100644
index 0000000..c8407a8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/louvain.py
@@ -0,0 +1,384 @@
+"""Functions for detecting communities based on the Louvain Community Detection
+Algorithm."""
+
+import itertools
+from collections import defaultdict, deque
+
+import networkx as nx
+from networkx.algorithms.community import modularity
+from networkx.utils import py_random_state
+
+__all__ = ["louvain_communities", "louvain_partitions"]
+
+
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight")
+def louvain_communities(
+    G, weight="weight", resolution=1, threshold=0.0000001, max_level=None, seed=None
+):
+    r"""Find the best partition of a graph using the Louvain Community Detection
+    Algorithm.
+
+    Louvain Community Detection Algorithm is a simple method to extract the community
+    structure of a network.
+    This is a heuristic method based on modularity optimization. [1]_
+
+    The algorithm works in 2 steps. In the first step, it assigns every node to
+    its own community and then, for each node, it tries to find the maximum positive
+    modularity gain by moving it to each of its neighbor communities. If no positive
+    gain is achieved the node remains in its original community.
+
+    The modularity gain obtained by moving an isolated node $i$ into a community $C$ can
+    easily be calculated by the following formula (combining [1]_ [2]_ and some algebra):
+
+    .. math::
+        \Delta Q = \frac{k_{i,in}}{2m} - \gamma\frac{\Sigma_{tot} \cdot k_i}{2m^2}
+
+    where $m$ is the size of the graph, $k_{i,in}$ is the sum of the weights of the links
+    from $i$ to nodes in $C$, $k_i$ is the sum of the weights of the links incident to node $i$,
+    $\Sigma_{tot}$ is the sum of the weights of the links incident to nodes in $C$ and $\gamma$
+    is the resolution parameter.
+
+    For the directed case the modularity gain can be computed using this formula according to [3]_
+
+    .. math::
+        \Delta Q = \frac{k_{i,in}}{m}
+        - \gamma\frac{k_i^{out} \cdot \Sigma_{tot}^{in} + k_i^{in} \cdot \Sigma_{tot}^{out}}{m^2}
+
+    where $k_i^{out}$, $k_i^{in}$ are the outer and inner weighted degrees of node $i$ and
+    $\Sigma_{tot}^{in}$, $\Sigma_{tot}^{out}$ are the sum of in-going and out-going links incident
+    to nodes in $C$.
+
+    The first phase continues until no individual move can improve the modularity.
+
+    The second phase consists of building a new network whose nodes are now the communities
+    found in the first phase. To do so, the weights of the links between the new nodes are given by
+    the sum of the weight of the links between nodes in the corresponding two communities. Once this
+    phase is complete, it is possible to reapply the first phase, creating bigger communities with
+    increased modularity.
+
+    The above two phases are executed until no modularity gain is achieved (or the gain is less
+    than the `threshold`, or until `max_level` is reached).
+
+    Be careful with self-loops in the input graph. These are treated as
+    previously reduced communities -- as if the process had been started
+    in the middle of the algorithm. Large self-loop edge weights thus
+    represent strong communities and in practice may be hard to add
+    other nodes to. If your input graph edge weights for self-loops
+    do not represent already reduced communities you may want to remove
+    the self-loops before inputting that graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities.
+    threshold : float, optional (default=0.0000001)
+        Modularity gain threshold for each level. If the gain of modularity
+        between 2 levels of the algorithm is less than the given threshold
+        then the algorithm stops and returns the resulting communities.
+    max_level : int or None, optional (default=None)
+        The maximum number of levels (steps of the algorithm) to compute.
+        Must be a positive integer or None. If None, then there is no max
+        level and the threshold parameter determines the stopping condition.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    list
+        A list of sets (partition of `G`). Each set represents one community and contains
+        all the nodes that constitute it.
+
+    Examples
+    --------
+    >>> import networkx as nx
+    >>> G = nx.petersen_graph()
+    >>> nx.community.louvain_communities(G, seed=123)
+    [{0, 4, 5, 7, 9}, {1, 2, 3, 6, 8}]
+
+    Notes
+    -----
+    The order in which the nodes are considered can affect the final output. In the algorithm
+    the ordering happens using a random shuffle.
+
+    References
+    ----------
+    .. [1] Blondel, V.D. et al. Fast unfolding of communities in
+       large networks. J. Stat. Mech 10008, 1-12 (2008). https://doi.org/10.1088/1742-5468/2008/10/P10008
+    .. [2] Traag, V.A., Waltman, L. & van Eck, N.J. From Louvain to Leiden: guaranteeing
+       well-connected communities. Sci Rep 9, 5233 (2019). https://doi.org/10.1038/s41598-019-41695-z
+    .. [3] Nicolas Dugué, Anthony Perez. Directed Louvain: maximizing modularity in directed networks.
+       [Research Report] Université d’Orléans. 2015. hal-01231784. https://hal.archives-ouvertes.fr/hal-01231784
+
+    See Also
+    --------
+    louvain_partitions
+    :any:`leiden_communities`
+    """
+
+    partitions = louvain_partitions(G, weight, resolution, threshold, seed)
+    if max_level is not None:
+        if max_level <= 0:
+            raise ValueError("max_level argument must be a positive integer or None")
+        partitions = itertools.islice(partitions, max_level)
+    final_partition = deque(partitions, maxlen=1)
+    return final_partition.pop()
+
+
+@py_random_state("seed")
+@nx._dispatchable(edge_attrs="weight")
+def louvain_partitions(
+    G, weight="weight", resolution=1, threshold=0.0000001, seed=None
+):
+    """Yield partitions for each level of the Louvain Community Detection Algorithm
+
+    Louvain Community Detection Algorithm is a simple method to extract the community
+    structure of a network. This is a heuristic method based on modularity optimization. [1]_
+
+    The partitions at each level (step of the algorithm) form a dendrogram of communities.
+    A dendrogram is a diagram representing a tree in which each level represents
+    a partition of the graph `G`. The top level contains the smallest communities,
+    and as you traverse to the bottom of the tree the communities get bigger
+    and the overall modularity increases, making the partition better.
+
+    Each level is generated by executing the two phases of the Louvain Community
+    Detection Algorithm.
+
+    Be careful with self-loops in the input graph. These are treated as
+    previously reduced communities -- as if the process had been started
+    in the middle of the algorithm. Large self-loop edge weights thus
+    represent strong communities and in practice may be hard to add
+    other nodes to. If your input graph edge weights for self-loops
+    do not represent already reduced communities you may want to remove
+    the self-loops before inputting that graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+    resolution : float, optional (default=1)
+        If resolution is less than 1, the algorithm favors larger communities.
+        Greater than 1 favors smaller communities.
+    threshold : float, optional (default=0.0000001)
+        Modularity gain threshold for each level. If the gain of modularity
+        between 2 levels of the algorithm is less than the given threshold
+        then the algorithm stops and returns the resulting communities.
+ seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Yields + ------ + list + A list of sets (partition of `G`). Each set represents one community and contains + all the nodes that constitute it. + + References + ---------- + .. [1] Blondel, V.D. et al. Fast unfolding of communities in + large networks. J. Stat. Mech 10008, 1-12(2008) + + See Also + -------- + louvain_communities + :any:`leiden_partitions` + """ + + partition = [{u} for u in G.nodes()] + if nx.is_empty(G): + yield partition + return + mod = modularity(G, partition, resolution=resolution, weight=weight) + is_directed = G.is_directed() + if G.is_multigraph(): + graph = _convert_multigraph(G, weight, is_directed) + else: + graph = G.__class__() + graph.add_nodes_from(G) + graph.add_weighted_edges_from(G.edges(data=weight, default=1)) + + m = graph.size(weight="weight") + partition, inner_partition, improvement = _one_level( + graph, m, partition, resolution, is_directed, seed + ) + improvement = True + while improvement: + # gh-5901 protect the sets in the yielded list from further manipulation here + yield [s.copy() for s in partition] + new_mod = modularity( + graph, inner_partition, resolution=resolution, weight="weight" + ) + if new_mod - mod <= threshold: + return + mod = new_mod + graph = _gen_graph(graph, inner_partition) + partition, inner_partition, improvement = _one_level( + graph, m, partition, resolution, is_directed, seed + ) + + +def _one_level(G, m, partition, resolution=1, is_directed=False, seed=None): + """Calculate one level of the Louvain partitions tree + + Parameters + ---------- + G : NetworkX Graph/DiGraph + The graph from which to detect communities + m : number + The size of the graph `G`. + partition : list of sets of nodes + A valid partition of the graph `G` + resolution : positive number + The resolution parameter for computing the modularity of a partition + is_directed : bool + True if `G` is a directed graph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
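+
+    Returns
+    -------
+    partition : list of sets of nodes
+        The updated partition in terms of the nodes of the original graph.
+    inner_partition : list of sets of nodes
+        The partition of the nodes of the current (possibly aggregated) graph.
+    improvement : bool
+        Whether any node was moved to a different community at this level.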
+ + """ + node2com = {u: i for i, u in enumerate(G.nodes())} + inner_partition = [{u} for u in G.nodes()] + if is_directed: + in_degrees = dict(G.in_degree(weight="weight")) + out_degrees = dict(G.out_degree(weight="weight")) + Stot_in = list(in_degrees.values()) + Stot_out = list(out_degrees.values()) + # Calculate weights for both in and out neighbors without considering self-loops + nbrs = {} + for u in G: + nbrs[u] = defaultdict(float) + for _, n, wt in G.out_edges(u, data="weight"): + if u != n: + nbrs[u][n] += wt + for n, _, wt in G.in_edges(u, data="weight"): + if u != n: + nbrs[u][n] += wt + else: + degrees = dict(G.degree(weight="weight")) + Stot = list(degrees.values()) + nbrs = {u: {v: data["weight"] for v, data in G[u].items() if v != u} for u in G} + rand_nodes = list(G.nodes) + seed.shuffle(rand_nodes) + nb_moves = 1 + improvement = False + while nb_moves > 0: + nb_moves = 0 + for u in rand_nodes: + best_mod = 0 + best_com = node2com[u] + weights2com = _neighbor_weights(nbrs[u], node2com) + if is_directed: + in_degree = in_degrees[u] + out_degree = out_degrees[u] + Stot_in[best_com] -= in_degree + Stot_out[best_com] -= out_degree + remove_cost = ( + -weights2com[best_com] / m + + resolution + * (out_degree * Stot_in[best_com] + in_degree * Stot_out[best_com]) + / m**2 + ) + else: + degree = degrees[u] + Stot[best_com] -= degree + remove_cost = -weights2com[best_com] / m + resolution * ( + Stot[best_com] * degree + ) / (2 * m**2) + for nbr_com, wt in weights2com.items(): + if is_directed: + gain = ( + remove_cost + + wt / m + - resolution + * ( + out_degree * Stot_in[nbr_com] + + in_degree * Stot_out[nbr_com] + ) + / m**2 + ) + else: + gain = ( + remove_cost + + wt / m + - resolution * (Stot[nbr_com] * degree) / (2 * m**2) + ) + if gain > best_mod: + best_mod = gain + best_com = nbr_com + if is_directed: + Stot_in[best_com] += in_degree + Stot_out[best_com] += out_degree + else: + Stot[best_com] += degree + if best_com != node2com[u]: + com = G.nodes[u].get("nodes", {u}) + partition[node2com[u]].difference_update(com) + inner_partition[node2com[u]].remove(u) + partition[best_com].update(com) + inner_partition[best_com].add(u) + improvement = True + nb_moves += 1 + node2com[u] = best_com + partition = list(filter(len, partition)) + inner_partition = list(filter(len, inner_partition)) + return partition, inner_partition, improvement + + +def _neighbor_weights(nbrs, node2com): + """Calculate weights between node and its neighbor communities. + + Parameters + ---------- + nbrs : dictionary + Dictionary with nodes' neighbors as keys and their edge weight as value. + node2com : dictionary + Dictionary with all graph's nodes as keys and their community index as value. 
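+
+    Examples
+    --------
+    A minimal illustration with arbitrarily chosen weights:
+
+    >>> _neighbor_weights({"b": 2.0, "c": 0.5}, {"a": 0, "b": 0, "c": 1})
+    defaultdict(<class 'float'>, {0: 2.0, 1: 0.5})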
+
+    """
+    weights = defaultdict(float)
+    for nbr, wt in nbrs.items():
+        weights[node2com[nbr]] += wt
+    return weights
+
+
+def _gen_graph(G, partition):
+    """Generate a new graph based on the partitions of a given graph"""
+    H = G.__class__()
+    node2com = {}
+    for i, part in enumerate(partition):
+        nodes = set()
+        for node in part:
+            node2com[node] = i
+            nodes.update(G.nodes[node].get("nodes", {node}))
+        H.add_node(i, nodes=nodes)
+
+    for node1, node2, wt in G.edges(data=True):
+        wt = wt["weight"]
+        com1 = node2com[node1]
+        com2 = node2com[node2]
+        temp = H.get_edge_data(com1, com2, {"weight": 0})["weight"]
+        H.add_edge(com1, com2, weight=wt + temp)
+    return H
+
+
+def _convert_multigraph(G, weight, is_directed):
+    """Convert a Multigraph to normal Graph"""
+    if is_directed:
+        H = nx.DiGraph()
+    else:
+        H = nx.Graph()
+    H.add_nodes_from(G)
+    for u, v, wt in G.edges(data=weight, default=1):
+        if H.has_edge(u, v):
+            H[u][v]["weight"] += wt
+        else:
+            H.add_edge(u, v, weight=wt)
+    return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py
new file mode 100644
index 0000000..08dd7cd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/lukes.py
@@ -0,0 +1,227 @@
+"""Lukes Algorithm for exact optimal weighted tree partitioning."""
+
+from copy import deepcopy
+from functools import lru_cache
+from random import choice
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["lukes_partitioning"]
+
+D_EDGE_W = "weight"
+D_EDGE_VALUE = 1.0
+D_NODE_W = "weight"
+D_NODE_VALUE = 1
+PKEY = "partitions"
+CLUSTER_EVAL_CACHE_SIZE = 2048
+
+
+def _split_n_from(n, min_size_of_first_part):
+    # splits n in two parts of which the first is at least
+    # the second argument
+    assert n >= min_size_of_first_part
+    for p1 in range(min_size_of_first_part, n + 1):
+        yield p1, n - p1
+
+
+@nx._dispatchable(node_attrs="node_weight", edge_attrs="edge_weight")
+def lukes_partitioning(G, max_size, node_weight=None, edge_weight=None):
+    """Optimal partitioning of a weighted tree using the Lukes algorithm.
+
+    This algorithm partitions a connected, acyclic graph featuring integer
+    node weights and float edge weights. The resulting clusters are such
+    that the total weight of the nodes in each cluster does not exceed
+    `max_size` and that the weight of the edges that are cut by the partition
+    is minimized. The algorithm is based on [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    max_size : int
+        Maximum weight a partition can have in terms of the sum of
+        node_weight for all nodes in the partition.
+
+    node_weight : key
+        Node data key to use as weight. If None, the weights are all
+        set to one. The data must be int.
+
+    edge_weight : key
+        Edge data key to use as weight. If None, the weights are all
+        set to one.
+
+    Returns
+    -------
+    partition : list
+        A list of sets of nodes representing the clusters of the
+        partition.
+
+    Raises
+    ------
+    NotATree
+        If G is not a tree.
+    TypeError
+        If any of the values of node_weight is not int.
+
+    References
+    ----------
+    .. [1] Lukes, J. A. (1974).
+       "Efficient Algorithm for the Partitioning of Trees."
+       IBM Journal of Research and Development, 18(3), 217–224.
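+
+    Examples
+    --------
+    A small sketch on an unweighted path (node and edge weights default to 1),
+    where the optimal partition cuts the single middle edge; the output is
+    normalized because the cluster order is not guaranteed:
+
+    >>> G = nx.path_graph(4)
+    >>> sorted(sorted(c) for c in nx.community.lukes_partitioning(G, max_size=2))
+    [[0, 1], [2, 3]]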
+ + """ + # First sanity check and tree preparation + if not nx.is_tree(G): + raise nx.NotATree("lukes_partitioning works only on trees") + else: + if nx.is_directed(G): + root = [n for n, d in G.in_degree() if d == 0] + assert len(root) == 1 + root = root[0] + t_G = deepcopy(G) + else: + root = choice(list(G.nodes)) + # this has the desirable side effect of not inheriting attributes + t_G = nx.dfs_tree(G, root) + + # Since we do not want to screw up the original graph, + # if we have a blank attribute, we make a deepcopy + if edge_weight is None or node_weight is None: + safe_G = deepcopy(G) + if edge_weight is None: + nx.set_edge_attributes(safe_G, D_EDGE_VALUE, D_EDGE_W) + edge_weight = D_EDGE_W + if node_weight is None: + nx.set_node_attributes(safe_G, D_NODE_VALUE, D_NODE_W) + node_weight = D_NODE_W + else: + safe_G = G + + # Second sanity check + # The values of node_weight MUST BE int. + # I cannot see any room for duck typing without incurring serious + # danger of subtle bugs. + all_n_attr = nx.get_node_attributes(safe_G, node_weight).values() + for x in all_n_attr: + if not isinstance(x, int): + raise TypeError( + "lukes_partitioning needs integer " + f"values for node_weight ({node_weight})" + ) + + # SUBROUTINES ----------------------- + # these functions are defined here for two reasons: + # - brevity: we can leverage global "safe_G" + # - caching: signatures are hashable + + @not_implemented_for("undirected") + # this is intended to be called only on t_G + def _leaves(gr): + for x in gr.nodes: + if not nx.descendants(gr, x): + yield x + + @not_implemented_for("undirected") + def _a_parent_of_leaves_only(gr): + tleaves = set(_leaves(gr)) + for n in set(gr.nodes) - tleaves: + if all(x in tleaves for x in nx.descendants(gr, n)): + return n + + @lru_cache(CLUSTER_EVAL_CACHE_SIZE) + def _value_of_cluster(cluster): + valid_edges = [e for e in safe_G.edges if e[0] in cluster and e[1] in cluster] + return sum(safe_G.edges[e][edge_weight] for e in valid_edges) + + def _value_of_partition(partition): + return sum(_value_of_cluster(frozenset(c)) for c in partition) + + @lru_cache(CLUSTER_EVAL_CACHE_SIZE) + def _weight_of_cluster(cluster): + return sum(safe_G.nodes[n][node_weight] for n in cluster) + + def _pivot(partition, node): + ccx = [c for c in partition if node in c] + assert len(ccx) == 1 + return ccx[0] + + def _concatenate_or_merge(partition_1, partition_2, x, i, ref_weight): + ccx = _pivot(partition_1, x) + cci = _pivot(partition_2, i) + merged_xi = ccx.union(cci) + + # We first check if we can do the merge. 
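+        # A merge is feasible only when the combined node-weight of the two
+        # clusters stays within ref_weight.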
+ # If so, we do the actual calculations, otherwise we concatenate + if _weight_of_cluster(frozenset(merged_xi)) <= ref_weight: + cp1 = list(filter(lambda x: x != ccx, partition_1)) + cp2 = list(filter(lambda x: x != cci, partition_2)) + + option_2 = [merged_xi] + cp1 + cp2 + return option_2, _value_of_partition(option_2) + else: + option_1 = partition_1 + partition_2 + return option_1, _value_of_partition(option_1) + + # INITIALIZATION ----------------------- + leaves = set(_leaves(t_G)) + for lv in leaves: + t_G.nodes[lv][PKEY] = {} + slot = safe_G.nodes[lv][node_weight] + t_G.nodes[lv][PKEY][slot] = [{lv}] + t_G.nodes[lv][PKEY][0] = [{lv}] + + for inner in [x for x in t_G.nodes if x not in leaves]: + t_G.nodes[inner][PKEY] = {} + slot = safe_G.nodes[inner][node_weight] + t_G.nodes[inner][PKEY][slot] = [{inner}] + nx._clear_cache(t_G) + + # CORE ALGORITHM ----------------------- + while True: + x_node = _a_parent_of_leaves_only(t_G) + weight_of_x = safe_G.nodes[x_node][node_weight] + best_value = 0 + best_partition = None + bp_buffer = {} + x_descendants = nx.descendants(t_G, x_node) + for i_node in x_descendants: + for j in range(weight_of_x, max_size + 1): + for a, b in _split_n_from(j, weight_of_x): + if ( + a not in t_G.nodes[x_node][PKEY] + or b not in t_G.nodes[i_node][PKEY] + ): + # it's not possible to form this particular weight sum + continue + + part1 = t_G.nodes[x_node][PKEY][a] + part2 = t_G.nodes[i_node][PKEY][b] + part, value = _concatenate_or_merge(part1, part2, x_node, i_node, j) + + if j not in bp_buffer or bp_buffer[j][1] < value: + # we annotate in the buffer the best partition for j + bp_buffer[j] = part, value + + # we also keep track of the overall best partition + if best_value <= value: + best_value = value + best_partition = part + + # as illustrated in Lukes, once we finished a child, we can + # discharge the partitions we found into the graph + # (the key phrase is make all x == x') + # so that they are used by the subsequent children + for w, (best_part_for_vl, vl) in bp_buffer.items(): + t_G.nodes[x_node][PKEY][w] = best_part_for_vl + bp_buffer.clear() + + # the absolute best partition for this node + # across all weights has to be stored at 0 + t_G.nodes[x_node][PKEY][0] = best_partition + t_G.remove_nodes_from(x_descendants) + + if x_node == root: + # the 0-labeled partition of root + # is the optimal one for the whole tree + return t_G.nodes[root][PKEY][0] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py new file mode 100644 index 0000000..f465e01 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/modularity_max.py @@ -0,0 +1,451 @@ +"""Functions for detecting communities based on modularity.""" + +from collections import defaultdict + +import networkx as nx +from networkx.algorithms.community.quality import modularity +from networkx.utils import not_implemented_for +from networkx.utils.mapped_queue import MappedQueue + +__all__ = [ + "greedy_modularity_communities", + "naive_greedy_modularity_communities", +] + + +def _greedy_modularity_communities_generator(G, weight=None, resolution=1): + r"""Yield community partitions of G and the modularity change at each step. 
+
+    This function performs Clauset-Newman-Moore greedy modularity maximization [2]_.
+    At each step of the process it yields the change in modularity that will occur in
+    the next step, followed by the new community partition after that step.
+
+    Greedy modularity maximization begins with each node in its own community
+    and repeatedly joins the pair of communities that lead to the largest
+    modularity until one community contains all nodes (the partition has one set).
+
+    This function maximizes the generalized modularity, where `resolution`
+    is the resolution parameter, often expressed as $\gamma$.
+    See :func:`~networkx.algorithms.community.quality.modularity`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default=None)
+        The name of an edge attribute that holds the numerical value used
+        as a weight. If None, then each edge has weight 1.
+        The degree is the sum of the edge weights adjacent to the node.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    Yields
+    ------
+    Alternating yield statements produce the following two objects:
+
+    communities: dict_values
+        A dict_values of frozensets of nodes, one for each community.
+        This represents a partition of the nodes of the graph into communities.
+        The first yield is the partition with each node in its own community.
+
+    dq: float
+        The change in modularity when merging the next two communities
+        that leads to the largest modularity.
+
+    See Also
+    --------
+    modularity
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. "Networks: An Introduction", page 224
+       Oxford University Press 2011.
+    .. [2] Clauset, A., Newman, M. E., & Moore, C.
+       "Finding community structure in very large networks."
+       Physical Review E 70(6), 2004.
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community
+       Detection" Phys. Rev. E 74, 2006.
+    .. [4] Newman, M. E. J. "Analysis of weighted networks"
+       Physical Review E 70(5 Pt 2):056131, 2004.
+    """
+    directed = G.is_directed()
+    N = G.number_of_nodes()
+
+    # Count edges (or the sum of edge-weights for weighted graphs)
+    m = G.size(weight)
+    q0 = 1 / m
+
+    # Calculate degrees (notation from the papers)
+    # a : the fraction of (weighted) out-degree for each node
+    # b : the fraction of (weighted) in-degree for each node
+    if directed:
+        a = {node: deg_out * q0 for node, deg_out in G.out_degree(weight=weight)}
+        b = {node: deg_in * q0 for node, deg_in in G.in_degree(weight=weight)}
+    else:
+        a = b = {node: deg * q0 * 0.5 for node, deg in G.degree(weight=weight)}
+
+    # this preliminary step collects the edge weights for each node pair
+    # It handles multigraph and digraph and works fine for graph.
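+    # After the scaling step below, dq_dict[u][v] holds the modularity change
+    # for merging the singleton communities of u and v:
+    #   dq(u, v) = w_uv / m - resolution * (a[u] * b[v] + a[v] * b[u])
+    # where w_uv is the total edge weight between u and v.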
+ dq_dict = defaultdict(lambda: defaultdict(float)) + for u, v, wt in G.edges(data=weight, default=1): + if u == v: + continue + dq_dict[u][v] += wt + dq_dict[v][u] += wt + + # now scale and subtract the expected edge-weights term + for u, nbrdict in dq_dict.items(): + for v, wt in nbrdict.items(): + dq_dict[u][v] = q0 * wt - resolution * (a[u] * b[v] + b[u] * a[v]) + + # Use -dq to get a max_heap instead of a min_heap + # dq_heap holds a heap for each node's neighbors + dq_heap = {u: MappedQueue({(u, v): -dq for v, dq in dq_dict[u].items()}) for u in G} + # H -> all_dq_heap holds a heap with the best items for each node + H = MappedQueue([dq_heap[n].heap[0] for n in G if len(dq_heap[n]) > 0]) + + # Initialize single-node communities + communities = {n: frozenset([n]) for n in G} + yield communities.values() + + # Merge the two communities that lead to the largest modularity + while len(H) > 1: + # Find best merge + # Remove from heap of row maxes + # Ties will be broken by choosing the pair with lowest min community id + try: + negdq, u, v = H.pop() + except IndexError: + break + dq = -negdq + yield dq + # Remove best merge from row u heap + dq_heap[u].pop() + # Push new row max onto H + if len(dq_heap[u]) > 0: + H.push(dq_heap[u].heap[0]) + # If this element was also at the root of row v, we need to remove the + # duplicate entry from H + if dq_heap[v].heap[0] == (v, u): + H.remove((v, u)) + # Remove best merge from row v heap + dq_heap[v].remove((v, u)) + # Push new row max onto H + if len(dq_heap[v]) > 0: + H.push(dq_heap[v].heap[0]) + else: + # Duplicate wasn't in H, just remove from row v heap + dq_heap[v].remove((v, u)) + + # Perform merge + communities[v] = frozenset(communities[u] | communities[v]) + del communities[u] + + # Get neighbor communities connected to the merged communities + u_nbrs = set(dq_dict[u]) + v_nbrs = set(dq_dict[v]) + all_nbrs = (u_nbrs | v_nbrs) - {u, v} + both_nbrs = u_nbrs & v_nbrs + # Update dq for merge of u into v + for w in all_nbrs: + # Calculate new dq value + if w in both_nbrs: + dq_vw = dq_dict[v][w] + dq_dict[u][w] + elif w in v_nbrs: + dq_vw = dq_dict[v][w] - resolution * (a[u] * b[w] + a[w] * b[u]) + else: # w in u_nbrs + dq_vw = dq_dict[u][w] - resolution * (a[v] * b[w] + a[w] * b[v]) + # Update rows v and w + for row, col in [(v, w), (w, v)]: + dq_heap_row = dq_heap[row] + # Update dict for v,w only (u is removed below) + dq_dict[row][col] = dq_vw + # Save old max of per-row heap + if len(dq_heap_row) > 0: + d_oldmax = dq_heap_row.heap[0] + else: + d_oldmax = None + # Add/update heaps + d = (row, col) + d_negdq = -dq_vw + # Save old value for finding heap index + if w in v_nbrs: + # Update existing element in per-row heap + dq_heap_row.update(d, d, priority=d_negdq) + else: + # We're creating a new nonzero element, add to heap + dq_heap_row.push(d, priority=d_negdq) + # Update heap of row maxes if necessary + if d_oldmax is None: + # No entries previously in this row, push new max + H.push(d, priority=d_negdq) + else: + # We've updated an entry in this row, has the max changed? 
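+                        # Compare the saved old max with the current row max and
+                        # swap the entry in H if the element or its priority changed.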
+ row_max = dq_heap_row.heap[0] + if d_oldmax != row_max or d_oldmax.priority != row_max.priority: + H.update(d_oldmax, row_max) + + # Remove row/col u from dq_dict matrix + for w in dq_dict[u]: + # Remove from dict + dq_old = dq_dict[w][u] + del dq_dict[w][u] + # Remove from heaps if we haven't already + if w != v: + # Remove both row and column + for row, col in [(w, u), (u, w)]: + dq_heap_row = dq_heap[row] + # Check if replaced dq is row max + d_old = (row, col) + if dq_heap_row.heap[0] == d_old: + # Update per-row heap and heap of row maxes + dq_heap_row.remove(d_old) + H.remove(d_old) + # Update row max + if len(dq_heap_row) > 0: + H.push(dq_heap_row.heap[0]) + else: + # Only update per-row heap + dq_heap_row.remove(d_old) + + del dq_dict[u] + # Mark row u as deleted, but keep placeholder + dq_heap[u] = MappedQueue() + # Merge u into v and update a + a[v] += a[u] + a[u] = 0 + if directed: + b[v] += b[u] + b[u] = 0 + + yield communities.values() + + +@nx._dispatchable(edge_attrs="weight") +def greedy_modularity_communities( + G, + weight=None, + resolution=1, + cutoff=1, + best_n=None, +): + r"""Find communities in G using greedy modularity maximization. + + This function uses Clauset-Newman-Moore greedy modularity maximization [2]_ + to find the community partition with the largest modularity. + + Greedy modularity maximization begins with each node in its own community + and repeatedly joins the pair of communities that lead to the largest + modularity until no further increase in modularity is possible (a maximum). + Two keyword arguments adjust the stopping condition. `cutoff` is a lower + limit on the number of communities so you can stop the process before + reaching a maximum (used to save computation time). `best_n` is an upper + limit on the number of communities so you can make the process continue + until at most n communities remain even if the maximum modularity occurs + for more. To obtain exactly n communities, set both `cutoff` and `best_n` to n. + + This function maximizes the generalized modularity, where `resolution` + is the resolution parameter, often expressed as $\gamma$. + See :func:`~networkx.algorithms.community.quality.modularity`. + + Parameters + ---------- + G : NetworkX graph + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + resolution : float, optional (default=1) + If resolution is less than 1, modularity favors larger communities. + Greater than 1 favors smaller communities. + + cutoff : int, optional (default=1) + A minimum number of communities below which the merging process stops. + The process stops at this number of communities even if modularity + is not maximized. The goal is to let the user stop the process early. + The process stops before the cutoff if it finds a maximum of modularity. + + best_n : int or None, optional (default=None) + A maximum number of communities above which the merging process will + not stop. This forces community merging to continue after modularity + starts to decrease until `best_n` communities remain. + If ``None``, don't force it to continue beyond a maximum. + + Raises + ------ + ValueError : If the `cutoff` or `best_n` value is not in the range + ``[1, G.number_of_nodes()]``, or if `best_n` < `cutoff`. + + Returns + ------- + communities: list + A list of frozensets of nodes, one for each community. 
+ Sorted by length with largest communities first. + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> c = nx.community.greedy_modularity_communities(G) + >>> sorted(c[0]) + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + + See Also + -------- + modularity + + References + ---------- + .. [1] Newman, M. E. J. "Networks: An Introduction", page 224 + Oxford University Press 2011. + .. [2] Clauset, A., Newman, M. E., & Moore, C. + "Finding community structure in very large networks." + Physical Review E 70(6), 2004. + .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community + Detection" Phys. Rev. E74, 2006. + .. [4] Newman, M. E. J."Analysis of weighted networks" + Physical Review E 70(5 Pt 2):056131, 2004. + """ + if not G.size(): + return [{n} for n in G] + + if (cutoff < 1) or (cutoff > G.number_of_nodes()): + raise ValueError(f"cutoff must be between 1 and {len(G)}. Got {cutoff}.") + if best_n is not None: + if (best_n < 1) or (best_n > G.number_of_nodes()): + raise ValueError(f"best_n must be between 1 and {len(G)}. Got {best_n}.") + if best_n < cutoff: + raise ValueError(f"Must have best_n >= cutoff. Got {best_n} < {cutoff}") + if best_n == 1: + return [set(G)] + else: + best_n = G.number_of_nodes() + + # retrieve generator object to construct output + community_gen = _greedy_modularity_communities_generator( + G, weight=weight, resolution=resolution + ) + + # construct the first best community + communities = next(community_gen) + + # continue merging communities until one of the breaking criteria is satisfied + while len(communities) > cutoff: + try: + dq = next(community_gen) + # StopIteration occurs when communities are the connected components + except StopIteration: + communities = sorted(communities, key=len, reverse=True) + # if best_n requires more merging, merge big sets for highest modularity + while len(communities) > best_n: + comm1, comm2, *rest = communities + communities = [comm1 ^ comm2] + communities.extend(rest) + return communities + + # keep going unless max_mod is reached or best_n says to merge more + if dq < 0 and len(communities) <= best_n: + break + communities = next(community_gen) + + return sorted(communities, key=len, reverse=True) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def naive_greedy_modularity_communities(G, resolution=1, weight=None): + r"""Find communities in G using greedy modularity maximization. + + This implementation is O(n^4), much slower than alternatives, but it is + provided as an easy-to-understand reference implementation. + + Greedy modularity maximization begins with each node in its own community + and joins the pair of communities that most increases modularity until no + such pair exists. + + This function maximizes the generalized modularity, where `resolution` + is the resolution parameter, often expressed as $\gamma$. + See :func:`~networkx.algorithms.community.quality.modularity`. + + Parameters + ---------- + G : NetworkX graph + Graph must be simple and undirected. + + resolution : float (default=1) + If resolution is less than 1, modularity favors larger communities. + Greater than 1 favors smaller communities. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. 
+ + Returns + ------- + list + A list of sets of nodes, one for each community. + Sorted by length with largest communities first. + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> c = nx.community.naive_greedy_modularity_communities(G) + >>> sorted(c[0]) + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + + See Also + -------- + greedy_modularity_communities + modularity + """ + # First create one community for each node + communities = [frozenset([u]) for u in G.nodes()] + # Track merges + merges = [] + # Greedily merge communities until no improvement is possible + old_modularity = None + new_modularity = modularity(G, communities, resolution=resolution, weight=weight) + while old_modularity is None or new_modularity > old_modularity: + # Save modularity for comparison + old_modularity = new_modularity + # Find best pair to merge + trial_communities = list(communities) + to_merge = None + for i, u in enumerate(communities): + for j, v in enumerate(communities): + # Skip i==j and empty communities + if j <= i or len(u) == 0 or len(v) == 0: + continue + # Merge communities u and v + trial_communities[j] = u | v + trial_communities[i] = frozenset([]) + trial_modularity = modularity( + G, trial_communities, resolution=resolution, weight=weight + ) + if trial_modularity >= new_modularity: + # Check if strictly better or tie + if trial_modularity > new_modularity: + # Found new best, save modularity and group indexes + new_modularity = trial_modularity + to_merge = (i, j, new_modularity - old_modularity) + elif to_merge and min(i, j) < min(to_merge[0], to_merge[1]): + # Break ties by choosing pair with lowest min id + new_modularity = trial_modularity + to_merge = (i, j, new_modularity - old_modularity) + # Un-merge + trial_communities[i] = u + trial_communities[j] = v + if to_merge is not None: + # If the best merge improves modularity, use it + merges.append(to_merge) + i, j, dq = to_merge + u, v = communities[i], communities[j] + communities[j] = u | v + communities[i] = frozenset([]) + # Remove empty communities and sort + return sorted((c for c in communities if len(c) > 0), key=len, reverse=True) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py new file mode 100644 index 0000000..4b4dcbb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/quality.py @@ -0,0 +1,347 @@ +"""Functions for measuring the quality of a partition (into +communities). + +""" + +from itertools import combinations + +import networkx as nx +from networkx import NetworkXError +from networkx.algorithms.community.community_utils import is_partition +from networkx.utils.decorators import argmap + +__all__ = ["modularity", "partition_quality"] + + +class NotAPartition(NetworkXError): + """Raised if a given collection is not a partition.""" + + def __init__(self, G, collection): + msg = f"{collection} is not a valid partition of the graph {G}" + super().__init__(msg) + + +def _require_partition(G, partition): + """Decorator to check that a valid partition is input to a function + + Raises :exc:`networkx.NetworkXError` if the partition is not valid. + + This decorator should be used on functions whose first two arguments + are a graph and a partition of the nodes of that graph (in that + order):: + + >>> @require_partition + ... def foo(G, partition): + ... print("partition is valid!") + ... 
+        >>> G = nx.complete_graph(5)
+        >>> partition = [{0, 1}, {2, 3}, {4}]
+        >>> foo(G, partition)
+        partition is valid!
+        >>> partition = [{0}, {2, 3}, {4}]
+        >>> foo(G, partition)
+        Traceback (most recent call last):
+        ...
+        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
+        >>> partition = [{0, 1}, {1, 2, 3}, {4}]
+        >>> foo(G, partition)
+        Traceback (most recent call last):
+        ...
+        networkx.exception.NetworkXError: `partition` is not a valid partition of the nodes of G
+
+    """
+    if is_partition(G, partition):
+        return G, partition
+    raise nx.NetworkXError("`partition` is not a valid partition of the nodes of G")
+
+
+require_partition = argmap(_require_partition, (0, 1))
+
+
+@nx._dispatchable
+def intra_community_edges(G, partition):
+    """Returns the number of intra-community edges for a partition of `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph.
+
+    partition : iterable of sets of nodes
+        This must be a partition of the nodes of `G`.
+
+    The *intra-community edges* are those edges joining a pair of nodes
+    in the same block of the partition.
+
+    """
+    return sum(G.subgraph(block).size() for block in partition)
+
+
+@nx._dispatchable
def inter_community_edges(G, partition):
+    """Returns the number of inter-community edges according to the given
+    partition of the nodes of `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph.
+
+    partition : iterable of sets of nodes
+        This must be a partition of the nodes of `G`.
+
+    The *inter-community edges* are those edges joining a pair of nodes
+    in different blocks of the partition.
+
+    Implementation note: this function creates an intermediate graph
+    that may require the same amount of memory as that of `G`.
+
+    """
+    # Alternate implementation that does not require constructing a new
+    # graph object (but does require constructing an affiliation
+    # dictionary):
+    #
+    # aff = dict(chain.from_iterable(((v, block) for v in block)
+    #                                for block in partition))
+    # return sum(1 for u, v in G.edges() if aff[u] != aff[v])
+    #
+    MG = nx.MultiDiGraph if G.is_directed() else nx.MultiGraph
+    return nx.quotient_graph(G, partition, create_using=MG).size()
+
+
+@nx._dispatchable
+def inter_community_non_edges(G, partition):
+    """Returns the number of inter-community non-edges according to the
+    given partition of the nodes of `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph.
+
+    partition : iterable of sets of nodes
+        This must be a partition of the nodes of `G`.
+
+    A *non-edge* is a pair of nodes (undirected if `G` is undirected)
+    that are not adjacent in `G`. The *inter-community non-edges* are
+    those non-edges on a pair of nodes in different blocks of the
+    partition.
+
+    Implementation note: this function creates two intermediate graphs,
+    which may require up to twice the amount of memory as required to
+    store `G`.
+
+    """
+    # Alternate implementation that does not require constructing two
+    # new graph objects (but does require constructing an affiliation
+    # dictionary):
+    #
+    # aff = dict(chain.from_iterable(((v, block) for v in block)
+    #                                for block in partition))
+    # return sum(1 for u, v in nx.non_edges(G) if aff[u] != aff[v])
+    #
+    return inter_community_edges(nx.complement(G), partition)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def modularity(G, communities, weight="weight", resolution=1):
+    r"""Returns the modularity of the given partition of the graph.
+
+    Modularity is defined in [1]_ as
+
+    .. math::
+        Q = \frac{1}{2m} \sum_{ij} \left( A_{ij} - \gamma\frac{k_ik_j}{2m}\right)
+            \delta(c_i,c_j)
+
+    where $m$ is the number of edges (or sum of all edge weights as in [5]_),
+    $A$ is the adjacency matrix of `G`, $k_i$ is the (weighted) degree of $i$,
+    $\gamma$ is the resolution parameter, and $\delta(c_i, c_j)$ is 1 if $i$ and
+    $j$ are in the same community and 0 otherwise.
+
+    According to [2]_ (and verified by some algebra) this can be reduced to
+
+    .. math::
+        Q = \sum_{c=1}^{n}
+            \left[ \frac{L_c}{m} - \gamma\left( \frac{k_c}{2m} \right) ^2 \right]
+
+    where the sum iterates over all communities $c$, $m$ is the number of edges,
+    $L_c$ is the number of intra-community links for community $c$,
+    $k_c$ is the sum of degrees of the nodes in community $c$,
+    and $\gamma$ is the resolution parameter.
+
+    The resolution parameter sets an arbitrary tradeoff between intra-group
+    edges and inter-group edges. More complex grouping patterns can be
+    discovered by analyzing the same network with multiple values of gamma
+    and then combining the results [3]_. That said, it is very common to
+    simply use gamma=1. More on the choice of gamma is in [4]_.
+
+    The second formula is the one actually used in the calculation of modularity.
+    For directed graphs the second formula replaces $k_c$ with $k^{in}_c k^{out}_c$.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+
+    communities : list or iterable of set of nodes
+        These node sets must represent a partition of G's nodes.
+
+    weight : string or None, optional (default="weight")
+        The edge attribute that holds the numerical value used
+        as a weight. If None or an edge does not have that attribute,
+        then that edge has weight 1.
+
+    resolution : float (default=1)
+        If resolution is less than 1, modularity favors larger communities.
+        Greater than 1 favors smaller communities.
+
+    Returns
+    -------
+    Q : float
+        The modularity of the partition.
+
+    Raises
+    ------
+    NotAPartition
+        If `communities` is not a partition of the nodes of `G`.
+
+    Examples
+    --------
+    >>> G = nx.barbell_graph(3, 0)
+    >>> nx.community.modularity(G, [{0, 1, 2}, {3, 4, 5}])
+    0.35714285714285715
+    >>> nx.community.modularity(G, nx.community.label_propagation_communities(G))
+    0.35714285714285715
+
+    References
+    ----------
+    .. [1] M. E. J. Newman "Networks: An Introduction", page 224.
+       Oxford University Press, 2011.
+    .. [2] Clauset, Aaron, Mark EJ Newman, and Cristopher Moore.
+       "Finding community structure in very large networks."
+       Phys. Rev. E 70.6 (2004).
+    .. [3] Reichardt and Bornholdt "Statistical Mechanics of Community Detection"
+       Phys. Rev. E 74, 016110, 2006. https://doi.org/10.1103/PhysRevE.74.016110
+    .. [4] M. E. J. Newman, "Equivalence between modularity optimization and
+       maximum likelihood methods for community detection"
+       Phys. Rev. E 94, 052315, 2016. https://doi.org/10.1103/PhysRevE.94.052315
+    .. [5] Blondel, V.D. et al. "Fast unfolding of communities in large
+       networks" J. Stat. Mech 10008, 1-12 (2008).
+ https://doi.org/10.1088/1742-5468/2008/10/P10008 + """ + if not isinstance(communities, list): + communities = list(communities) + if not is_partition(G, communities): + raise NotAPartition(G, communities) + + directed = G.is_directed() + if directed: + out_degree = dict(G.out_degree(weight=weight)) + in_degree = dict(G.in_degree(weight=weight)) + m = sum(out_degree.values()) + norm = 1 / m**2 + else: + out_degree = in_degree = dict(G.degree(weight=weight)) + deg_sum = sum(out_degree.values()) + m = deg_sum / 2 + norm = 1 / deg_sum**2 + + def community_contribution(community): + comm = set(community) + L_c = sum(wt for u, v, wt in G.edges(comm, data=weight, default=1) if v in comm) + + out_degree_sum = sum(out_degree[u] for u in comm) + in_degree_sum = sum(in_degree[u] for u in comm) if directed else out_degree_sum + + return L_c / m - resolution * out_degree_sum * in_degree_sum * norm + + return sum(map(community_contribution, communities)) + + +@require_partition +@nx._dispatchable +def partition_quality(G, partition): + """Returns the coverage and performance of a partition of G. + + The *coverage* of a partition is the ratio of the number of + intra-community edges to the total number of edges in the graph. + + The *performance* of a partition is the number of + intra-community edges plus inter-community non-edges divided by the total + number of potential edges. + + This algorithm has complexity $O(C^2 + L)$ where C is the number of + communities and L is the number of links. + + Parameters + ---------- + G : NetworkX graph + + partition : sequence + Partition of the nodes of `G`, represented as a sequence of + sets of nodes (blocks). Each block of the partition represents a + community. + + Returns + ------- + (float, float) + The (coverage, performance) tuple of the partition, as defined above. + + Raises + ------ + NetworkXError + If `partition` is not a valid partition of the nodes of `G`. + + Notes + ----- + If `G` is a multigraph; + - for coverage, the multiplicity of edges is counted + - for performance, the result is -1 (total number of possible edges is not defined) + + References + ---------- + .. [1] Santo Fortunato. + "Community Detection in Graphs". + *Physical Reports*, Volume 486, Issue 3--5 pp. 
75--174 + + """ + + node_community = {} + for i, community in enumerate(partition): + for node in community: + node_community[node] = i + + # `performance` is not defined for multigraphs + if not G.is_multigraph(): + # Iterate over the communities, quadratic, to calculate `possible_inter_community_edges` + possible_inter_community_edges = sum( + len(p1) * len(p2) for p1, p2 in combinations(partition, 2) + ) + + if G.is_directed(): + possible_inter_community_edges *= 2 + else: + possible_inter_community_edges = 0 + + # Compute the number of edges in the complete graph -- `n` nodes, + # directed or undirected, depending on `G` + n = len(G) + total_pairs = n * (n - 1) + if not G.is_directed(): + total_pairs //= 2 + + intra_community_edges = 0 + inter_community_non_edges = possible_inter_community_edges + + # Iterate over the links to count `intra_community_edges` and `inter_community_non_edges` + for e in G.edges(): + if node_community[e[0]] == node_community[e[1]]: + intra_community_edges += 1 + else: + inter_community_non_edges -= 1 + + coverage = intra_community_edges / len(G.edges) + + if G.is_multigraph(): + performance = -1.0 + else: + performance = (intra_community_edges + inter_community_non_edges) / total_pairs + + return coverage, performance diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..71639e7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-312.pyc new file mode 100644 index 0000000..e71797d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_asyn_fluid.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-312.pyc new file mode 100644 index 0000000..47ffd00 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_centrality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-312.pyc new file mode 100644 index 0000000..c7caca4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_divisive.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-312.pyc new file mode 100644 index 0000000..9c5fda4 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kclique.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-312.pyc new file mode 100644 index 0000000..36eaaf9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_kernighan_lin.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-312.pyc new file mode 100644 index 0000000..48ed8e6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_label_propagation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_leiden.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_leiden.cpython-312.pyc new file mode 100644 index 0000000..aec189c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_leiden.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_local.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_local.cpython-312.pyc new file mode 100644 index 0000000..ee8c26b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_local.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-312.pyc new file mode 100644 index 0000000..7ab8ea8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_louvain.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-312.pyc new file mode 100644 index 0000000..be2a2fc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_lukes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-312.pyc new file mode 100644 index 0000000..ae18219 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_modularity_max.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-312.pyc new file mode 100644 index 0000000..65c9ece Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_quality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-312.pyc new file mode 100644 index 0000000..f304f2c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/__pycache__/test_utils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py new file mode 100644 index 0000000..6c023be --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_asyn_fluid.py @@ -0,0 +1,136 @@ +import pytest + +import networkx as nx +from networkx import Graph, NetworkXError +from networkx.algorithms.community import asyn_fluidc + + +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.community.asyn_fluidc(G, 1) + + +def test_exceptions(): + test = Graph() + test.add_node("a") + pytest.raises(NetworkXError, asyn_fluidc, test, "hi") + pytest.raises(NetworkXError, asyn_fluidc, test, -1) + pytest.raises(NetworkXError, asyn_fluidc, test, 3) + test.add_node("b") + pytest.raises(NetworkXError, asyn_fluidc, test, 1) + + +def test_single_node(): + test = Graph() + + test.add_node("a") + + # ground truth + ground_truth = {frozenset(["a"])} + + communities = asyn_fluidc(test, 1) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_two_nodes(): + test = Graph() + + test.add_edge("a", "b") + + # ground truth + ground_truth = {frozenset(["a"]), frozenset(["b"])} + + communities = asyn_fluidc(test, 2) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_two_clique_communities(): + test = Graph() + + # c1 + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "c") + + # connection + test.add_edge("c", "d") + + # c2 + test.add_edge("d", "e") + test.add_edge("d", "f") + test.add_edge("f", "e") + + # ground truth + ground_truth = {frozenset(["a", "c", "b"]), frozenset(["e", "d", "f"])} + + communities = asyn_fluidc(test, 2, seed=7) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_five_clique_ring(): + test = Graph() + + # c1 + test.add_edge("1a", "1b") + test.add_edge("1a", "1c") + test.add_edge("1a", "1d") + test.add_edge("1b", "1c") + test.add_edge("1b", "1d") + test.add_edge("1c", "1d") + + # c2 + test.add_edge("2a", "2b") + test.add_edge("2a", "2c") + test.add_edge("2a", "2d") + test.add_edge("2b", "2c") + test.add_edge("2b", "2d") + test.add_edge("2c", "2d") + + # c3 + test.add_edge("3a", "3b") + test.add_edge("3a", "3c") + test.add_edge("3a", "3d") + test.add_edge("3b", "3c") + test.add_edge("3b", "3d") + test.add_edge("3c", "3d") + + # c4 + test.add_edge("4a", "4b") + test.add_edge("4a", "4c") + test.add_edge("4a", "4d") + test.add_edge("4b", "4c") + test.add_edge("4b", "4d") + test.add_edge("4c", "4d") + + # c5 + test.add_edge("5a", "5b") + test.add_edge("5a", "5c") + test.add_edge("5a", "5d") + test.add_edge("5b", "5c") + test.add_edge("5b", "5d") + test.add_edge("5c", 
"5d") + + # connections + test.add_edge("1a", "2c") + test.add_edge("2a", "3c") + test.add_edge("3a", "4c") + test.add_edge("4a", "5c") + test.add_edge("5a", "1c") + + # ground truth + ground_truth = { + frozenset(["1a", "1b", "1c", "1d"]), + frozenset(["2a", "2b", "2c", "2d"]), + frozenset(["3a", "3b", "3c", "3d"]), + frozenset(["4a", "4b", "4c", "4d"]), + frozenset(["5a", "5b", "5c", "5d"]), + } + + communities = asyn_fluidc(test, 5, seed=9) + result = {frozenset(c) for c in communities} + assert result == ground_truth diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py new file mode 100644 index 0000000..1a8982f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_centrality.py @@ -0,0 +1,85 @@ +"""Unit tests for the :mod:`networkx.algorithms.community.centrality` +module. + +""" + +from operator import itemgetter + +import networkx as nx + + +def set_of_sets(iterable): + return set(map(frozenset, iterable)) + + +def validate_communities(result, expected): + assert set_of_sets(result) == set_of_sets(expected) + + +def validate_possible_communities(result, *expected): + assert any(set_of_sets(result) == set_of_sets(p) for p in expected) + + +class TestGirvanNewman: + """Unit tests for the + :func:`networkx.algorithms.community.centrality.girvan_newman` + function. + + """ + + def test_no_edges(self): + G = nx.empty_graph(3) + communities = list(nx.community.girvan_newman(G)) + assert len(communities) == 1 + validate_communities(communities[0], [{0}, {1}, {2}]) + + def test_undirected(self): + # Start with the graph .-.-.-. + G = nx.path_graph(4) + communities = list(nx.community.girvan_newman(G)) + assert len(communities) == 3 + # After one removal, we get the graph .-. .-. + validate_communities(communities[0], [{0, 1}, {2, 3}]) + # After the next, we get the graph .-. . ., but there are two + # symmetric possible versions. + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) + # After the last removal, we always get the empty graph. + validate_communities(communities[2], [{0}, {1}, {2}, {3}]) + + def test_directed(self): + G = nx.DiGraph(nx.path_graph(4)) + communities = list(nx.community.girvan_newman(G)) + assert len(communities) == 3 + validate_communities(communities[0], [{0, 1}, {2, 3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) + validate_communities(communities[2], [{0}, {1}, {2}, {3}]) + + def test_selfloops(self): + G = nx.path_graph(4) + G.add_edge(0, 0) + G.add_edge(2, 2) + communities = list(nx.community.girvan_newman(G)) + assert len(communities) == 3 + validate_communities(communities[0], [{0, 1}, {2, 3}]) + validate_possible_communities( + communities[1], [{0}, {1}, {2, 3}], [{0, 1}, {2}, {3}] + ) + validate_communities(communities[2], [{0}, {1}, {2}, {3}]) + + def test_most_valuable_edge(self): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, 3), (1, 2, 2), (2, 3, 1)]) + # Let the most valuable edge be the one with the highest weight. 
+ + def heaviest(G): + return max(G.edges(data="weight"), key=itemgetter(2))[:2] + + communities = list(nx.community.girvan_newman(G, heaviest)) + assert len(communities) == 3 + validate_communities(communities[0], [{0}, {1, 2, 3}]) + validate_communities(communities[1], [{0}, {1}, {2, 3}]) + validate_communities(communities[2], [{0}, {1}, {2}, {3}]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py new file mode 100644 index 0000000..6331503 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_divisive.py @@ -0,0 +1,106 @@ +import pytest + +import networkx as nx + + +def test_edge_betweenness_partition(): + G = nx.barbell_graph(3, 0) + C = nx.community.edge_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + C = nx.community.edge_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_betweenness_partition(G, 10) + + +def test_edge_current_flow_betweenness_partition(): + pytest.importorskip("scipy") + + G = nx.barbell_graph(3, 0) + C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answer = [{0, 1, 2}, {3, 4, 5}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + G = nx.barbell_graph(3, 1) + C = nx.community.edge_current_flow_betweenness_partition(G, 2) + answers = [[{0, 1, 2, 3}, {4, 5, 6}], [{0, 1, 2}, {3, 4, 5, 6}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 3) + answer = [{0, 1, 2}, {4, 5, 6}, {3}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 4) + answers = [[{1, 2}, {4, 5, 6}, {3}, {0}], [{0, 1, 2}, {5, 6}, {3}, {4}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 5) + answer = [{1, 2}, {5, 6}, {3}, {0}, {4}] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 6) + answers = [[{2}, {5, 6}, {3}, {0}, {4}, {1}], [{1, 2}, {6}, {3}, {0}, {4}, {5}]] + assert len(C) == len(answers[0]) + assert any(all(s in answer for s in C) for answer in answers) + + C = nx.community.edge_current_flow_betweenness_partition(G, 7) + answer = [{n} for n in G] + assert len(C) == len(answer) + for s in answer: + assert s in C + + C = nx.community.edge_current_flow_betweenness_partition(G, 1) + assert C == [set(G)] + + C = nx.community.edge_current_flow_betweenness_partition(G, 1, weight="weight") + assert C == [set(G)] + + with pytest.raises(nx.NetworkXError): + 
nx.community.edge_current_flow_betweenness_partition(G, 0) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, -1) + + with pytest.raises(nx.NetworkXError): + nx.community.edge_current_flow_betweenness_partition(G, 10) + + N = 10 + G = nx.empty_graph(N) + for i in range(2, N - 1): + C = nx.community.edge_current_flow_betweenness_partition(G, i) + assert C == [{n} for n in G] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py new file mode 100644 index 0000000..aa0b7e8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kclique.py @@ -0,0 +1,91 @@ +from itertools import combinations + +import pytest + +import networkx as nx + + +def test_overlapping_K5(): + G = nx.Graph() + G.add_edges_from(combinations(range(5), 2)) # Add a five clique + G.add_edges_from(combinations(range(2, 7), 2)) # Add another five clique + c = list(nx.community.k_clique_communities(G, 4)) + assert c == [frozenset(range(7))] + c = set(nx.community.k_clique_communities(G, 5)) + assert c == {frozenset(range(5)), frozenset(range(2, 7))} + + +def test_isolated_K5(): + G = nx.Graph() + G.add_edges_from(combinations(range(5), 2)) # Add a five clique + G.add_edges_from(combinations(range(5, 10), 2)) # Add another five clique + c = set(nx.community.k_clique_communities(G, 5)) + assert c == {frozenset(range(5)), frozenset(range(5, 10))} + + +class TestZacharyKarateClub: + def setup_method(self): + self.G = nx.karate_club_graph() + + def _check_communities(self, k, expected): + communities = set(nx.community.k_clique_communities(self.G, k)) + assert communities == expected + + def test_k2(self): + # clique percolation with k=2 is just connected components + expected = {frozenset(self.G)} + self._check_communities(2, expected) + + def test_k3(self): + comm1 = [ + 0, + 1, + 2, + 3, + 7, + 8, + 12, + 13, + 14, + 15, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + ] + comm2 = [0, 4, 5, 6, 10, 16] + comm3 = [24, 25, 31] + expected = {frozenset(comm1), frozenset(comm2), frozenset(comm3)} + self._check_communities(3, expected) + + def test_k4(self): + expected = { + frozenset([0, 1, 2, 3, 7, 13]), + frozenset([8, 32, 30, 33]), + frozenset([32, 33, 29, 23]), + } + self._check_communities(4, expected) + + def test_k5(self): + expected = {frozenset([0, 1, 2, 3, 7, 13])} + self._check_communities(5, expected) + + def test_k6(self): + expected = set() + self._check_communities(6, expected) + + +def test_bad_k(): + with pytest.raises(nx.NetworkXError): + list(nx.community.k_clique_communities(nx.Graph(), 1)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py new file mode 100644 index 0000000..25d53d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_kernighan_lin.py @@ -0,0 +1,92 @@ +"""Unit tests for the :mod:`networkx.algorithms.community.kernighan_lin` +module. 
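+
+The Kernighan--Lin heuristic maintains a bisection of the nodes and
+repeatedly swaps the pair of nodes, one from each block, whose exchange
+best reduces the total weight of the edges cut, stopping when no swap
+improves the cut. A minimal usage sketch (the barbell graph and seed
+below are taken from the tests that follow)::
+
+    >>> import networkx as nx
+    >>> from networkx.algorithms.community import kernighan_lin_bisection
+    >>> A, B = kernighan_lin_bisection(nx.barbell_graph(3, 0), seed=1)
+    >>> sorted(map(sorted, (A, B)))
+    [[0, 1, 2], [3, 4, 5]]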
+""" + +from itertools import permutations + +import pytest + +import networkx as nx +from networkx.algorithms.community import kernighan_lin_bisection + + +def assert_partition_equal(x, y): + assert set(map(frozenset, x)) == set(map(frozenset, y)) + + +def test_partition(): + G = nx.barbell_graph(3, 0) + C = kernighan_lin_bisection(G) + assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}]) + + +def test_partition_argument(): + G = nx.barbell_graph(3, 0) + partition = [{0, 1, 2}, {3, 4, 5}] + C = kernighan_lin_bisection(G, partition) + assert_partition_equal(C, partition) + + +def test_partition_argument_non_integer_nodes(): + G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + partition = ({"A", "B"}, {"C", "D"}) + C = kernighan_lin_bisection(G, partition) + assert_partition_equal(C, partition) + + +def test_seed_argument(): + G = nx.barbell_graph(3, 0) + C = kernighan_lin_bisection(G, seed=1) + assert_partition_equal(C, [{0, 1, 2}, {3, 4, 5}]) + + +def test_non_disjoint_partition(): + with pytest.raises(nx.NetworkXError): + G = nx.barbell_graph(3, 0) + partition = ({0, 1, 2}, {2, 3, 4, 5}) + kernighan_lin_bisection(G, partition) + + +def test_too_many_blocks(): + with pytest.raises(nx.NetworkXError): + G = nx.barbell_graph(3, 0) + partition = ({0, 1}, {2}, {3, 4, 5}) + kernighan_lin_bisection(G, partition) + + +def test_multigraph(): + G = nx.cycle_graph(4) + M = nx.MultiGraph(G.edges()) + M.add_edges_from(G.edges()) + M.remove_edge(1, 2) + for labels in permutations(range(4)): + mapping = dict(zip(M, labels)) + A, B = kernighan_lin_bisection(nx.relabel_nodes(M, mapping), seed=0) + assert_partition_equal( + [A, B], [{mapping[0], mapping[1]}, {mapping[2], mapping[3]}] + ) + + +def test_max_iter_argument(): + G = nx.Graph( + [ + ("A", "B", {"weight": 1}), + ("A", "C", {"weight": 2}), + ("A", "D", {"weight": 3}), + ("A", "E", {"weight": 2}), + ("A", "F", {"weight": 4}), + ("B", "C", {"weight": 1}), + ("B", "D", {"weight": 4}), + ("B", "E", {"weight": 2}), + ("B", "F", {"weight": 1}), + ("C", "D", {"weight": 3}), + ("C", "E", {"weight": 2}), + ("C", "F", {"weight": 1}), + ("D", "E", {"weight": 4}), + ("D", "F", {"weight": 3}), + ("E", "F", {"weight": 2}), + ] + ) + partition = ({"A", "B", "C"}, {"D", "E", "F"}) + C = kernighan_lin_bisection(G, partition, max_iter=1) + assert_partition_equal(C, ({"A", "F", "C"}, {"D", "E", "B"})) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py new file mode 100644 index 0000000..4be72db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_label_propagation.py @@ -0,0 +1,241 @@ +from itertools import chain, combinations + +import pytest + +import networkx as nx + + +def test_directed_not_supported(): + with pytest.raises(nx.NetworkXNotImplemented): + # not supported for directed graphs + test = nx.DiGraph() + test.add_edge("a", "b") + test.add_edge("a", "c") + test.add_edge("b", "d") + result = nx.community.label_propagation_communities(test) + + +def test_iterator_vs_iterable(): + G = nx.empty_graph("a") + assert list(nx.community.label_propagation_communities(G)) == [{"a"}] + for community in nx.community.label_propagation_communities(G): + assert community == {"a"} + pytest.raises(TypeError, next, nx.community.label_propagation_communities(G)) + + +def test_one_node(): + test = nx.Graph() + test.add_node("a") + + # The expected communities are: + 
ground_truth = {frozenset(["a"])} + + communities = nx.community.label_propagation_communities(test) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_unconnected_communities(): + test = nx.Graph() + # community 1 + test.add_edge("a", "c") + test.add_edge("a", "d") + test.add_edge("d", "c") + # community 2 + test.add_edge("b", "e") + test.add_edge("e", "f") + test.add_edge("f", "b") + + # The expected communities are: + ground_truth = {frozenset(["a", "c", "d"]), frozenset(["b", "e", "f"])} + + communities = nx.community.label_propagation_communities(test) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + +def test_connected_communities(): + test = nx.Graph() + # community 1 + test.add_edge("a", "b") + test.add_edge("c", "a") + test.add_edge("c", "b") + test.add_edge("d", "a") + test.add_edge("d", "b") + test.add_edge("d", "c") + test.add_edge("e", "a") + test.add_edge("e", "b") + test.add_edge("e", "c") + test.add_edge("e", "d") + # community 2 + test.add_edge("1", "2") + test.add_edge("3", "1") + test.add_edge("3", "2") + test.add_edge("4", "1") + test.add_edge("4", "2") + test.add_edge("4", "3") + test.add_edge("5", "1") + test.add_edge("5", "2") + test.add_edge("5", "3") + test.add_edge("5", "4") + # edge between community 1 and 2 + test.add_edge("a", "1") + # community 3 + test.add_edge("x", "y") + # community 4 with only a single node + test.add_node("z") + + # The expected communities are: + ground_truth1 = { + frozenset(["a", "b", "c", "d", "e"]), + frozenset(["1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } + ground_truth2 = { + frozenset(["a", "b", "c", "d", "e", "1", "2", "3", "4", "5"]), + frozenset(["x", "y"]), + frozenset(["z"]), + } + ground_truth = (ground_truth1, ground_truth2) + + communities = nx.community.label_propagation_communities(test) + result = {frozenset(c) for c in communities} + assert result in ground_truth + + +def test_termination(): + # ensure termination of asyn_lpa_communities in two cases + # that led to an endless loop in a previous version + test1 = nx.karate_club_graph() + test2 = nx.caveman_graph(2, 10) + test2.add_edges_from([(0, 20), (20, 10)]) + nx.community.asyn_lpa_communities(test1) + nx.community.asyn_lpa_communities(test2) + + +class TestAsynLpaCommunities: + def _check_communities(self, G, expected): + """Checks that the communities computed from the given graph ``G`` + using the :func:`~networkx.asyn_lpa_communities` function match + the set of nodes given in ``expected``. + + ``expected`` must be a :class:`set` of :class:`frozenset` + instances, each element of which is a node in the graph. + + """ + communities = nx.community.asyn_lpa_communities(G) + result = {frozenset(c) for c in communities} + assert result == expected + + def test_null_graph(self): + G = nx.null_graph() + ground_truth = set() + self._check_communities(G, ground_truth) + + def test_single_node(self): + G = nx.empty_graph(1) + ground_truth = {frozenset([0])} + self._check_communities(G, ground_truth) + + def test_simple_communities(self): + # This graph is the disjoint union of two triangles. 
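+        # nx.Graph accepts any iterable of edges here, and a two-character
+        # string such as "ab" unpacks to the edge ("a", "b").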
+ G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} + self._check_communities(G, ground_truth) + + def test_seed_argument(self): + G = nx.Graph(["ab", "ac", "bc", "de", "df", "fe"]) + ground_truth = {frozenset("abc"), frozenset("def")} + communities = nx.community.asyn_lpa_communities(G, seed=1) + result = {frozenset(c) for c in communities} + assert result == ground_truth + + def test_several_communities(self): + # This graph is the disjoint union of five triangles. + ground_truth = {frozenset(range(3 * i, 3 * (i + 1))) for i in range(5)} + edges = chain.from_iterable(combinations(c, 2) for c in ground_truth) + G = nx.Graph(edges) + self._check_communities(G, ground_truth) + + +class TestFastLabelPropagationCommunities: + N = 100 # number of nodes + K = 15 # average node degree + + def _check_communities(self, G, truth, weight=None, seed=42): + C = nx.community.fast_label_propagation_communities(G, weight=weight, seed=seed) + assert {frozenset(c) for c in C} == truth + + def test_null_graph(self): + G = nx.null_graph() + truth = set() + self._check_communities(G, truth) + + def test_empty_graph(self): + G = nx.empty_graph(self.N) + truth = {frozenset([i]) for i in G} + self._check_communities(G, truth) + + def test_star_graph(self): + G = nx.star_graph(self.N) + truth = {frozenset(G)} + self._check_communities(G, truth) + + def test_complete_graph(self): + G = nx.complete_graph(self.N) + truth = {frozenset(G)} + self._check_communities(G, truth) + + def test_bipartite_graph(self): + G = nx.complete_bipartite_graph(self.N // 2, self.N // 2) + truth = {frozenset(G)} + self._check_communities(G, truth) + + def test_random_graph(self): + G = nx.gnm_random_graph(self.N, self.N * self.K // 2, seed=42) + truth = {frozenset(G)} + self._check_communities(G, truth) + + def test_disjoin_cliques(self): + G = nx.Graph(["ab", "AB", "AC", "BC", "12", "13", "14", "23", "24", "34"]) + truth = {frozenset("ab"), frozenset("ABC"), frozenset("1234")} + self._check_communities(G, truth) + + def test_ring_of_cliques(self): + N, K = self.N, self.K + G = nx.ring_of_cliques(N, K) + truth = {frozenset([K * i + k for k in range(K)]) for i in range(N)} + self._check_communities(G, truth) + + def test_larger_graph(self): + G = nx.gnm_random_graph(100 * self.N, 50 * self.N * self.K, seed=42) + nx.community.fast_label_propagation_communities(G) + + def test_graph_type(self): + G1 = nx.complete_graph(self.N, nx.MultiDiGraph()) + G2 = nx.MultiGraph(G1) + G3 = nx.DiGraph(G1) + G4 = nx.Graph(G1) + truth = {frozenset(G1)} + self._check_communities(G1, truth) + self._check_communities(G2, truth) + self._check_communities(G3, truth) + self._check_communities(G4, truth) + + def test_weight_argument(self): + G = nx.MultiDiGraph() + G.add_edge(1, 2, weight=1.41) + G.add_edge(2, 1, weight=1.41) + G.add_edge(2, 3) + G.add_edge(3, 4, weight=3.14) + truth = {frozenset({1, 2}), frozenset({3, 4})} + self._check_communities(G, truth, weight="weight") + + def test_seed_argument(self): + G = nx.karate_club_graph() + C = nx.community.fast_label_propagation_communities(G, seed=2023) + truth = {frozenset(c) for c in C} + self._check_communities(G, truth, seed=2023) + # smoke test that seed=None works + C = nx.community.fast_label_propagation_communities(G, seed=None) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_leiden.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_leiden.py new file mode 100644 index 
0000000..7e61b94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_leiden.py @@ -0,0 +1,138 @@ +import pytest + +import networkx as nx +from networkx.algorithms.community import leiden_communities, leiden_partitions + +# Leiden is not yet implemented by networkx, so only run tests in this file for +# backends that implement Leiden. +no_backends_for_leiden_communities = ( + "not set(nx.config.backend_priority.algos) & leiden_communities.backends" +) + +no_backends_for_leiden_partitions = ( + "not set(nx.config.backend_priority.algos) & leiden_partitions.backends" +) + + +def test_leiden_with_nx_backend(): + G = nx.karate_club_graph() + with pytest.raises(NotImplementedError): + nx.community.leiden_partitions(G, backend="networkx") + with pytest.raises(NotImplementedError): + nx.community.leiden_communities(G, backend="networkx") + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_modularity_increase(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + partition = [{u} for u in G.nodes()] + mod = nx.community.modularity(G, partition) + partition = nx.community.leiden_communities(G) + + assert nx.community.modularity(G, partition) > mod + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_valid_partition(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + partition = nx.community.leiden_communities(G) + + assert nx.community.is_partition(G, partition) + + +@pytest.mark.skipif(no_backends_for_leiden_partitions) +def test_partition_iterator(): + G = nx.path_graph(15) + parts_iter = nx.community.leiden_partitions(G, seed=42) + first_part = next(parts_iter) + first_copy = [s.copy() for s in first_part] + + # check 1st part stays fixed even after 2nd iteration (like gh-5901 in louvain) + assert first_copy[0] == first_part[0] + second_part = next(parts_iter) + assert first_copy[0] == first_part[0] + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_none_weight_param(): + G = nx.karate_club_graph() + nx.set_edge_attributes( + G, {edge: i * i for i, edge in enumerate(G.edges)}, name="foo" + ) + + partition1 = nx.community.leiden_communities(G, weight=None, seed=2) + partition2 = nx.community.leiden_communities(G, weight="foo", seed=2) + partition3 = nx.community.leiden_communities(G, weight="weight", seed=2) + + assert partition1 != partition2 + assert partition2 != partition3 + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_quality(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + H = nx.MultiGraph(G) + + partition = nx.community.leiden_communities(G) + partition2 = nx.community.leiden_communities(H) + + quality = nx.community.partition_quality(G, partition)[0] + quality2 = nx.community.partition_quality(H, partition2)[0] + + assert quality >= 0.65 + assert quality2 >= 0.65 + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_resolution(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + + partition1 = nx.community.leiden_communities(G, resolution=0.5, seed=12) + partition2 = nx.community.leiden_communities(G, seed=12) + partition3 = nx.community.leiden_communities(G, resolution=2, seed=12) + + assert len(partition1) <= len(partition2) + assert len(partition2) <= len(partition3) + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def 
test_empty_graph(): + G = nx.Graph() + G.add_nodes_from(range(5)) + expected = [{0}, {1}, {2}, {3}, {4}] + assert nx.community.leiden_communities(G) == expected + + +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_directed_not_implemented(): + G = nx.cycle_graph(4, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXNotImplemented): + nx.community.leiden_communities(G) + + +@pytest.mark.skipif(no_backends_for_leiden_partitions) +@pytest.mark.skipif(no_backends_for_leiden_communities) +def test_max_level(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + parts_iter = nx.community.leiden_partitions(G, seed=42) + for max_level, expected in enumerate(parts_iter, 1): + partition = nx.community.leiden_communities(G, max_level=max_level, seed=42) + assert partition == expected + assert max_level > 1 # Ensure we are actually testing max_level + # max_level is an upper limit; it's okay if we stop before it's hit. + partition = nx.community.leiden_communities(G, max_level=max_level + 1, seed=42) + assert partition == expected + with pytest.raises( + ValueError, match="max_level argument must be a positive integer" + ): + nx.community.leiden_communities(G, max_level=0) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_local.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_local.py new file mode 100644 index 0000000..d0ec43f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_local.py @@ -0,0 +1,76 @@ +import pytest + +import networkx as nx + + +def test_greedy_source_expansion_karate_club(): + G = nx.karate_club_graph() + + community = nx.community.greedy_source_expansion(G, source=16) + + expected = {0, 4, 5, 6, 10, 16} + + assert community == expected + + +def test_greedy_source_expansion_cutoff(): + G = nx.karate_club_graph() + + community = nx.community.greedy_source_expansion(G, source=16, cutoff=3) + + assert community == {5, 6, 16} + + +def test_greedy_source_expansion_invalid_method(): + G = nx.karate_club_graph() + + with pytest.raises(ValueError): + nx.community.greedy_source_expansion(G, source=16, cutoff=3, method="invalid") + + +def test_greedy_source_expansion_connected_component(): + G_edges = [(0, 2), (0, 1), (1, 0), (2, 1), (2, 0), (3, 4), (4, 3)] + G = nx.Graph(G_edges) + expected = {0, 1, 2} + community = nx.community.greedy_source_expansion(G, source=0) + assert community == expected + + +def test_greedy_source_expansion_directed_graph(): + G_edges = [ + (0, 2), + (0, 1), + (1, 0), + (2, 1), + (2, 0), + (3, 4), + (4, 3), + (4, 5), + (5, 3), + (5, 6), + (0, 6), + ] + G = nx.DiGraph(G_edges) + + expected = {0, 1, 2, 6} + community = nx.community.greedy_source_expansion(G, source=0) + assert community == expected + + +def test_greedy_source_expansion_multigraph(): + G = nx.MultiGraph(nx.karate_club_graph()) + G.add_edge(0, 1) + G.add_edge(0, 9) + + expected = {0, 4, 5, 6, 10, 16} + + community = nx.community.greedy_source_expansion(G, source=16) + + assert community == expected + + +def test_greedy_source_expansion_empty_graph(): + G = nx.Graph() + G.add_nodes_from(range(5)) + expected = {0} + assert nx.community.greedy_source_expansion(G, source=0) == expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py new file mode 100644 index 0000000..816e6f1 --- 
/dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_louvain.py @@ -0,0 +1,264 @@ +import pytest + +import networkx as nx + + +def test_modularity_increase(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + partition = [{u} for u in G.nodes()] + mod = nx.community.modularity(G, partition) + partition = nx.community.louvain_communities(G) + + assert nx.community.modularity(G, partition) > mod + + +def test_valid_partition(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + H = G.to_directed() + partition = nx.community.louvain_communities(G) + partition2 = nx.community.louvain_communities(H) + + assert nx.community.is_partition(G, partition) + assert nx.community.is_partition(H, partition2) + + +def test_karate_club_partition(): + G = nx.karate_club_graph() + part = [ + {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21}, + {16, 4, 5, 6, 10}, + {23, 25, 27, 28, 24, 31}, + {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30}, + ] + partition = nx.community.louvain_communities(G, seed=2, weight=None) + + assert part == partition + + +def test_partition_iterator(): + G = nx.path_graph(15) + parts_iter = nx.community.louvain_partitions(G, seed=42) + first_part = next(parts_iter) + first_copy = [s.copy() for s in first_part] + + # gh-5901 reports sets changing after next partition is yielded + assert first_copy[0] == first_part[0] + second_part = next(parts_iter) + assert first_copy[0] == first_part[0] + + +def test_undirected_selfloops(): + G = nx.karate_club_graph() + expected_partition = nx.community.louvain_communities(G, seed=2, weight=None) + part = [ + {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21}, + {16, 4, 5, 6, 10}, + {23, 25, 27, 28, 24, 31}, + {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30}, + ] + assert expected_partition == part + + G.add_weighted_edges_from([(i, i, i * 1000) for i in range(9)]) + # large self-loop weight impacts partition + partition = nx.community.louvain_communities(G, seed=2, weight="weight") + assert part != partition + + # small self-loop weights aren't enough to impact partition in this graph + partition = nx.community.louvain_communities(G, seed=2, weight=None) + assert part == partition + + +def test_directed_selfloops(): + G = nx.DiGraph() + G.add_nodes_from(range(11)) + G_edges = [ + (0, 2), + (0, 1), + (1, 0), + (2, 1), + (2, 0), + (3, 4), + (4, 3), + (7, 8), + (8, 7), + (9, 10), + (10, 9), + ] + G.add_edges_from(G_edges) + G_expected_partition = nx.community.louvain_communities(G, seed=123, weight=None) + + G.add_weighted_edges_from([(i, i, i * 1000) for i in range(3)]) + # large self-loop weight impacts partition + G_partition = nx.community.louvain_communities(G, seed=123, weight="weight") + assert G_partition != G_expected_partition + + # small self-loop weights aren't enough to impact partition in this graph + G_partition = nx.community.louvain_communities(G, seed=123, weight=None) + assert G_partition == G_expected_partition + + +def test_directed_partition(): + """ + Test 2 cases that were looping infinitely + from issues #5175 and #5704 + """ + G = nx.DiGraph() + H = nx.DiGraph() + G.add_nodes_from(range(10)) + H.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + G_edges = [ + (0, 2), + (0, 1), + (1, 0), + (2, 1), + (2, 0), + (3, 4), + (4, 3), + (7, 8), + (8, 7), + (9, 10), + (10, 9), + ] + H_edges = [ + (1, 2), + (1, 6), + (1, 9), + (2, 3), + (2, 4), + (2, 5), + (3, 4), + (4, 3), + (4, 5), + (5, 4), + (6, 7), + (6, 8), + 
(9, 10), + (9, 11), + (10, 11), + (11, 10), + ] + G.add_edges_from(G_edges) + H.add_edges_from(H_edges) + + G_expected_partition = [{0, 1, 2}, {3, 4}, {5}, {6}, {8, 7}, {9, 10}] + G_partition = nx.community.louvain_communities(G, seed=123, weight=None) + + H_expected_partition = [{2, 3, 4, 5}, {8, 1, 6, 7}, {9, 10, 11}] + H_partition = nx.community.louvain_communities(H, seed=123, weight=None) + + assert G_partition == G_expected_partition + assert H_partition == H_expected_partition + + +def test_none_weight_param(): + G = nx.karate_club_graph() + nx.set_edge_attributes( + G, {edge: i * i for i, edge in enumerate(G.edges)}, name="foo" + ) + + part = [ + {0, 1, 2, 3, 7, 9, 11, 12, 13, 17, 19, 21}, + {16, 4, 5, 6, 10}, + {23, 25, 27, 28, 24, 31}, + {32, 33, 8, 14, 15, 18, 20, 22, 26, 29, 30}, + ] + partition1 = nx.community.louvain_communities(G, weight=None, seed=2) + partition2 = nx.community.louvain_communities(G, weight="foo", seed=2) + partition3 = nx.community.louvain_communities(G, weight="weight", seed=2) + + assert part == partition1 + assert part != partition2 + assert part != partition3 + assert partition2 != partition3 + + +def test_quality(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + H = nx.gn_graph(200, seed=1234) + I = nx.MultiGraph(G) + J = nx.MultiDiGraph(H) + + partition = nx.community.louvain_communities(G) + partition2 = nx.community.louvain_communities(H) + partition3 = nx.community.louvain_communities(I) + partition4 = nx.community.louvain_communities(J) + + quality = nx.community.partition_quality(G, partition)[0] + quality2 = nx.community.partition_quality(H, partition2)[0] + quality3 = nx.community.partition_quality(I, partition3)[0] + quality4 = nx.community.partition_quality(J, partition4)[0] + + assert quality >= 0.65 + assert quality2 >= 0.65 + assert quality3 >= 0.65 + assert quality4 >= 0.65 + + +def test_multigraph(): + G = nx.karate_club_graph() + H = nx.MultiGraph(G) + G.add_edge(0, 1, weight=10) + H.add_edge(0, 1, weight=9) + G.add_edge(0, 9, foo=20) + H.add_edge(0, 9, foo=20) + + partition1 = nx.community.louvain_communities(G, seed=1234) + partition2 = nx.community.louvain_communities(H, seed=1234) + partition3 = nx.community.louvain_communities(H, weight="foo", seed=1234) + + assert partition1 == partition2 != partition3 + + +def test_resolution(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + + partition1 = nx.community.louvain_communities(G, resolution=0.5, seed=12) + partition2 = nx.community.louvain_communities(G, seed=12) + partition3 = nx.community.louvain_communities(G, resolution=2, seed=12) + + assert len(partition1) <= len(partition2) <= len(partition3) + + +def test_threshold(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + partition1 = nx.community.louvain_communities(G, threshold=0.3, seed=2) + partition2 = nx.community.louvain_communities(G, seed=2) + mod1 = nx.community.modularity(G, partition1) + mod2 = nx.community.modularity(G, partition2) + + assert mod1 <= mod2 + + +def test_empty_graph(): + G = nx.Graph() + G.add_nodes_from(range(5)) + expected = [{0}, {1}, {2}, {3}, {4}] + assert nx.community.louvain_communities(G) == expected + + +def test_max_level(): + G = nx.LFR_benchmark_graph( + 250, 3, 1.5, 0.009, average_degree=5, min_community=20, seed=10 + ) + parts_iter = nx.community.louvain_partitions(G, seed=42) + for max_level, expected in enumerate(parts_iter, 1): 
+ partition = nx.community.louvain_communities(G, max_level=max_level, seed=42) + assert partition == expected + assert max_level > 1 # Ensure we are actually testing max_level + # max_level is an upper limit; it's okay if we stop before it's hit. + partition = nx.community.louvain_communities(G, max_level=max_level + 1, seed=42) + assert partition == expected + with pytest.raises( + ValueError, match="max_level argument must be a positive integer" + ): + nx.community.louvain_communities(G, max_level=0) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py new file mode 100644 index 0000000..cfa48f0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_lukes.py @@ -0,0 +1,152 @@ +from itertools import product + +import pytest + +import networkx as nx + +EWL = "e_weight" +NWL = "n_weight" + + +# first test from the Lukes original paper +def paper_1_case(float_edge_wt=False, explicit_node_wt=True, directed=False): + # problem-specific constants + limit = 3 + + # configuration + if float_edge_wt: + shift = 0.001 + else: + shift = 0 + + if directed: + example_1 = nx.DiGraph() + else: + example_1 = nx.Graph() + + # graph creation + example_1.add_edge(1, 2, **{EWL: 3 + shift}) + example_1.add_edge(1, 4, **{EWL: 2 + shift}) + example_1.add_edge(2, 3, **{EWL: 4 + shift}) + example_1.add_edge(2, 5, **{EWL: 6 + shift}) + + # node weights + if explicit_node_wt: + nx.set_node_attributes(example_1, 1, NWL) + wtu = NWL + else: + wtu = None + + # partitioning + clusters_1 = { + frozenset(x) + for x in nx.community.lukes_partitioning( + example_1, limit, node_weight=wtu, edge_weight=EWL + ) + } + + return clusters_1 + + +# second test from the Lukes original paper +def paper_2_case(explicit_edge_wt=True, directed=False): + # problem specific constants + byte_block_size = 32 + + # configuration + if directed: + example_2 = nx.DiGraph() + else: + example_2 = nx.Graph() + + if explicit_edge_wt: + edic = {EWL: 1} + wtu = EWL + else: + edic = {} + wtu = None + + # graph creation + example_2.add_edge("name", "home_address", **edic) + example_2.add_edge("name", "education", **edic) + example_2.add_edge("education", "bs", **edic) + example_2.add_edge("education", "ms", **edic) + example_2.add_edge("education", "phd", **edic) + example_2.add_edge("name", "telephone", **edic) + example_2.add_edge("telephone", "home", **edic) + example_2.add_edge("telephone", "office", **edic) + example_2.add_edge("office", "no1", **edic) + example_2.add_edge("office", "no2", **edic) + + example_2.nodes["name"][NWL] = 20 + example_2.nodes["education"][NWL] = 10 + example_2.nodes["bs"][NWL] = 1 + example_2.nodes["ms"][NWL] = 1 + example_2.nodes["phd"][NWL] = 1 + example_2.nodes["home_address"][NWL] = 8 + example_2.nodes["telephone"][NWL] = 8 + example_2.nodes["home"][NWL] = 8 + example_2.nodes["office"][NWL] = 4 + example_2.nodes["no1"][NWL] = 1 + example_2.nodes["no2"][NWL] = 1 + + # partitioning + clusters_2 = { + frozenset(x) + for x in nx.community.lukes_partitioning( + example_2, byte_block_size, node_weight=NWL, edge_weight=wtu + ) + } + + return clusters_2 + + +def test_paper_1_case(): + ground_truth = {frozenset([1, 4]), frozenset([2, 3, 5])} + + tf = (True, False) + for flt, nwt, drc in product(tf, tf, tf): + part = paper_1_case(flt, nwt, drc) + assert part == ground_truth + + +def test_paper_2_case(): + ground_truth = { + frozenset(["education", "bs", 
"ms", "phd"]), + frozenset(["name", "home_address"]), + frozenset(["telephone", "home", "office", "no1", "no2"]), + } + + tf = (True, False) + for ewt, drc in product(tf, tf): + part = paper_2_case(ewt, drc) + assert part == ground_truth + + +def test_mandatory_tree(): + not_a_tree = nx.complete_graph(4) + + with pytest.raises(nx.NotATree): + nx.community.lukes_partitioning(not_a_tree, 5) + + +def test_mandatory_integrality(): + byte_block_size = 32 + + ex_1_broken = nx.DiGraph() + + ex_1_broken.add_edge(1, 2, **{EWL: 3.2}) + ex_1_broken.add_edge(1, 4, **{EWL: 2.4}) + ex_1_broken.add_edge(2, 3, **{EWL: 4.0}) + ex_1_broken.add_edge(2, 5, **{EWL: 6.3}) + + ex_1_broken.nodes[1][NWL] = 1.2 # ! + ex_1_broken.nodes[2][NWL] = 1 + ex_1_broken.nodes[3][NWL] = 1 + ex_1_broken.nodes[4][NWL] = 1 + ex_1_broken.nodes[5][NWL] = 2 + + with pytest.raises(TypeError): + nx.community.lukes_partitioning( + ex_1_broken, byte_block_size, node_weight=NWL, edge_weight=EWL + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py new file mode 100644 index 0000000..0121367 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_modularity_max.py @@ -0,0 +1,340 @@ +import pytest + +import networkx as nx +from networkx.algorithms.community import ( + greedy_modularity_communities, + naive_greedy_modularity_communities, +) + + +@pytest.mark.parametrize( + "func", (greedy_modularity_communities, naive_greedy_modularity_communities) +) +def test_modularity_communities(func): + G = nx.karate_club_graph() + john_a = frozenset( + [8, 14, 15, 18, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33] + ) + mr_hi = frozenset([0, 4, 5, 6, 10, 11, 16, 19]) + overlap = frozenset([1, 2, 3, 7, 9, 12, 13, 17, 21]) + expected = {john_a, overlap, mr_hi} + assert set(func(G, weight=None)) == expected + + +@pytest.mark.parametrize( + "func", (greedy_modularity_communities, naive_greedy_modularity_communities) +) +def test_modularity_communities_categorical_labels(func): + # Using other than 0-starting contiguous integers as node-labels. 
+ G = nx.Graph( + [ + ("a", "b"), + ("a", "c"), + ("b", "c"), + ("b", "d"), # inter-community edge + ("d", "e"), + ("d", "f"), + ("d", "g"), + ("f", "g"), + ("d", "e"), + ("f", "e"), + ] + ) + expected = {frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})} + assert set(func(G)) == expected + + +def test_greedy_modularity_communities_components(): + # Test for gh-5530 + G = nx.Graph([(0, 1), (2, 3), (4, 5), (5, 6)]) + # usual case with 3 components + assert greedy_modularity_communities(G) == [{4, 5, 6}, {0, 1}, {2, 3}] + # best_n can make the algorithm continue even when modularity goes down + assert greedy_modularity_communities(G, best_n=3) == [{4, 5, 6}, {0, 1}, {2, 3}] + assert greedy_modularity_communities(G, best_n=2) == [{0, 1, 4, 5, 6}, {2, 3}] + assert greedy_modularity_communities(G, best_n=1) == [{0, 1, 2, 3, 4, 5, 6}] + + +def test_greedy_modularity_communities_relabeled(): + # Test for gh-4966 + G = nx.balanced_tree(2, 2) + mapping = {0: "a", 1: "b", 2: "c", 3: "d", 4: "e", 5: "f", 6: "g", 7: "h"} + G = nx.relabel_nodes(G, mapping) + expected = [frozenset({"e", "d", "a", "b"}), frozenset({"c", "f", "g"})] + assert greedy_modularity_communities(G) == expected + + +def test_greedy_modularity_communities_directed(): + G = nx.DiGraph( + [ + ("a", "b"), + ("a", "c"), + ("b", "c"), + ("b", "d"), # inter-community edge + ("d", "e"), + ("d", "f"), + ("d", "g"), + ("f", "g"), + ("d", "e"), + ("f", "e"), + ] + ) + expected = [frozenset({"f", "g", "e", "d"}), frozenset({"a", "b", "c"})] + assert greedy_modularity_communities(G) == expected + + # with loops + G = nx.DiGraph() + G.add_edges_from( + [(1, 1), (1, 2), (1, 3), (2, 3), (1, 4), (4, 4), (5, 5), (4, 5), (4, 6), (5, 6)] + ) + expected = [frozenset({1, 2, 3}), frozenset({4, 5, 6})] + assert greedy_modularity_communities(G) == expected + + +@pytest.mark.parametrize( + "func", (greedy_modularity_communities, naive_greedy_modularity_communities) +) +def test_modularity_communities_weighted(func): + G = nx.balanced_tree(2, 3) + for a, b in G.edges: + if ((a == 1) or (a == 2)) and (b != 0): + G[a][b]["weight"] = 10.0 + else: + G[a][b]["weight"] = 1.0 + + expected = [{0, 1, 3, 4, 7, 8, 9, 10}, {2, 5, 6, 11, 12, 13, 14}] + + assert func(G, weight="weight") == expected + assert func(G, weight="weight", resolution=0.9) == expected + assert func(G, weight="weight", resolution=0.3) == expected + assert func(G, weight="weight", resolution=1.1) != expected + + +def test_modularity_communities_floating_point(): + # check for floating point error when used as key in the mapped_queue dict. + # Test for gh-4992 and gh-5000 + G = nx.Graph() + G.add_weighted_edges_from( + [(0, 1, 12), (1, 4, 71), (2, 3, 15), (2, 4, 10), (3, 6, 13)] + ) + expected = [{0, 1, 4}, {2, 3, 6}] + assert greedy_modularity_communities(G, weight="weight") == expected + assert ( + greedy_modularity_communities(G, weight="weight", resolution=0.99) == expected + ) + + +def test_modularity_communities_directed_weighted(): + G = nx.DiGraph() + G.add_weighted_edges_from( + [ + (1, 2, 5), + (1, 3, 3), + (2, 3, 6), + (2, 6, 1), + (1, 4, 1), + (4, 5, 3), + (4, 6, 7), + (5, 6, 2), + (5, 7, 5), + (5, 8, 4), + (6, 8, 3), + ] + ) + expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})] + assert greedy_modularity_communities(G, weight="weight") == expected + + # A large weight of the edge (2, 6) causes 6 to change group, even if it shares + # only one connection with the new group and 3 with the old one. 
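+    # Intuitively, the heavier edge raises the observed intra-community
+    # weight term of modularity by more than it raises the expected
+    # degree-product term, so grouping 6 with {1, 2, 3} now pays off.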
+ G[2][6]["weight"] = 20 + expected = [frozenset({1, 2, 3, 6}), frozenset({4, 5, 7, 8})] + assert greedy_modularity_communities(G, weight="weight") == expected + + +def test_greedy_modularity_communities_multigraph(): + G = nx.MultiGraph() + G.add_edges_from( + [ + (1, 2), + (1, 2), + (1, 3), + (2, 3), + (1, 4), + (2, 4), + (4, 5), + (5, 6), + (5, 7), + (5, 7), + (6, 7), + (7, 8), + (5, 8), + ] + ) + expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})] + assert greedy_modularity_communities(G) == expected + + # Converting (4, 5) into a multi-edge causes node 4 to change group. + G.add_edge(4, 5) + expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})] + assert greedy_modularity_communities(G) == expected + + +def test_greedy_modularity_communities_multigraph_weighted(): + G = nx.MultiGraph() + G.add_weighted_edges_from( + [ + (1, 2, 5), + (1, 2, 3), + (1, 3, 6), + (1, 3, 6), + (2, 3, 4), + (1, 4, 1), + (1, 4, 1), + (2, 4, 3), + (2, 4, 3), + (4, 5, 1), + (5, 6, 3), + (5, 6, 7), + (5, 6, 4), + (5, 7, 9), + (5, 7, 9), + (6, 7, 8), + (7, 8, 2), + (7, 8, 2), + (5, 8, 6), + (5, 8, 6), + ] + ) + expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})] + assert greedy_modularity_communities(G, weight="weight") == expected + + # Adding multi-edge (4, 5, 16) causes node 4 to change group. + G.add_edge(4, 5, weight=16) + expected = [frozenset({4, 5, 6, 7, 8}), frozenset({1, 2, 3})] + assert greedy_modularity_communities(G, weight="weight") == expected + + # Increasing the weight of edge (1, 4) causes node 4 to return to the former group. + G[1][4][1]["weight"] = 3 + expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})] + assert greedy_modularity_communities(G, weight="weight") == expected + + +def test_greed_modularity_communities_multidigraph(): + G = nx.MultiDiGraph() + G.add_edges_from( + [ + (1, 2), + (1, 2), + (3, 1), + (2, 3), + (2, 3), + (3, 2), + (1, 4), + (2, 4), + (4, 2), + (4, 5), + (5, 6), + (5, 6), + (6, 5), + (5, 7), + (6, 7), + (7, 8), + (5, 8), + (8, 4), + ] + ) + expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})] + assert greedy_modularity_communities(G, weight="weight") == expected + + +def test_greed_modularity_communities_multidigraph_weighted(): + G = nx.MultiDiGraph() + G.add_weighted_edges_from( + [ + (1, 2, 5), + (1, 2, 3), + (3, 1, 6), + (1, 3, 6), + (3, 2, 4), + (1, 4, 2), + (1, 4, 5), + (2, 4, 3), + (3, 2, 8), + (4, 2, 3), + (4, 3, 5), + (4, 5, 2), + (5, 6, 3), + (5, 6, 7), + (6, 5, 4), + (5, 7, 9), + (5, 7, 9), + (7, 6, 8), + (7, 8, 2), + (8, 7, 2), + (5, 8, 6), + (5, 8, 6), + ] + ) + expected = [frozenset({1, 2, 3, 4}), frozenset({5, 6, 7, 8})] + assert greedy_modularity_communities(G, weight="weight") == expected + + +def test_resolution_parameter_impact(): + G = nx.barbell_graph(5, 3) + + gamma = 1 + expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))] + assert greedy_modularity_communities(G, resolution=gamma) == expected + assert naive_greedy_modularity_communities(G, resolution=gamma) == expected + + gamma = 2.5 + expected = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}] + assert greedy_modularity_communities(G, resolution=gamma) == expected + assert naive_greedy_modularity_communities(G, resolution=gamma) == expected + + gamma = 0.3 + expected = [frozenset(range(8)), frozenset(range(8, 13))] + assert greedy_modularity_communities(G, resolution=gamma) == expected + assert naive_greedy_modularity_communities(G, resolution=gamma) == expected + + +def test_cutoff_parameter(): + G = 
nx.circular_ladder_graph(4) + + # No aggregation: + expected = [{k} for k in range(8)] + assert greedy_modularity_communities(G, cutoff=8) == expected + + # Aggregation to half order (number of nodes) + expected = [{k, k + 1} for k in range(0, 8, 2)] + assert greedy_modularity_communities(G, cutoff=4) == expected + + # Default aggregation case (here, 2 communities emerge) + expected = [frozenset(range(4)), frozenset(range(4, 8))] + assert greedy_modularity_communities(G, cutoff=1) == expected + + +def test_best_n(): + G = nx.barbell_graph(5, 3) + + # Same result as without enforcing cutoff: + best_n = 3 + expected = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))] + assert greedy_modularity_communities(G, best_n=best_n) == expected + + # One additional merging step: + best_n = 2 + expected = [frozenset(range(8)), frozenset(range(8, 13))] + assert greedy_modularity_communities(G, best_n=best_n) == expected + + # Two additional merging steps: + best_n = 1 + expected = [frozenset(range(13))] + assert greedy_modularity_communities(G, best_n=best_n) == expected + + +def test_greedy_modularity_communities_corner_cases(): + G = nx.empty_graph() + assert nx.community.greedy_modularity_communities(G) == [] + G.add_nodes_from(range(3)) + assert nx.community.greedy_modularity_communities(G) == [{0}, {1}, {2}] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py new file mode 100644 index 0000000..c502c7e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_quality.py @@ -0,0 +1,139 @@ +"""Unit tests for the :mod:`networkx.algorithms.community.quality` +module. + +""" + +import pytest + +import networkx as nx +from networkx import barbell_graph +from networkx.algorithms.community import modularity, partition_quality +from networkx.algorithms.community.quality import inter_community_edges + + +class TestPerformance: + """Unit tests for the :func:`performance` function.""" + + def test_bad_partition(self): + """Tests that a poor partition has a low performance measure.""" + G = barbell_graph(3, 0) + partition = [{0, 1, 4}, {2, 3, 5}] + assert 8 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7) + + def test_good_partition(self): + """Tests that a good partition has a high performance measure.""" + G = barbell_graph(3, 0) + partition = [{0, 1, 2}, {3, 4, 5}] + assert 14 / 15 == pytest.approx(partition_quality(G, partition)[1], abs=1e-7) + + +class TestCoverage: + """Unit tests for the :func:`coverage` function.""" + + def test_bad_partition(self): + """Tests that a poor partition has a low coverage measure.""" + G = barbell_graph(3, 0) + partition = [{0, 1, 4}, {2, 3, 5}] + assert 3 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7) + + def test_good_partition(self): + """Tests that a good partition has a high coverage measure.""" + G = barbell_graph(3, 0) + partition = [{0, 1, 2}, {3, 4, 5}] + assert 6 / 7 == pytest.approx(partition_quality(G, partition)[0], abs=1e-7) + + +def test_modularity(): + G = nx.barbell_graph(3, 0) + C = [{0, 1, 4}, {2, 3, 5}] + assert (-16 / (14**2)) == pytest.approx(modularity(G, C), abs=1e-7) + C = [{0, 1, 2}, {3, 4, 5}] + assert (35 * 2) / (14**2) == pytest.approx(modularity(G, C), abs=1e-7) + + n = 1000 + G = nx.erdos_renyi_graph(n, 0.09, seed=42, directed=True) + C = [set(range(n // 2)), set(range(n // 2, n))] + assert 0.00017154251389292754 
== pytest.approx(modularity(G, C), abs=1e-7) + + G = nx.margulis_gabber_galil_graph(10) + mid_value = G.number_of_nodes() // 2 + nodes = list(G.nodes) + C = [set(nodes[:mid_value]), set(nodes[mid_value:])] + assert 0.13 == pytest.approx(modularity(G, C), abs=1e-7) + + G = nx.DiGraph() + G.add_edges_from([(2, 1), (2, 3), (3, 4)]) + C = [{1, 2}, {3, 4}] + assert 2 / 9 == pytest.approx(modularity(G, C), abs=1e-7) + + +def test_modularity_resolution(): + G = nx.barbell_graph(3, 0) + C = [{0, 1, 4}, {2, 3, 5}] + assert modularity(G, C) == pytest.approx(3 / 7 - 100 / 14**2) + gamma = 2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2) + gamma = 0.2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx(3 / 7 - gamma * 100 / 14**2) + + C = [{0, 1, 2}, {3, 4, 5}] + assert modularity(G, C) == pytest.approx(6 / 7 - 98 / 14**2) + gamma = 2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2) + gamma = 0.2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx(6 / 7 - gamma * 98 / 14**2) + + G = nx.barbell_graph(5, 3) + C = [frozenset(range(5)), frozenset(range(8, 13)), frozenset(range(5, 8))] + gamma = 1 + result = modularity(G, C, resolution=gamma) + # This C is maximal for gamma=1: modularity = 0.518229 + assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2))) + gamma = 2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2))) + gamma = 0.2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((22 / 24) - gamma * (918 / (48**2))) + + C = [{0, 1, 2, 3}, {9, 10, 11, 12}, {5, 6, 7}, {4}, {8}] + gamma = 1 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2))) + gamma = 2.5 + result = modularity(G, C, resolution=gamma) + # This C is maximal for gamma=2.5: modularity = -0.06553819 + assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2))) + gamma = 0.2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((14 / 24) - gamma * (598 / (48**2))) + + C = [frozenset(range(8)), frozenset(range(8, 13))] + gamma = 1 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2))) + gamma = 2 + result = modularity(G, C, resolution=gamma) + assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2))) + gamma = 0.3 + result = modularity(G, C, resolution=gamma) + # This C is maximal for gamma=0.3: modularity = 0.805990 + assert result == pytest.approx((23 / 24) - gamma * (1170 / (48**2))) + + +def test_inter_community_edges_with_digraphs(): + G = nx.complete_graph(2, create_using=nx.DiGraph()) + partition = [{0}, {1}] + assert inter_community_edges(G, partition) == 2 + + G = nx.complete_graph(10, create_using=nx.DiGraph()) + partition = [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}] + assert inter_community_edges(G, partition) == 70 + + G = nx.cycle_graph(4, create_using=nx.DiGraph()) + partition = [{0, 1}, {2, 3}] + assert inter_community_edges(G, partition) == 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py new file mode 100644 index 0000000..ea019db --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/community/tests/test_utils.py @@ -0,0 +1,26 @@ +"""Unit 
tests for the :mod:`networkx.algorithms.community.utils` module.""" + +import networkx as nx + + +def test_is_partition(): + G = nx.empty_graph(3) + assert nx.community.is_partition(G, [{0, 1}, {2}]) + assert nx.community.is_partition(G, ({0, 1}, {2})) + assert nx.community.is_partition(G, ([0, 1], [2])) + assert nx.community.is_partition(G, [[0, 1], [2]]) + + +def test_not_covering(): + G = nx.empty_graph(3) + assert not nx.community.is_partition(G, [{0}, {1}]) + + +def test_not_disjoint(): + G = nx.empty_graph(3) + assert not nx.community.is_partition(G, [{0, 1}, {1, 2}]) + + +def test_not_node(): + G = nx.empty_graph(3) + assert not nx.community.is_partition(G, [{0, 1}, {3}]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py new file mode 100644 index 0000000..f9ae2ca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__init__.py @@ -0,0 +1,6 @@ +from .connected import * +from .strongly_connected import * +from .weakly_connected import * +from .attracting import * +from .biconnected import * +from .semiconnected import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..b2485a3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-312.pyc new file mode 100644 index 0000000..af32275 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/attracting.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-312.pyc new file mode 100644 index 0000000..097cfe7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/biconnected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-312.pyc new file mode 100644 index 0000000..109900d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-312.pyc new file mode 100644 index 0000000..d166efb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/semiconnected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-312.pyc new file mode 100644 index 0000000..46d3a20 Binary files 
/dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/strongly_connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-312.pyc new file mode 100644 index 0000000..1cacc39 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/__pycache__/weakly_connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py new file mode 100644 index 0000000..3d77cd9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/attracting.py @@ -0,0 +1,115 @@ +"""Attracting components.""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = [ + "number_attracting_components", + "attracting_components", + "is_attracting_component", +] + + +@not_implemented_for("undirected") +@nx._dispatchable +def attracting_components(G): + """Generates the attracting components in `G`. + + An attracting component in a directed graph `G` is a strongly connected + component with the property that a random walker on the graph will never + leave the component, once it enters the component. + + The nodes in attracting components can also be thought of as recurrent + nodes. If a random walker enters the attractor containing the node, then + the node will be visited infinitely often. + + To obtain induced subgraphs on each component use: + ``(G.subgraph(c).copy() for c in attracting_components(G))`` + + Parameters + ---------- + G : DiGraph, MultiDiGraph + The graph to be analyzed. + + Returns + ------- + attractors : generator of sets + A generator of sets of nodes, one for each attracting component of G. + + Raises + ------ + NetworkXNotImplemented + If the input graph is undirected. + + See Also + -------- + number_attracting_components + is_attracting_component + + """ + scc = list(nx.strongly_connected_components(G)) + cG = nx.condensation(G, scc) + for n in cG: + if cG.out_degree(n) == 0: + yield scc[n] + + +@not_implemented_for("undirected") +@nx._dispatchable +def number_attracting_components(G): + """Returns the number of attracting components in `G`. + + Parameters + ---------- + G : DiGraph, MultiDiGraph + The graph to be analyzed. + + Returns + ------- + n : int + The number of attracting components in G. + + Raises + ------ + NetworkXNotImplemented + If the input graph is undirected. + + See Also + -------- + attracting_components + is_attracting_component + + """ + return sum(1 for ac in attracting_components(G)) + + +@not_implemented_for("undirected") +@nx._dispatchable +def is_attracting_component(G): + """Returns True if `G` consists of a single attracting component. + + Parameters + ---------- + G : DiGraph, MultiDiGraph + The graph to be analyzed. + + Returns + ------- + attracting : bool + True if `G` has a single attracting component. Otherwise, False. + + Raises + ------ + NetworkXNotImplemented + If the input graph is undirected. 
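+
+    Examples
+    --------
+    Only the terminal 2-cycle below is attracting, so the graph as a whole
+    is not a single attracting component, but its subgraph on ``{1, 2}`` is
+    (a minimal sketch):
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
+    >>> nx.is_attracting_component(G)
+    False
+    >>> nx.is_attracting_component(G.subgraph([1, 2]))
+    True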
+ + See Also + -------- + attracting_components + number_attracting_components + + """ + ac = list(attracting_components(G)) + if len(ac) == 1: + return len(ac[0]) == len(G) + return False diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py new file mode 100644 index 0000000..fd0f386 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/biconnected.py @@ -0,0 +1,394 @@ +"""Biconnected components and articulation points.""" + +from itertools import chain + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = [ + "biconnected_components", + "biconnected_component_edges", + "is_biconnected", + "articulation_points", +] + + +@not_implemented_for("directed") +@nx._dispatchable +def is_biconnected(G): + """Returns True if the graph is biconnected, False otherwise. + + A graph is biconnected if, and only if, it cannot be disconnected by + removing only one node (and all edges incident on that node). If + removing a node increases the number of disconnected components + in the graph, that node is called an articulation point, or cut + vertex. A biconnected graph has no articulation points. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + Returns + ------- + biconnected : bool + True if the graph is biconnected, False otherwise. + + Raises + ------ + NetworkXNotImplemented + If the input graph is not undirected. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> print(nx.is_biconnected(G)) + False + >>> G.add_edge(0, 3) + >>> print(nx.is_biconnected(G)) + True + + See Also + -------- + biconnected_components + articulation_points + biconnected_component_edges + is_strongly_connected + is_weakly_connected + is_connected + is_semiconnected + + Notes + ----- + The algorithm to find articulation points and biconnected + components is implemented using a non-recursive depth-first-search + (DFS) that keeps track of the highest level that back edges reach + in the DFS tree. A node `n` is an articulation point if, and only + if, there exists a subtree rooted at `n` such that there is no + back edge from any successor of `n` that links to a predecessor of + `n` in the DFS tree. By keeping track of all the edges traversed + by the DFS we can obtain the biconnected components because all + edges of a bicomponent will be traversed consecutively between + articulation points. + + References + ---------- + .. [1] Hopcroft, J.; Tarjan, R. (1973). + "Efficient algorithms for graph manipulation". + Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 + + """ + bccs = biconnected_components(G) + try: + bcc = next(bccs) + except StopIteration: + # No bicomponents (empty graph?) + return False + try: + next(bccs) + except StopIteration: + # Only one bicomponent + return len(bcc) == len(G) + else: + # Multiple bicomponents + return False + + +@not_implemented_for("directed") +@nx._dispatchable +def biconnected_component_edges(G): + """Returns a generator of lists of edges, one list for each biconnected + component of the input graph. + + Biconnected components are maximal subgraphs such that the removal of a + node (and all edges incident on that node) will not disconnect the + subgraph. Note that nodes may be part of more than one biconnected + component. Those nodes are articulation points, or cut vertices. 
+ However, each edge belongs to one, and only one, biconnected component. + + Notice that by convention a dyad is considered a biconnected component. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + Returns + ------- + edges : generator of lists + Generator of lists of edges, one list for each bicomponent. + + Raises + ------ + NetworkXNotImplemented + If the input graph is not undirected. + + Examples + -------- + >>> G = nx.barbell_graph(4, 2) + >>> print(nx.is_biconnected(G)) + False + >>> bicomponents_edges = list(nx.biconnected_component_edges(G)) + >>> len(bicomponents_edges) + 5 + >>> G.add_edge(2, 8) + >>> print(nx.is_biconnected(G)) + True + >>> bicomponents_edges = list(nx.biconnected_component_edges(G)) + >>> len(bicomponents_edges) + 1 + + See Also + -------- + is_biconnected, + biconnected_components, + articulation_points, + + Notes + ----- + The algorithm to find articulation points and biconnected + components is implemented using a non-recursive depth-first-search + (DFS) that keeps track of the highest level that back edges reach + in the DFS tree. A node `n` is an articulation point if, and only + if, there exists a subtree rooted at `n` such that there is no + back edge from any successor of `n` that links to a predecessor of + `n` in the DFS tree. By keeping track of all the edges traversed + by the DFS we can obtain the biconnected components because all + edges of a bicomponent will be traversed consecutively between + articulation points. + + References + ---------- + .. [1] Hopcroft, J.; Tarjan, R. (1973). + "Efficient algorithms for graph manipulation". + Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 + + """ + yield from _biconnected_dfs(G, components=True) + + +@not_implemented_for("directed") +@nx._dispatchable +def biconnected_components(G): + """Returns a generator of sets of nodes, one set for each biconnected + component of the graph + + Biconnected components are maximal subgraphs such that the removal of a + node (and all edges incident on that node) will not disconnect the + subgraph. Note that nodes may be part of more than one biconnected + component. Those nodes are articulation points, or cut vertices. The + removal of articulation points will increase the number of connected + components of the graph. + + Notice that by convention a dyad is considered a biconnected component. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + Returns + ------- + nodes : generator + Generator of sets of nodes, one set for each biconnected component. + + Raises + ------ + NetworkXNotImplemented + If the input graph is not undirected. + + Examples + -------- + >>> G = nx.lollipop_graph(5, 1) + >>> print(nx.is_biconnected(G)) + False + >>> bicomponents = list(nx.biconnected_components(G)) + >>> len(bicomponents) + 2 + >>> G.add_edge(0, 5) + >>> print(nx.is_biconnected(G)) + True + >>> bicomponents = list(nx.biconnected_components(G)) + >>> len(bicomponents) + 1 + + You can generate a sorted list of biconnected components, largest + first, using sort. + + >>> G.remove_edge(0, 5) + >>> [len(c) for c in sorted(nx.biconnected_components(G), key=len, reverse=True)] + [5, 2] + + If you only want the largest connected component, it's more + efficient to use max instead of sort. 
+ + >>> Gc = max(nx.biconnected_components(G), key=len) + + To create the components as subgraphs use: + ``(G.subgraph(c).copy() for c in biconnected_components(G))`` + + See Also + -------- + is_biconnected + articulation_points + biconnected_component_edges + k_components : this function is a special case where k=2 + bridge_components : similar to this function, but is defined using + 2-edge-connectivity instead of 2-node-connectivity. + + Notes + ----- + The algorithm to find articulation points and biconnected + components is implemented using a non-recursive depth-first-search + (DFS) that keeps track of the highest level that back edges reach + in the DFS tree. A node `n` is an articulation point if, and only + if, there exists a subtree rooted at `n` such that there is no + back edge from any successor of `n` that links to a predecessor of + `n` in the DFS tree. By keeping track of all the edges traversed + by the DFS we can obtain the biconnected components because all + edges of a bicomponent will be traversed consecutively between + articulation points. + + References + ---------- + .. [1] Hopcroft, J.; Tarjan, R. (1973). + "Efficient algorithms for graph manipulation". + Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 + + """ + for comp in _biconnected_dfs(G, components=True): + yield set(chain.from_iterable(comp)) + + +@not_implemented_for("directed") +@nx._dispatchable +def articulation_points(G): + """Yield the articulation points, or cut vertices, of a graph. + + An articulation point or cut vertex is any node whose removal (along with + all its incident edges) increases the number of connected components of + a graph. An undirected connected graph without articulation points is + biconnected. Articulation points belong to more than one biconnected + component of a graph. + + Notice that by convention a dyad is considered a biconnected component. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + Yields + ------ + node + An articulation point in the graph. + + Raises + ------ + NetworkXNotImplemented + If the input graph is not undirected. + + Examples + -------- + + >>> G = nx.barbell_graph(4, 2) + >>> print(nx.is_biconnected(G)) + False + >>> len(list(nx.articulation_points(G))) + 4 + >>> G.add_edge(2, 8) + >>> print(nx.is_biconnected(G)) + True + >>> len(list(nx.articulation_points(G))) + 0 + + See Also + -------- + is_biconnected + biconnected_components + biconnected_component_edges + + Notes + ----- + The algorithm to find articulation points and biconnected + components is implemented using a non-recursive depth-first-search + (DFS) that keeps track of the highest level that back edges reach + in the DFS tree. A node `n` is an articulation point if, and only + if, there exists a subtree rooted at `n` such that there is no + back edge from any successor of `n` that links to a predecessor of + `n` in the DFS tree. By keeping track of all the edges traversed + by the DFS we can obtain the biconnected components because all + edges of a bicomponent will be traversed consecutively between + articulation points. + + References + ---------- + .. [1] Hopcroft, J.; Tarjan, R. (1973). + "Efficient algorithms for graph manipulation". + Communications of the ACM 16: 372–378. 
doi:10.1145/362248.362272 + + """ + seen = set() + for articulation in _biconnected_dfs(G, components=False): + if articulation not in seen: + seen.add(articulation) + yield articulation + + +@not_implemented_for("directed") +def _biconnected_dfs(G, components=True): + # depth-first search algorithm to generate articulation points + # and biconnected components + visited = set() + for start in G: + if start in visited: + continue + discovery = {start: 0} # time of first discovery of node during search + low = {start: 0} + root_children = 0 + visited.add(start) + edge_stack = [] + stack = [(start, start, iter(G[start]))] + edge_index = {} + while stack: + grandparent, parent, children = stack[-1] + try: + child = next(children) + if grandparent == child: + continue + if child in visited: + if discovery[child] <= discovery[parent]: # back edge + low[parent] = min(low[parent], discovery[child]) + if components: + edge_index[parent, child] = len(edge_stack) + edge_stack.append((parent, child)) + else: + low[child] = discovery[child] = len(discovery) + visited.add(child) + stack.append((parent, child, iter(G[child]))) + if components: + edge_index[parent, child] = len(edge_stack) + edge_stack.append((parent, child)) + + except StopIteration: + stack.pop() + if len(stack) > 1: + if low[parent] >= discovery[grandparent]: + if components: + ind = edge_index[grandparent, parent] + yield edge_stack[ind:] + del edge_stack[ind:] + + else: + yield grandparent + low[grandparent] = min(low[parent], low[grandparent]) + elif stack: # length 1 so grandparent is root + root_children += 1 + if components: + ind = edge_index[grandparent, parent] + yield edge_stack[ind:] + del edge_stack[ind:] + if not components: + # root node is articulation point if it has more than 1 child + if root_children > 1: + yield start diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py new file mode 100644 index 0000000..1884789 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/connected.py @@ -0,0 +1,216 @@ +"""Connected components.""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +from ...utils import arbitrary_element + +__all__ = [ + "number_connected_components", + "connected_components", + "is_connected", + "node_connected_component", +] + + +@not_implemented_for("directed") +@nx._dispatchable +def connected_components(G): + """Generate connected components. + + Parameters + ---------- + G : NetworkX graph + An undirected graph + + Returns + ------- + comp : generator of sets + A generator of sets of nodes, one for each component of G. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + Generate a sorted list of connected components, largest first. + + >>> G = nx.path_graph(4) + >>> nx.add_path(G, [10, 11, 12]) + >>> [len(c) for c in sorted(nx.connected_components(G), key=len, reverse=True)] + [4, 3] + + If you only want the largest connected component, it's more + efficient to use max instead of sort. + + >>> largest_cc = max(nx.connected_components(G), key=len) + + To create the induced subgraph of each component use: + + >>> S = [G.subgraph(c).copy() for c in nx.connected_components(G)] + + See Also + -------- + strongly_connected_components + weakly_connected_components + + Notes + ----- + For undirected graphs only. 
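+
+    The components are found with a plain breadth-first search over the
+    adjacency structure, so generating all of them takes O(n + m) time for
+    a graph with n nodes and m edges.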
+ + """ + seen = set() + n = len(G) + for v in G: + if v not in seen: + c = _plain_bfs(G, n - len(seen), v) + seen.update(c) + yield c + + +@not_implemented_for("directed") +@nx._dispatchable +def number_connected_components(G): + """Returns the number of connected components. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + Returns + ------- + n : integer + Number of connected components + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)]) + >>> nx.number_connected_components(G) + 3 + + See Also + -------- + connected_components + number_weakly_connected_components + number_strongly_connected_components + + Notes + ----- + For undirected graphs only. + + """ + return sum(1 for cc in connected_components(G)) + + +@not_implemented_for("directed") +@nx._dispatchable +def is_connected(G): + """Returns True if the graph is connected, False otherwise. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + Returns + ------- + connected : bool + True if the graph is connected, false otherwise. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> print(nx.is_connected(G)) + True + + See Also + -------- + is_strongly_connected + is_weakly_connected + is_semiconnected + is_biconnected + connected_components + + Notes + ----- + For undirected graphs only. + + """ + n = len(G) + if n == 0: + raise nx.NetworkXPointlessConcept( + "Connectivity is undefined for the null graph." + ) + return sum(1 for node in _plain_bfs(G, n, arbitrary_element(G))) == len(G) + + +@not_implemented_for("directed") +@nx._dispatchable +def node_connected_component(G, n): + """Returns the set of nodes in the component of graph containing node n. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph. + + n : node label + A node in G + + Returns + ------- + comp : set + A set of nodes in the component of G containing node n. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (1, 2), (5, 6), (3, 4)]) + >>> nx.node_connected_component(G, 0) # nodes of component that contains node 0 + {0, 1, 2} + + See Also + -------- + connected_components + + Notes + ----- + For undirected graphs only. + + """ + return _plain_bfs(G, len(G), n) + + +def _plain_bfs(G, n, source): + """A fast BFS node generator""" + adj = G._adj + seen = {source} + nextlevel = [source] + while nextlevel: + thislevel = nextlevel + nextlevel = [] + for v in thislevel: + for w in adj[v]: + if w not in seen: + seen.add(w) + nextlevel.append(w) + if len(seen) == n: + return seen + return seen diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py new file mode 100644 index 0000000..9ca5d76 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/semiconnected.py @@ -0,0 +1,71 @@ +"""Semiconnectedness.""" + +import networkx as nx +from networkx.utils import not_implemented_for, pairwise + +__all__ = ["is_semiconnected"] + + +@not_implemented_for("undirected") +@nx._dispatchable +def is_semiconnected(G): + r"""Returns True if the graph is semiconnected, False otherwise. + + A graph is semiconnected if and only if for any pair of nodes, either one + is reachable from the other, or they are mutually reachable. 
+ + This function uses a theorem that states that a DAG is semiconnected + if for any topological sort, for node $v_n$ in that sort, there is an + edge $(v_i, v_{i+1})$. That allows us to check if a non-DAG `G` is + semiconnected by condensing the graph: i.e. constructing a new graph `H` + with nodes being the strongly connected components of `G`, and edges + (scc_1, scc_2) if there is a edge $(v_1, v_2)$ in `G` for some + $v_1 \in scc_1$ and $v_2 \in scc_2$. That results in a DAG, so we compute + the topological sort of `H` and check if for every $n$ there is an edge + $(scc_n, scc_{n+1})$. + + Parameters + ---------- + G : NetworkX graph + A directed graph. + + Returns + ------- + semiconnected : bool + True if the graph is semiconnected, False otherwise. + + Raises + ------ + NetworkXNotImplemented + If the input graph is undirected. + + NetworkXPointlessConcept + If the graph is empty. + + Examples + -------- + >>> G = nx.path_graph(4, create_using=nx.DiGraph()) + >>> print(nx.is_semiconnected(G)) + True + >>> G = nx.DiGraph([(1, 2), (3, 2)]) + >>> print(nx.is_semiconnected(G)) + False + + See Also + -------- + is_strongly_connected + is_weakly_connected + is_connected + is_biconnected + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept( + "Connectivity is undefined for the null graph." + ) + + if not nx.is_weakly_connected(G): + return False + + H = nx.condensation(G) + + return all(H.has_edge(u, v) for u, v in pairwise(nx.topological_sort(H))) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py new file mode 100644 index 0000000..393728f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/strongly_connected.py @@ -0,0 +1,351 @@ +"""Strongly connected components.""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = [ + "number_strongly_connected_components", + "strongly_connected_components", + "is_strongly_connected", + "kosaraju_strongly_connected_components", + "condensation", +] + + +@not_implemented_for("undirected") +@nx._dispatchable +def strongly_connected_components(G): + """Generate nodes in strongly connected components of graph. + + Parameters + ---------- + G : NetworkX Graph + A directed graph. + + Returns + ------- + comp : generator of sets + A generator of sets of nodes, one for each strongly connected + component of G. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + Generate a sorted list of strongly connected components, largest first. + + >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) + >>> nx.add_cycle(G, [10, 11, 12]) + >>> [ + ... len(c) + ... for c in sorted(nx.strongly_connected_components(G), key=len, reverse=True) + ... ] + [4, 3] + + If you only want the largest component, it's more efficient to + use max instead of sort. + + >>> largest = max(nx.strongly_connected_components(G), key=len) + + See Also + -------- + connected_components + weakly_connected_components + kosaraju_strongly_connected_components + + Notes + ----- + Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_. + Nonrecursive version of algorithm. + + References + ---------- + .. [1] Depth-first search and linear graph algorithms, R. Tarjan + SIAM Journal of Computing 1(2):146-160, (1972). + + .. [2] On finding the strongly connected components in a directed graph. + E. Nuutila and E. 
Soisalon-Soinen + Information Processing Letters 49(1): 9-14, (1994).. + + """ + preorder = {} + lowlink = {} + scc_found = set() + scc_queue = [] + i = 0 # Preorder counter + neighbors = {v: iter(G[v]) for v in G} + for source in G: + if source not in scc_found: + queue = [source] + while queue: + v = queue[-1] + if v not in preorder: + i = i + 1 + preorder[v] = i + done = True + for w in neighbors[v]: + if w not in preorder: + queue.append(w) + done = False + break + if done: + lowlink[v] = preorder[v] + for w in G[v]: + if w not in scc_found: + if preorder[w] > preorder[v]: + lowlink[v] = min([lowlink[v], lowlink[w]]) + else: + lowlink[v] = min([lowlink[v], preorder[w]]) + queue.pop() + if lowlink[v] == preorder[v]: + scc = {v} + while scc_queue and preorder[scc_queue[-1]] > preorder[v]: + k = scc_queue.pop() + scc.add(k) + scc_found.update(scc) + yield scc + else: + scc_queue.append(v) + + +@not_implemented_for("undirected") +@nx._dispatchable +def kosaraju_strongly_connected_components(G, source=None): + """Generate nodes in strongly connected components of graph. + + Parameters + ---------- + G : NetworkX Graph + A directed graph. + + Returns + ------- + comp : generator of sets + A generator of sets of nodes, one for each strongly connected + component of G. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + Generate a sorted list of strongly connected components, largest first. + + >>> G = nx.cycle_graph(4, create_using=nx.DiGraph()) + >>> nx.add_cycle(G, [10, 11, 12]) + >>> [ + ... len(c) + ... for c in sorted( + ... nx.kosaraju_strongly_connected_components(G), key=len, reverse=True + ... ) + ... ] + [4, 3] + + If you only want the largest component, it's more efficient to + use max instead of sort. + + >>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len) + + See Also + -------- + strongly_connected_components + + Notes + ----- + Uses Kosaraju's algorithm. + + """ + post = list(nx.dfs_postorder_nodes(G.reverse(copy=False), source=source)) + + seen = set() + while post: + r = post.pop() + if r in seen: + continue + c = nx.dfs_preorder_nodes(G, r) + new = {v for v in c if v not in seen} + seen.update(new) + yield new + + +@not_implemented_for("undirected") +@nx._dispatchable +def number_strongly_connected_components(G): + """Returns number of strongly connected components in graph. + + Parameters + ---------- + G : NetworkX graph + A directed graph. + + Returns + ------- + n : integer + Number of strongly connected components + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + >>> G = nx.DiGraph( + ... [(0, 1), (1, 2), (2, 0), (2, 3), (4, 5), (3, 4), (5, 6), (6, 3), (6, 7)] + ... ) + >>> nx.number_strongly_connected_components(G) + 3 + + See Also + -------- + strongly_connected_components + number_connected_components + number_weakly_connected_components + + Notes + ----- + For directed graphs only. + """ + return sum(1 for scc in strongly_connected_components(G)) + + +@not_implemented_for("undirected") +@nx._dispatchable +def is_strongly_connected(G): + """Test directed graph for strong connectivity. + + A directed graph is strongly connected if and only if every vertex in + the graph is reachable from every other vertex. + + Parameters + ---------- + G : NetworkX Graph + A directed graph. + + Returns + ------- + connected : bool + True if the graph is strongly connected, False otherwise. 
+ + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0), (2, 4), (4, 2)]) + >>> nx.is_strongly_connected(G) + True + >>> G.remove_edge(2, 3) + >>> nx.is_strongly_connected(G) + False + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + See Also + -------- + is_weakly_connected + is_semiconnected + is_connected + is_biconnected + strongly_connected_components + + Notes + ----- + For directed graphs only. + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept( + """Connectivity is undefined for the null graph.""" + ) + + return len(next(strongly_connected_components(G))) == len(G) + + +@not_implemented_for("undirected") +@nx._dispatchable(returns_graph=True) +def condensation(G, scc=None): + """Returns the condensation of G. + + The condensation of G is the graph with each of the strongly connected + components contracted into a single node. + + Parameters + ---------- + G : NetworkX DiGraph + A directed graph. + + scc: list or generator (optional, default=None) + Strongly connected components. If provided, the elements in + `scc` must partition the nodes in `G`. If not provided, it will be + calculated as scc=nx.strongly_connected_components(G). + + Returns + ------- + C : NetworkX DiGraph + The condensation graph C of G. The node labels are integers + corresponding to the index of the component in the list of + strongly connected components of G. C has a graph attribute named + 'mapping' with a dictionary mapping the original nodes to the + nodes in C to which they belong. Each node in C also has a node + attribute 'members' with the set of original nodes in G that + form the SCC that the node in C represents. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + Contracting two sets of strongly connected nodes into two distinct SCC + using the barbell graph. + + >>> G = nx.barbell_graph(4, 0) + >>> G.remove_edge(3, 4) + >>> G = nx.DiGraph(G) + >>> H = nx.condensation(G) + >>> H.nodes.data() + NodeDataView({0: {'members': {0, 1, 2, 3}}, 1: {'members': {4, 5, 6, 7}}}) + >>> H.graph["mapping"] + {0: 0, 1: 0, 2: 0, 3: 0, 4: 1, 5: 1, 6: 1, 7: 1} + + Contracting a complete graph into one single SCC. + + >>> G = nx.complete_graph(7, create_using=nx.DiGraph) + >>> H = nx.condensation(G) + >>> H.nodes + NodeView((0,)) + >>> H.nodes.data() + NodeDataView({0: {'members': {0, 1, 2, 3, 4, 5, 6}}}) + + Notes + ----- + After contracting all strongly connected components to a single node, + the resulting graph is a directed acyclic graph. + + """ + if scc is None: + scc = nx.strongly_connected_components(G) + mapping = {} + members = {} + C = nx.DiGraph() + # Add mapping dict as graph attribute + C.graph["mapping"] = mapping + if len(G) == 0: + return C + for i, component in enumerate(scc): + members[i] = component + mapping.update((n, i) for n in component) + number_of_components = i + 1 + C.add_nodes_from(range(number_of_components)) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) + # Add a list of members (ie original nodes) to each node (ie scc) in C. 
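+    # For the barbell example in the docstring this yields, e.g.,
+    # C.nodes[0]["members"] == {0, 1, 2, 3}.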
+ nx.set_node_attributes(C, members, "members") + return C diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..dfb4e80 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-312.pyc new file mode 100644 index 0000000..97ebb26 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_attracting.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-312.pyc new file mode 100644 index 0000000..5de7b41 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_biconnected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-312.pyc new file mode 100644 index 0000000..37dc6a5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-312.pyc new file mode 100644 index 0000000..c14733f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_semiconnected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-312.pyc new file mode 100644 index 0000000..ea37d71 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_strongly_connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-312.pyc new file mode 100644 index 0000000..16ea897 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/__pycache__/test_weakly_connected.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py new file mode 100644 index 0000000..336c40d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_attracting.py @@ -0,0 +1,70 @@ +import pytest + +import networkx as nx +from networkx import NetworkXNotImplemented + + +class TestAttractingComponents: + @classmethod + def setup_class(cls): + cls.G1 = nx.DiGraph() + cls.G1.add_edges_from( + [ + (5, 11), + (11, 2), + (11, 9), + (11, 10), + (7, 11), + (7, 8), + (8, 9), + (3, 8), + (3, 10), + ] + ) + cls.G2 = nx.DiGraph() + cls.G2.add_edges_from([(0, 1), (0, 2), (1, 1), (1, 2), (2, 1)]) + + cls.G3 = nx.DiGraph() + cls.G3.add_edges_from([(0, 1), (1, 2), (2, 1), (0, 3), (3, 4), (4, 3)]) + + cls.G4 = nx.DiGraph() + + def test_attracting_components(self): + ac = list(nx.attracting_components(self.G1)) + assert {2} in ac + assert {9} in ac + assert {10} in ac + + ac = list(nx.attracting_components(self.G2)) + ac = [tuple(sorted(x)) for x in ac] + assert ac == [(1, 2)] + + ac = list(nx.attracting_components(self.G3)) + ac = [tuple(sorted(x)) for x in ac] + assert (1, 2) in ac + assert (3, 4) in ac + assert len(ac) == 2 + + ac = list(nx.attracting_components(self.G4)) + assert ac == [] + + def test_number_attacting_components(self): + assert nx.number_attracting_components(self.G1) == 3 + assert nx.number_attracting_components(self.G2) == 1 + assert nx.number_attracting_components(self.G3) == 2 + assert nx.number_attracting_components(self.G4) == 0 + + def test_is_attracting_component(self): + assert not nx.is_attracting_component(self.G1) + assert not nx.is_attracting_component(self.G2) + assert not nx.is_attracting_component(self.G3) + g2 = self.G3.subgraph([1, 2]) + assert nx.is_attracting_component(g2) + assert not nx.is_attracting_component(self.G4) + + def test_connected_raise(self): + G = nx.Graph() + with pytest.raises(NetworkXNotImplemented): + next(nx.attracting_components(G)) + pytest.raises(NetworkXNotImplemented, nx.number_attracting_components, G) + pytest.raises(NetworkXNotImplemented, nx.is_attracting_component, G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py new file mode 100644 index 0000000..19d2d88 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_biconnected.py @@ -0,0 +1,248 @@ +import pytest + +import networkx as nx +from networkx import NetworkXNotImplemented + + +def assert_components_edges_equal(x, y): + sx = {frozenset(frozenset(e) for e in c) for c in x} + sy = {frozenset(frozenset(e) for e in c) for c in y} + assert sx == sy + + +def assert_components_equal(x, y): + sx = {frozenset(c) for c in x} + sy = {frozenset(c) for c in y} + assert sx == sy + + +def test_barbell(): + G = nx.barbell_graph(8, 4) + nx.add_path(G, [7, 20, 21, 22]) + nx.add_cycle(G, [22, 23, 24, 25]) + pts = set(nx.articulation_points(G)) + assert pts == {7, 8, 9, 10, 11, 12, 20, 21, 22} + + answer = [ + {12, 13, 14, 15, 16, 17, 18, 19}, + {0, 1, 2, 3, 4, 5, 6, 7}, + {22, 23, 24, 25}, + {11, 12}, + {10, 11}, + {9, 10}, + {8, 9}, + {7, 8}, + {21, 22}, + {20, 21}, + {7, 20}, + ] + assert_components_equal(list(nx.biconnected_components(G)), answer) + + G.add_edge(2, 17) + pts = set(nx.articulation_points(G)) + assert pts == {7, 20, 21, 22} + + +def test_articulation_points_repetitions(): + G = nx.Graph() + G.add_edges_from([(0, 
1), (1, 2), (1, 3)]) + assert list(nx.articulation_points(G)) == [1] + + +def test_articulation_points_cycle(): + G = nx.cycle_graph(3) + nx.add_cycle(G, [1, 3, 4]) + pts = set(nx.articulation_points(G)) + assert pts == {1} + + +def test_is_biconnected(): + G = nx.cycle_graph(3) + assert nx.is_biconnected(G) + nx.add_cycle(G, [1, 3, 4]) + assert not nx.is_biconnected(G) + + +def test_empty_is_biconnected(): + G = nx.empty_graph(5) + assert not nx.is_biconnected(G) + G.add_edge(0, 1) + assert not nx.is_biconnected(G) + + +def test_biconnected_components_cycle(): + G = nx.cycle_graph(3) + nx.add_cycle(G, [1, 3, 4]) + answer = [{0, 1, 2}, {1, 3, 4}] + assert_components_equal(list(nx.biconnected_components(G)), answer) + + +def test_biconnected_components1(): + # graph example from + # https://web.archive.org/web/20121229123447/http://www.ibluemojo.com/school/articul_algorithm.html + edges = [ + (0, 1), + (0, 5), + (0, 6), + (0, 14), + (1, 5), + (1, 6), + (1, 14), + (2, 4), + (2, 10), + (3, 4), + (3, 15), + (4, 6), + (4, 7), + (4, 10), + (5, 14), + (6, 14), + (7, 9), + (8, 9), + (8, 12), + (8, 13), + (10, 15), + (11, 12), + (11, 13), + (12, 13), + ] + G = nx.Graph(edges) + pts = set(nx.articulation_points(G)) + assert pts == {4, 6, 7, 8, 9} + comps = list(nx.biconnected_component_edges(G)) + answer = [ + [(3, 4), (15, 3), (10, 15), (10, 4), (2, 10), (4, 2)], + [(13, 12), (13, 8), (11, 13), (12, 11), (8, 12)], + [(9, 8)], + [(7, 9)], + [(4, 7)], + [(6, 4)], + [(14, 0), (5, 1), (5, 0), (14, 5), (14, 1), (6, 14), (6, 0), (1, 6), (0, 1)], + ] + assert_components_edges_equal(comps, answer) + + +def test_biconnected_components2(): + G = nx.Graph() + nx.add_cycle(G, "ABC") + nx.add_cycle(G, "CDE") + nx.add_cycle(G, "FIJHG") + nx.add_cycle(G, "GIJ") + G.add_edge("E", "G") + comps = list(nx.biconnected_component_edges(G)) + answer = [ + [ + tuple("GF"), + tuple("FI"), + tuple("IG"), + tuple("IJ"), + tuple("JG"), + tuple("JH"), + tuple("HG"), + ], + [tuple("EG")], + [tuple("CD"), tuple("DE"), tuple("CE")], + [tuple("AB"), tuple("BC"), tuple("AC")], + ] + assert_components_edges_equal(comps, answer) + + +def test_biconnected_davis(): + D = nx.davis_southern_women_graph() + bcc = list(nx.biconnected_components(D))[0] + assert set(D) == bcc # All nodes in a giant bicomponent + # So no articulation points + assert len(list(nx.articulation_points(D))) == 0 + + +def test_biconnected_karate(): + K = nx.karate_club_graph() + answer = [ + { + 0, + 1, + 2, + 3, + 7, + 8, + 9, + 12, + 13, + 14, + 15, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + }, + {0, 4, 5, 6, 10, 16}, + {0, 11}, + ] + bcc = list(nx.biconnected_components(K)) + assert_components_equal(bcc, answer) + assert set(nx.articulation_points(K)) == {0} + + +def test_biconnected_eppstein(): + # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py + G1 = nx.Graph( + { + 0: [1, 2, 5], + 1: [0, 5], + 2: [0, 3, 4], + 3: [2, 4, 5, 6], + 4: [2, 3, 5, 6], + 5: [0, 1, 3, 4], + 6: [3, 4], + } + ) + G2 = nx.Graph( + { + 0: [2, 5], + 1: [3, 8], + 2: [0, 3, 5], + 3: [1, 2, 6, 8], + 4: [7], + 5: [0, 2], + 6: [3, 8], + 7: [4], + 8: [1, 3, 6], + } + ) + assert nx.is_biconnected(G1) + assert not nx.is_biconnected(G2) + answer_G2 = [{1, 3, 6, 8}, {0, 2, 5}, {2, 3}, {4, 7}] + bcc = list(nx.biconnected_components(G2)) + assert_components_equal(bcc, answer_G2) + + +def test_null_graph(): + G = nx.Graph() + assert not nx.is_biconnected(G) + assert list(nx.biconnected_components(G)) == [] + assert 
list(nx.biconnected_component_edges(G)) == [] + assert list(nx.articulation_points(G)) == [] + + +def test_connected_raise(): + DG = nx.DiGraph() + with pytest.raises(NetworkXNotImplemented): + next(nx.biconnected_components(DG)) + with pytest.raises(NetworkXNotImplemented): + next(nx.biconnected_component_edges(DG)) + with pytest.raises(NetworkXNotImplemented): + next(nx.articulation_points(DG)) + pytest.raises(NetworkXNotImplemented, nx.is_biconnected, DG) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py new file mode 100644 index 0000000..207214c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_connected.py @@ -0,0 +1,138 @@ +import pytest + +import networkx as nx +from networkx import NetworkXNotImplemented +from networkx import convert_node_labels_to_integers as cnlti +from networkx.classes.tests import dispatch_interface + + +class TestConnected: + @classmethod + def setup_class(cls): + G1 = cnlti(nx.grid_2d_graph(2, 2), first_label=0, ordering="sorted") + G2 = cnlti(nx.lollipop_graph(3, 3), first_label=4, ordering="sorted") + G3 = cnlti(nx.house_graph(), first_label=10, ordering="sorted") + cls.G = nx.union(G1, G2) + cls.G = nx.union(cls.G, G3) + cls.DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1) + + cls.gc = [] + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) + C = [[3, 4, 5, 7], [1, 2, 8], [6]] + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) + C = [[2, 3, 4], [1]] + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) + C = [[1, 2, 3]] + cls.gc.append((G, C)) + + # Eppstein's tests + G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) + C = [[0], [1], [2], [3], [4], [5], [6]] + cls.gc.append((G, C)) + + G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) + C = [[0, 1, 2], [3, 4]] + cls.gc.append((G, C)) + + G = nx.DiGraph() + C = [] + cls.gc.append((G, C)) + + def test_connected_components(self): + # Test duplicated below + cc = nx.connected_components + G = self.G + C = { + frozenset([0, 1, 2, 3]), + frozenset([4, 5, 6, 7, 8, 9]), + frozenset([10, 11, 12, 13, 14]), + } + assert {frozenset(g) for g in cc(G)} == C + + def test_connected_components_nx_loopback(self): + # This tests the @nx._dispatchable mechanism, treating nx.connected_components + # as if it were a re-implementation from another package. + # Test duplicated from above + cc = nx.connected_components + G = dispatch_interface.convert(self.G) + C = { + frozenset([0, 1, 2, 3]), + frozenset([4, 5, 6, 7, 8, 9]), + frozenset([10, 11, 12, 13, 14]), + } + if "nx_loopback" in nx.config.backends or not nx.config.backends: + # If `nx.config.backends` is empty, then `_dispatchable.__call__` takes a + # "fast path" and does not check graph inputs, so using an unknown backend + # here will still work. + assert {frozenset(g) for g in cc(G)} == C + else: + # This raises, because "nx_loopback" is not registered as a backend. 
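+            # (The ImportError below comes from the dispatch layer, which
+            # refuses graphs whose declared backend is not installed.)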
+ with pytest.raises( + ImportError, match="'nx_loopback' backend is not installed" + ): + cc(G) + + def test_number_connected_components(self): + ncc = nx.number_connected_components + assert ncc(self.G) == 3 + + def test_number_connected_components2(self): + ncc = nx.number_connected_components + assert ncc(self.grid) == 1 + + def test_connected_components2(self): + cc = nx.connected_components + G = self.grid + C = {frozenset([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16])} + assert {frozenset(g) for g in cc(G)} == C + + def test_node_connected_components(self): + ncc = nx.node_connected_component + G = self.grid + C = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16} + assert ncc(G, 1) == C + + def test_is_connected(self): + assert nx.is_connected(self.grid) + G = nx.Graph() + G.add_nodes_from([1, 2]) + assert not nx.is_connected(G) + + def test_connected_raise(self): + with pytest.raises(NetworkXNotImplemented): + next(nx.connected_components(self.DG)) + pytest.raises(NetworkXNotImplemented, nx.number_connected_components, self.DG) + pytest.raises(NetworkXNotImplemented, nx.node_connected_component, self.DG, 1) + pytest.raises(NetworkXNotImplemented, nx.is_connected, self.DG) + pytest.raises(nx.NetworkXPointlessConcept, nx.is_connected, nx.Graph()) + + def test_connected_mutability(self): + G = self.grid + seen = set() + for component in nx.connected_components(G): + assert len(seen & component) == 0 + seen.update(component) + component.clear() diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py new file mode 100644 index 0000000..6376bbf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_semiconnected.py @@ -0,0 +1,55 @@ +from itertools import chain + +import pytest + +import networkx as nx + + +class TestIsSemiconnected: + def test_undirected(self): + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, nx.is_semiconnected, nx.MultiGraph()) + + def test_empty(self): + pytest.raises(nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.DiGraph()) + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_semiconnected, nx.MultiDiGraph() + ) + + def test_single_node_graph(self): + G = nx.DiGraph() + G.add_node(0) + assert nx.is_semiconnected(G) + + def test_path(self): + G = nx.path_graph(100, create_using=nx.DiGraph()) + assert nx.is_semiconnected(G) + G.add_edge(100, 99) + assert not nx.is_semiconnected(G) + + def test_cycle(self): + G = nx.cycle_graph(100, create_using=nx.DiGraph()) + assert nx.is_semiconnected(G) + G = nx.path_graph(100, create_using=nx.DiGraph()) + G.add_edge(0, 99) + assert nx.is_semiconnected(G) + + def test_tree(self): + G = nx.DiGraph() + G.add_edges_from( + chain.from_iterable([(i, 2 * i + 1), (i, 2 * i + 2)] for i in range(100)) + ) + assert not nx.is_semiconnected(G) + + def test_dumbbell(self): + G = nx.cycle_graph(100, create_using=nx.DiGraph()) + G.add_edges_from((i + 100, (i + 1) % 100 + 100) for i in range(100)) + assert not nx.is_semiconnected(G) # G is disconnected. 
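+        # At this point the two 100-node cycles are not even weakly
+        # connected, and is_semiconnected checks weak connectivity up front.
+        assert not nx.is_weakly_connected(G)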
+ G.add_edge(100, 99) + assert nx.is_semiconnected(G) + + def test_alternating_path(self): + G = nx.DiGraph( + chain.from_iterable([(i, i - 1), (i, i + 1)] for i in range(0, 100, 2)) + ) + assert not nx.is_semiconnected(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py new file mode 100644 index 0000000..27f4098 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_strongly_connected.py @@ -0,0 +1,193 @@ +import pytest + +import networkx as nx +from networkx import NetworkXNotImplemented + + +class TestStronglyConnected: + @classmethod + def setup_class(cls): + cls.gc = [] + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) + C = {frozenset([3, 4, 5, 7]), frozenset([1, 2, 8]), frozenset([6])} + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) + C = {frozenset([2, 3, 4]), frozenset([1])} + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) + C = {frozenset([1, 2, 3])} + cls.gc.append((G, C)) + + # Eppstein's tests + G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) + C = { + frozenset([0]), + frozenset([1]), + frozenset([2]), + frozenset([3]), + frozenset([4]), + frozenset([5]), + frozenset([6]), + } + cls.gc.append((G, C)) + + G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) + C = {frozenset([0, 1, 2]), frozenset([3, 4])} + cls.gc.append((G, C)) + + def test_tarjan(self): + scc = nx.strongly_connected_components + for G, C in self.gc: + assert {frozenset(g) for g in scc(G)} == C + + def test_kosaraju(self): + scc = nx.kosaraju_strongly_connected_components + for G, C in self.gc: + assert {frozenset(g) for g in scc(G)} == C + + def test_number_strongly_connected_components(self): + ncc = nx.number_strongly_connected_components + for G, C in self.gc: + assert ncc(G) == len(C) + + def test_is_strongly_connected(self): + for G, C in self.gc: + if len(C) == 1: + assert nx.is_strongly_connected(G) + else: + assert not nx.is_strongly_connected(G) + + def test_contract_scc1(self): + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) + scc = list(nx.strongly_connected_components(G)) + cG = nx.condensation(G, scc) + # DAG + assert nx.is_directed_acyclic_graph(cG) + # nodes + assert sorted(cG.nodes()) == [0, 1, 2, 3] + # edges + mapping = {} + for i, component in enumerate(scc): + for n in component: + mapping[n] = i + edge = (mapping[2], mapping[3]) + assert cG.has_edge(*edge) + edge = (mapping[2], mapping[5]) + assert cG.has_edge(*edge) + edge = (mapping[3], mapping[5]) + assert cG.has_edge(*edge) + + def test_contract_scc_isolate(self): + # Bug found and fixed in [1687]. 
+ G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(2, 1) + scc = list(nx.strongly_connected_components(G)) + cG = nx.condensation(G, scc) + assert list(cG.nodes()) == [0] + assert list(cG.edges()) == [] + + def test_contract_scc_edge(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(2, 1) + G.add_edge(2, 3) + G.add_edge(3, 4) + G.add_edge(4, 3) + scc = list(nx.strongly_connected_components(G)) + cG = nx.condensation(G, scc) + assert sorted(cG.nodes()) == [0, 1] + if 1 in scc[0]: + edge = (0, 1) + else: + edge = (1, 0) + assert list(cG.edges()) == [edge] + + def test_condensation_mapping_and_members(self): + G, C = self.gc[1] + C = sorted(C, key=len, reverse=True) + cG = nx.condensation(G) + mapping = cG.graph["mapping"] + assert all(n in G for n in mapping) + assert all(0 == cN for n, cN in mapping.items() if n in C[0]) + assert all(1 == cN for n, cN in mapping.items() if n in C[1]) + for n, d in cG.nodes(data=True): + assert set(C[n]) == cG.nodes[n]["members"] + + def test_null_graph(self): + G = nx.DiGraph() + assert list(nx.strongly_connected_components(G)) == [] + assert list(nx.kosaraju_strongly_connected_components(G)) == [] + assert len(nx.condensation(G)) == 0 + pytest.raises( + nx.NetworkXPointlessConcept, nx.is_strongly_connected, nx.DiGraph() + ) + + def test_connected_raise(self): + G = nx.Graph() + with pytest.raises(NetworkXNotImplemented): + next(nx.strongly_connected_components(G)) + with pytest.raises(NetworkXNotImplemented): + next(nx.kosaraju_strongly_connected_components(G)) + pytest.raises(NetworkXNotImplemented, nx.is_strongly_connected, G) + pytest.raises(NetworkXNotImplemented, nx.condensation, G) + + strong_cc_methods = ( + nx.strongly_connected_components, + nx.kosaraju_strongly_connected_components, + ) + + @pytest.mark.parametrize("get_components", strong_cc_methods) + def test_connected_mutability(self, get_components): + DG = nx.path_graph(5, create_using=nx.DiGraph) + G = nx.disjoint_union(DG, DG) + seen = set() + for component in get_components(G): + assert len(seen & component) == 0 + seen.update(component) + component.clear() diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py new file mode 100644 index 0000000..f014478 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/tests/test_weakly_connected.py @@ -0,0 +1,96 @@ +import pytest + +import networkx as nx +from networkx import NetworkXNotImplemented + + +class TestWeaklyConnected: + @classmethod + def setup_class(cls): + cls.gc = [] + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 8), + (3, 4), + (3, 7), + (4, 5), + (5, 3), + (5, 6), + (7, 4), + (7, 6), + (8, 1), + (8, 7), + ] + ) + C = [[3, 4, 5, 7], [1, 2, 8], [6]] + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (1, 3), (1, 4), (4, 2), (3, 4), (2, 3)]) + C = [[2, 3, 4], [1]] + cls.gc.append((G, C)) + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (2, 3), (3, 2), (2, 1)]) + C = [[1, 2, 3]] + cls.gc.append((G, C)) + + # Eppstein's tests + G = nx.DiGraph({0: [1], 1: [2, 3], 2: [4, 5], 3: [4, 5], 4: [6], 5: [], 6: []}) + C = [[0], [1], [2], [3], [4], [5], [6]] + cls.gc.append((G, C)) + + G = nx.DiGraph({0: [1], 1: [2, 3, 4], 2: [0, 3], 3: [4], 4: [3]}) + C = [[0, 1, 2], [3, 4]] + cls.gc.append((G, C)) + + def test_weakly_connected_components(self): + for G, C in self.gc: + U = G.to_undirected() + w = {frozenset(g) 
for g in nx.weakly_connected_components(G)} + c = {frozenset(g) for g in nx.connected_components(U)} + assert w == c + + def test_number_weakly_connected_components(self): + for G, C in self.gc: + U = G.to_undirected() + w = nx.number_weakly_connected_components(G) + c = nx.number_connected_components(U) + assert w == c + + def test_is_weakly_connected(self): + for G, C in self.gc: + U = G.to_undirected() + assert nx.is_weakly_connected(G) == nx.is_connected(U) + + def test_null_graph(self): + G = nx.DiGraph() + assert list(nx.weakly_connected_components(G)) == [] + assert nx.number_weakly_connected_components(G) == 0 + with pytest.raises(nx.NetworkXPointlessConcept): + next(nx.is_weakly_connected(G)) + + def test_connected_raise(self): + G = nx.Graph() + with pytest.raises(NetworkXNotImplemented): + next(nx.weakly_connected_components(G)) + pytest.raises(NetworkXNotImplemented, nx.number_weakly_connected_components, G) + pytest.raises(NetworkXNotImplemented, nx.is_weakly_connected, G) + + def test_connected_mutability(self): + DG = nx.path_graph(5, create_using=nx.DiGraph) + G = nx.disjoint_union(DG, DG) + seen = set() + for component in nx.weakly_connected_components(G): + assert len(seen & component) == 0 + seen.update(component) + component.clear() + + +def test_is_weakly_connected_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + nx.is_weakly_connected(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py new file mode 100644 index 0000000..564adb9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/components/weakly_connected.py @@ -0,0 +1,197 @@ +"""Weakly connected components.""" + +import networkx as nx +from networkx.utils.decorators import not_implemented_for + +__all__ = [ + "number_weakly_connected_components", + "weakly_connected_components", + "is_weakly_connected", +] + + +@not_implemented_for("undirected") +@nx._dispatchable +def weakly_connected_components(G): + """Generate weakly connected components of G. + + Parameters + ---------- + G : NetworkX graph + A directed graph + + Returns + ------- + comp : generator of sets + A generator of sets of nodes, one for each weakly connected + component of G. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + Generate a sorted list of weakly connected components, largest first. + + >>> G = nx.path_graph(4, create_using=nx.DiGraph()) + >>> nx.add_path(G, [10, 11, 12]) + >>> [ + ... len(c) + ... for c in sorted(nx.weakly_connected_components(G), key=len, reverse=True) + ... ] + [4, 3] + + If you only want the largest component, it's more efficient to + use max instead of sort: + + >>> largest_cc = max(nx.weakly_connected_components(G), key=len) + + See Also + -------- + connected_components + strongly_connected_components + + Notes + ----- + For directed graphs only. + + """ + seen = set() + n = len(G) # must be outside the loop to avoid performance hit with graph views + for v in G: + if v not in seen: + c = set(_plain_bfs(G, n - len(seen), v)) + seen.update(c) + yield c + + +@not_implemented_for("undirected") +@nx._dispatchable +def number_weakly_connected_components(G): + """Returns the number of weakly connected components in G. + + Parameters + ---------- + G : NetworkX graph + A directed graph. 
+ + Returns + ------- + n : integer + Number of weakly connected components + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (2, 1), (3, 4)]) + >>> nx.number_weakly_connected_components(G) + 2 + + See Also + -------- + weakly_connected_components + number_connected_components + number_strongly_connected_components + + Notes + ----- + For directed graphs only. + + """ + return sum(1 for wcc in weakly_connected_components(G)) + + +@not_implemented_for("undirected") +@nx._dispatchable +def is_weakly_connected(G): + """Test directed graph for weak connectivity. + + A directed graph is weakly connected if and only if the graph + is connected when the direction of the edge between nodes is ignored. + + Note that if a graph is strongly connected (i.e. the graph is connected + even when we account for directionality), it is by definition weakly + connected as well. + + Parameters + ---------- + G : NetworkX Graph + A directed graph. + + Returns + ------- + connected : bool + True if the graph is weakly connected, False otherwise. + + Raises + ------ + NetworkXNotImplemented + If G is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (2, 1)]) + >>> G.add_node(3) + >>> nx.is_weakly_connected(G) # node 3 is not connected to the graph + False + >>> G.add_edge(2, 3) + >>> nx.is_weakly_connected(G) + True + + See Also + -------- + is_strongly_connected + is_semiconnected + is_connected + is_biconnected + weakly_connected_components + + Notes + ----- + For directed graphs only. + + """ + if len(G) == 0: + raise nx.NetworkXPointlessConcept( + """Connectivity is undefined for the null graph.""" + ) + + return len(next(weakly_connected_components(G))) == len(G) + + +def _plain_bfs(G, n, source): + """A fast BFS node generator + + The direction of the edge between nodes is ignored. + + For directed graphs only. 
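+
+    The argument ``n`` is the number of nodes not yet assigned to any
+    component; the search returns early once the current component has
+    absorbed all of them, skipping the adjacency lookups that would
+    otherwise remain.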
+ + """ + Gsucc = G._succ + Gpred = G._pred + seen = {source} + nextlevel = [source] + + yield source + while nextlevel: + thislevel = nextlevel + nextlevel = [] + for v in thislevel: + for w in Gsucc[v]: + if w not in seen: + seen.add(w) + nextlevel.append(w) + yield w + for w in Gpred[v]: + if w not in seen: + seen.add(w) + nextlevel.append(w) + yield w + if len(seen) == n: + return diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py new file mode 100644 index 0000000..d08a360 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__init__.py @@ -0,0 +1,11 @@ +"""Connectivity and cut algorithms""" + +from .connectivity import * +from .cuts import * +from .edge_augmentation import * +from .edge_kcomponents import * +from .disjoint_paths import * +from .kcomponents import * +from .kcutsets import * +from .stoerwagner import * +from .utils import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..91bacd7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-312.pyc new file mode 100644 index 0000000..474e390 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-312.pyc new file mode 100644 index 0000000..14d7804 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/cuts.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-312.pyc new file mode 100644 index 0000000..978a51d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/disjoint_paths.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-312.pyc new file mode 100644 index 0000000..43d67ed Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_augmentation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-312.pyc new file mode 100644 index 0000000..914ddfe Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/edge_kcomponents.cpython-312.pyc differ diff 
--git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-312.pyc new file mode 100644 index 0000000..fff1b09 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcomponents.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-312.pyc new file mode 100644 index 0000000..17b46cd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/kcutsets.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-312.pyc new file mode 100644 index 0000000..8234335 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/stoerwagner.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..b4a2013 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/__pycache__/utils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py new file mode 100644 index 0000000..210413f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/connectivity.py @@ -0,0 +1,811 @@ +""" +Flow based connectivity algorithms +""" + +import itertools +from operator import itemgetter + +import networkx as nx + +# Define the default maximum flow function to use in all flow based +# connectivity algorithms. +from networkx.algorithms.flow import ( + boykov_kolmogorov, + build_residual_network, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +) + +from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity + +default_flow_func = edmonds_karp + +__all__ = [ + "average_node_connectivity", + "local_node_connectivity", + "node_connectivity", + "local_edge_connectivity", + "edge_connectivity", + "all_pairs_node_connectivity", +] + + +@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4}, preserve_graph_attrs={"auxiliary"}) +def local_node_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): + r"""Computes local node connectivity for nodes s and t. + + Local node connectivity for two non adjacent nodes s and t is the + minimum number of nodes that must be removed (along with their incident + edges) to disconnect them. + + This is a flow based implementation of node connectivity. We compute the + maximum flow on an auxiliary digraph build from the original input + graph (see below for details). + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + s : node + Source node + + t : node + Target node + + flow_func : function + A function for computing the maximum flow among a pair of nodes. 
+ The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The choice + of the default function may change from version to version and + should not be relied on. Default value: None. + + auxiliary : NetworkX DiGraph + Auxiliary digraph to compute flow based node connectivity. It has + to have a graph attribute called mapping with a dictionary mapping + node names in G and in the auxiliary digraph. If provided + it will be reused instead of recreated. Default value: None. + + residual : NetworkX DiGraph + Residual network to compute maximum flow. If provided it will be + reused instead of recreated. Default value: None. + + cutoff : integer, float, or None (default: None) + If specified, the maximum flow algorithm will terminate when the + flow value reaches or exceeds the cutoff. This only works for flows + that support the cutoff parameter (most do) and is ignored otherwise. + + Returns + ------- + K : integer + local node connectivity for nodes s and t + + Examples + -------- + This function is not imported in the base NetworkX namespace, so you + have to explicitly import it from the connectivity package: + + >>> from networkx.algorithms.connectivity import local_node_connectivity + + We use in this example the platonic icosahedral graph, which has node + connectivity 5. + + >>> G = nx.icosahedral_graph() + >>> local_node_connectivity(G, 0, 6) + 5 + + If you need to compute local connectivity on several pairs of + nodes in the same graph, it is recommended that you reuse the + data structures that NetworkX uses in the computation: the + auxiliary digraph for node connectivity, and the residual + network for the underlying maximum flow computation. + + Example of how to compute local node connectivity among + all pairs of nodes of the platonic icosahedral graph reusing + the data structures. + + >>> import itertools + >>> # You also have to explicitly import the function for + >>> # building the auxiliary digraph from the connectivity package + >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity + >>> H = build_auxiliary_node_connectivity(G) + >>> # And the function for building the residual network from the + >>> # flow package + >>> from networkx.algorithms.flow import build_residual_network + >>> # Note that the auxiliary digraph has an edge attribute named capacity + >>> R = build_residual_network(H, "capacity") + >>> result = dict.fromkeys(G, dict()) + >>> # Reuse the auxiliary digraph and the residual network by passing them + >>> # as parameters + >>> for u, v in itertools.combinations(G, 2): + ... k = local_node_connectivity(G, u, v, auxiliary=H, residual=R) + ... result[u][v] = k + >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2)) + True + + You can also use alternative flow algorithms for computing node + connectivity. For instance, in dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better than + the default :meth:`edmonds_karp` which is faster for sparse + networks with highly skewed degree distributions. Alternative flow + functions have to be explicitly imported from the flow package. 
+ + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> local_node_connectivity(G, 0, 6, flow_func=shortest_augmenting_path) + 5 + + Notes + ----- + This is a flow based implementation of node connectivity. We compute the + maximum flow using, by default, the :meth:`edmonds_karp` algorithm (see: + :meth:`maximum_flow`) on an auxiliary digraph build from the original + input graph: + + For an undirected graph G having `n` nodes and `m` edges we derive a + directed graph H with `2n` nodes and `2m+n` arcs by replacing each + original node `v` with two nodes `v_A`, `v_B` linked by an (internal) + arc in H. Then for each edge (`u`, `v`) in G we add two arcs + (`u_B`, `v_A`) and (`v_B`, `u_A`) in H. Finally we set the attribute + capacity = 1 for each arc in H [1]_ . + + For a directed graph G having `n` nodes and `m` arcs we derive a + directed graph H with `2n` nodes and `m+n` arcs by replacing each + original node `v` with two nodes `v_A`, `v_B` linked by an (internal) + arc (`v_A`, `v_B`) in H. Then for each arc (`u`, `v`) in G we add one arc + (`u_B`, `v_A`) in H. Finally we set the attribute capacity = 1 for + each arc in H. + + This is equal to the local node connectivity because the value of + a maximum s-t-flow is equal to the capacity of a minimum s-t-cut. + + See also + -------- + :meth:`local_edge_connectivity` + :meth:`node_connectivity` + :meth:`minimum_node_cut` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and + Erlebach, 'Network Analysis: Methodological Foundations', Lecture + Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. + http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf + + """ + if flow_func is None: + flow_func = default_flow_func + + if auxiliary is None: + H = build_auxiliary_node_connectivity(G) + else: + H = auxiliary + + mapping = H.graph.get("mapping", None) + if mapping is None: + raise nx.NetworkXError("Invalid auxiliary digraph.") + + kwargs = {"flow_func": flow_func, "residual": residual} + + if flow_func is not preflow_push: + kwargs["cutoff"] = cutoff + + if flow_func is shortest_augmenting_path: + kwargs["two_phase"] = True + + return nx.maximum_flow_value(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) + + +@nx._dispatchable +def node_connectivity(G, s=None, t=None, flow_func=None): + r"""Returns node connectivity for a graph or digraph G. + + Node connectivity is equal to the minimum number of nodes that + must be removed to disconnect G or render it trivial. If source + and target nodes are provided, this function returns the local node + connectivity: the minimum number of nodes that must be removed to break + all paths from source to target in G. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + s : node + Source node. Optional. Default value: None. + + t : node + Target node. Optional. Default value: None. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. 
The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + Returns + ------- + K : integer + Node connectivity of G, or local node connectivity if source + and target are provided. + + Examples + -------- + >>> # Platonic icosahedral graph is 5-node-connected + >>> G = nx.icosahedral_graph() + >>> nx.node_connectivity(G) + 5 + + You can use alternative flow algorithms for the underlying maximum + flow computation. In dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better + than the default :meth:`edmonds_karp`, which is faster for + sparse networks with highly skewed degree distributions. Alternative + flow functions have to be explicitly imported from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> nx.node_connectivity(G, flow_func=shortest_augmenting_path) + 5 + + If you specify a pair of nodes (source and target) as parameters, + this function returns the value of local node connectivity. + + >>> nx.node_connectivity(G, 3, 7) + 5 + + If you need to perform several local computations among different + pairs of nodes on the same graph, it is recommended that you reuse + the data structures used in the maximum flow computations. See + :meth:`local_node_connectivity` for details. + + Notes + ----- + This is a flow based implementation of node connectivity. The + algorithm works by solving $O((n-\delta-1+\delta(\delta-1)/2))$ + maximum flow problems on an auxiliary digraph. Where $\delta$ + is the minimum degree of G. For details about the auxiliary + digraph and the computation of local node connectivity see + :meth:`local_node_connectivity`. This implementation is based + on algorithm 11 in [1]_. + + See also + -------- + :meth:`local_node_connectivity` + :meth:`edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + if (s is not None and t is None) or (s is None and t is not None): + raise nx.NetworkXError("Both source and target must be specified.") + + # Local node connectivity + if s is not None and t is not None: + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + return local_node_connectivity(G, s, t, flow_func=flow_func) + + # Global node connectivity + if G.is_directed(): + if not nx.is_weakly_connected(G): + return 0 + iter_func = itertools.permutations + # It is necessary to consider both predecessors + # and successors for directed graphs + + def neighbors(v): + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + + else: + if not nx.is_connected(G): + return 0 + iter_func = itertools.combinations + neighbors = G.neighbors + + # Reuse the auxiliary digraph and the residual network + H = build_auxiliary_node_connectivity(G) + R = build_residual_network(H, "capacity") + kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R} + + # Pick a node with minimum degree + # Node connectivity is bounded by degree. 
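+    # Why these pairs suffice (Esfahanian): either some minimum cut excludes
+    # v, in which case it separates v from one of its non-neighbors, or every
+    # minimum cut contains v, in which case it separates two non-adjacent
+    # neighbors of v. Both families of pairs are checked below.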
+ v, K = min(G.degree(), key=itemgetter(1)) + # compute local node connectivity with all its non-neighbors nodes + for w in set(G) - set(neighbors(v)) - {v}: + kwargs["cutoff"] = K + K = min(K, local_node_connectivity(G, v, w, **kwargs)) + # Also for non adjacent pairs of neighbors of v + for x, y in iter_func(neighbors(v), 2): + if y in G[x]: + continue + kwargs["cutoff"] = K + K = min(K, local_node_connectivity(G, x, y, **kwargs)) + + return K + + +@nx._dispatchable +def average_node_connectivity(G, flow_func=None): + r"""Returns the average connectivity of a graph G. + + The average connectivity `\bar{\kappa}` of a graph G is the average + of local node connectivity over all pairs of nodes of G [1]_ . + + .. math:: + + \bar{\kappa}(G) = \frac{\sum_{u,v} \kappa_{G}(u,v)}{{n \choose 2}} + + Parameters + ---------- + + G : NetworkX graph + Undirected graph + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See :meth:`local_node_connectivity` + for details. The choice of the default function may change from + version to version and should not be relied on. Default value: None. + + Returns + ------- + K : float + Average node connectivity + + See also + -------- + :meth:`local_node_connectivity` + :meth:`node_connectivity` + :meth:`edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Beineke, L., O. Oellermann, and R. Pippert (2002). The average + connectivity of a graph. Discrete mathematics 252(1-3), 31-45. + http://www.sciencedirect.com/science/article/pii/S0012365X01001807 + + """ + if G.is_directed(): + iter_func = itertools.permutations + else: + iter_func = itertools.combinations + + # Reuse the auxiliary digraph and the residual network + H = build_auxiliary_node_connectivity(G) + R = build_residual_network(H, "capacity") + kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R} + + num, den = 0, 0 + for u, v in iter_func(G, 2): + num += local_node_connectivity(G, u, v, **kwargs) + den += 1 + + if den == 0: # Null Graph + return 0 + return num / den + + +@nx._dispatchable +def all_pairs_node_connectivity(G, nbunch=None, flow_func=None): + """Compute node connectivity between all pairs of nodes of G. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + nbunch: container + Container of nodes. If provided node connectivity will be computed + only over pairs of nodes in nbunch. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + Returns + ------- + all_pairs : dict + A dictionary with node connectivity between all pairs of nodes + in G, or in nbunch if provided. 
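+
+    Examples
+    --------
+    Every pair of nodes in a cycle graph is joined by exactly two
+    internally disjoint paths, so all pairwise connectivities are 2:
+
+    >>> G = nx.cycle_graph(4)
+    >>> K = nx.all_pairs_node_connectivity(G)
+    >>> K[0][2]
+    2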
+ + See also + -------- + :meth:`local_node_connectivity` + :meth:`edge_connectivity` + :meth:`local_edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + """ + if nbunch is None: + nbunch = G + else: + nbunch = set(nbunch) + + directed = G.is_directed() + if directed: + iter_func = itertools.permutations + else: + iter_func = itertools.combinations + + all_pairs = {n: {} for n in nbunch} + + # Reuse auxiliary digraph and residual network + H = build_auxiliary_node_connectivity(G) + mapping = H.graph["mapping"] + R = build_residual_network(H, "capacity") + kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R} + + for u, v in iter_func(nbunch, 2): + K = local_node_connectivity(G, u, v, **kwargs) + all_pairs[u][v] = K + if not directed: + all_pairs[v][u] = K + + return all_pairs + + +@nx._dispatchable(graphs={"G": 0, "auxiliary?": 4}) +def local_edge_connectivity( + G, s, t, flow_func=None, auxiliary=None, residual=None, cutoff=None +): + r"""Returns local edge connectivity for nodes s and t in G. + + Local edge connectivity for two nodes s and t is the minimum number + of edges that must be removed to disconnect them. + + This is a flow based implementation of edge connectivity. We compute the + maximum flow on an auxiliary digraph build from the original + network (see below for details). This is equal to the local edge + connectivity because the value of a maximum s-t-flow is equal to the + capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ . + + Parameters + ---------- + G : NetworkX graph + Undirected or directed graph + + s : node + Source node + + t : node + Target node + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + auxiliary : NetworkX DiGraph + Auxiliary digraph for computing flow based edge connectivity. If + provided it will be reused instead of recreated. Default value: None. + + residual : NetworkX DiGraph + Residual network to compute maximum flow. If provided it will be + reused instead of recreated. Default value: None. + + cutoff : integer, float, or None (default: None) + If specified, the maximum flow algorithm will terminate when the + flow value reaches or exceeds the cutoff. This only works for flows + that support the cutoff parameter (most do) and is ignored otherwise. + + Returns + ------- + K : integer + local edge connectivity for nodes s and t. + + Examples + -------- + This function is not imported in the base NetworkX namespace, so you + have to explicitly import it from the connectivity package: + + >>> from networkx.algorithms.connectivity import local_edge_connectivity + + We use in this example the platonic icosahedral graph, which has edge + connectivity 5. 
+ + >>> G = nx.icosahedral_graph() + >>> local_edge_connectivity(G, 0, 6) + 5 + + If you need to compute local connectivity on several pairs of + nodes in the same graph, it is recommended that you reuse the + data structures that NetworkX uses in the computation: the + auxiliary digraph for edge connectivity, and the residual + network for the underlying maximum flow computation. + + Example of how to compute local edge connectivity among + all pairs of nodes of the platonic icosahedral graph reusing + the data structures. + + >>> import itertools + >>> # You also have to explicitly import the function for + >>> # building the auxiliary digraph from the connectivity package + >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity + >>> H = build_auxiliary_edge_connectivity(G) + >>> # And the function for building the residual network from the + >>> # flow package + >>> from networkx.algorithms.flow import build_residual_network + >>> # Note that the auxiliary digraph has an edge attribute named capacity + >>> R = build_residual_network(H, "capacity") + >>> result = dict.fromkeys(G, dict()) + >>> # Reuse the auxiliary digraph and the residual network by passing them + >>> # as parameters + >>> for u, v in itertools.combinations(G, 2): + ... k = local_edge_connectivity(G, u, v, auxiliary=H, residual=R) + ... result[u][v] = k + >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2)) + True + + You can also use alternative flow algorithms for computing edge + connectivity. For instance, in dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better than + the default :meth:`edmonds_karp` which is faster for sparse + networks with highly skewed degree distributions. Alternative flow + functions have to be explicitly imported from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> local_edge_connectivity(G, 0, 6, flow_func=shortest_augmenting_path) + 5 + + Notes + ----- + This is a flow based implementation of edge connectivity. We compute the + maximum flow using, by default, the :meth:`edmonds_karp` algorithm on an + auxiliary digraph build from the original input graph: + + If the input graph is undirected, we replace each edge (`u`,`v`) with + two reciprocal arcs (`u`, `v`) and (`v`, `u`) and then we set the attribute + 'capacity' for each arc to 1. If the input graph is directed we simply + add the 'capacity' attribute. This is an implementation of algorithm 1 + in [1]_. + + The maximum flow in the auxiliary network is equal to the local edge + connectivity because the value of a maximum s-t-flow is equal to the + capacity of a minimum s-t-cut (Ford and Fulkerson theorem). + + See also + -------- + :meth:`edge_connectivity` + :meth:`local_node_connectivity` + :meth:`node_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. 
+ http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + if flow_func is None: + flow_func = default_flow_func + + if auxiliary is None: + H = build_auxiliary_edge_connectivity(G) + else: + H = auxiliary + + kwargs = {"flow_func": flow_func, "residual": residual} + + if flow_func is not preflow_push: + kwargs["cutoff"] = cutoff + + if flow_func is shortest_augmenting_path: + kwargs["two_phase"] = True + + return nx.maximum_flow_value(H, s, t, **kwargs) + + +@nx._dispatchable +def edge_connectivity(G, s=None, t=None, flow_func=None, cutoff=None): + r"""Returns the edge connectivity of the graph or digraph G. + + The edge connectivity is equal to the minimum number of edges that + must be removed to disconnect G or render it trivial. If source + and target nodes are provided, this function returns the local edge + connectivity: the minimum number of edges that must be removed to + break all paths from source to target in G. + + Parameters + ---------- + G : NetworkX graph + Undirected or directed graph + + s : node + Source node. Optional. Default value: None. + + t : node + Target node. Optional. Default value: None. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + cutoff : integer, float, or None (default: None) + If specified, the maximum flow algorithm will terminate when the + flow value reaches or exceeds the cutoff. This only works for flows + that support the cutoff parameter (most do) and is ignored otherwise. + + Returns + ------- + K : integer + Edge connectivity for G, or local edge connectivity if source + and target were provided + + Examples + -------- + >>> # Platonic icosahedral graph is 5-edge-connected + >>> G = nx.icosahedral_graph() + >>> nx.edge_connectivity(G) + 5 + + You can use alternative flow algorithms for the underlying + maximum flow computation. In dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better + than the default :meth:`edmonds_karp`, which is faster for + sparse networks with highly skewed degree distributions. + Alternative flow functions have to be explicitly imported + from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> nx.edge_connectivity(G, flow_func=shortest_augmenting_path) + 5 + + If you specify a pair of nodes (source and target) as parameters, + this function returns the value of local edge connectivity. + + >>> nx.edge_connectivity(G, 3, 7) + 5 + + If you need to perform several local computations among different + pairs of nodes on the same graph, it is recommended that you reuse + the data structures used in the maximum flow computations. See + :meth:`local_edge_connectivity` for details. + + Notes + ----- + This is a flow based implementation of global edge connectivity. + For undirected graphs the algorithm works by finding a 'small' + dominating set of nodes of G (see algorithm 7 in [1]_ ) and + computing local maximum flow (see :meth:`local_edge_connectivity`) + between an arbitrary node in the dominating set and the rest of + nodes in it. 
This is an implementation of algorithm 6 in [1]_ . + For directed graphs, the algorithm does n calls to the maximum + flow function. This is an implementation of algorithm 8 in [1]_ . + + See also + -------- + :meth:`local_edge_connectivity` + :meth:`local_node_connectivity` + :meth:`node_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + :meth:`k_edge_components` + :meth:`k_edge_subgraphs` + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + if (s is not None and t is None) or (s is None and t is not None): + raise nx.NetworkXError("Both source and target must be specified.") + + # Local edge connectivity + if s is not None and t is not None: + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + return local_edge_connectivity(G, s, t, flow_func=flow_func, cutoff=cutoff) + + # Global edge connectivity + # reuse auxiliary digraph and residual network + H = build_auxiliary_edge_connectivity(G) + R = build_residual_network(H, "capacity") + kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R} + + if G.is_directed(): + # Algorithm 8 in [1] + if not nx.is_weakly_connected(G): + return 0 + + # initial value for \lambda is minimum degree + L = min(d for n, d in G.degree()) + nodes = list(G) + n = len(nodes) + + if cutoff is not None: + L = min(cutoff, L) + + for i in range(n): + kwargs["cutoff"] = L + try: + L = min(L, local_edge_connectivity(G, nodes[i], nodes[i + 1], **kwargs)) + except IndexError: # last node! + L = min(L, local_edge_connectivity(G, nodes[i], nodes[0], **kwargs)) + return L + else: # undirected + # Algorithm 6 in [1] + if not nx.is_connected(G): + return 0 + + # initial value for \lambda is minimum degree + L = min(d for n, d in G.degree()) + + if cutoff is not None: + L = min(cutoff, L) + + # A dominating set is \lambda-covering + # We need a dominating set with at least two nodes + for node in G: + D = nx.dominating_set(G, start_with=node) + v = D.pop() + if D: + break + else: + # in complete graphs the dominating sets will always be of one node + # thus we return min degree + return L + + for w in D: + kwargs["cutoff"] = L + L = min(L, local_edge_connectivity(G, v, w, **kwargs)) + + return L diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py new file mode 100644 index 0000000..e7806e1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/cuts.py @@ -0,0 +1,616 @@ +""" +Flow based cut algorithms +""" + +import itertools + +import networkx as nx + +# Define the default maximum flow function to use in all flow based +# cut algorithms. +from networkx.algorithms.flow import build_residual_network, edmonds_karp + +from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity + +default_flow_func = edmonds_karp + +__all__ = [ + "minimum_st_node_cut", + "minimum_node_cut", + "minimum_st_edge_cut", + "minimum_edge_cut", +] + + +@nx._dispatchable( + graphs={"G": 0, "auxiliary?": 4}, + preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}}, + preserve_graph_attrs={"auxiliary"}, +) +def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): + """Returns the edges of the cut-set of a minimum (s, t)-cut. 
+ + This function returns the set of edges of minimum cardinality that, + if removed, would destroy all paths among source and target in G. + Edge weights are not considered. See :meth:`minimum_cut` for + computing minimum cuts considering edge weights. + + Parameters + ---------- + G : NetworkX graph + + s : node + Source node for the flow. + + t : node + Sink node for the flow. + + auxiliary : NetworkX DiGraph + Auxiliary digraph to compute flow based node connectivity. It has + to have a graph attribute called mapping with a dictionary mapping + node names in G and in the auxiliary digraph. If provided + it will be reused instead of recreated. Default value: None. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See :meth:`node_connectivity` for + details. The choice of the default function may change from version + to version and should not be relied on. Default value: None. + + residual : NetworkX DiGraph + Residual network to compute maximum flow. If provided it will be + reused instead of recreated. Default value: None. + + Returns + ------- + cutset : set + Set of edges that, if removed from the graph, will disconnect it. + + See also + -------- + :meth:`minimum_cut` + :meth:`minimum_node_cut` + :meth:`minimum_edge_cut` + :meth:`stoer_wagner` + :meth:`node_connectivity` + :meth:`edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Examples + -------- + This function is not imported in the base NetworkX namespace, so you + have to explicitly import it from the connectivity package: + + >>> from networkx.algorithms.connectivity import minimum_st_edge_cut + + We use in this example the platonic icosahedral graph, which has edge + connectivity 5. + + >>> G = nx.icosahedral_graph() + >>> len(minimum_st_edge_cut(G, 0, 6)) + 5 + + If you need to compute local edge cuts on several pairs of + nodes in the same graph, it is recommended that you reuse the + data structures that NetworkX uses in the computation: the + auxiliary digraph for edge connectivity, and the residual + network for the underlying maximum flow computation. + + Example of how to compute local edge cuts among all pairs of + nodes of the platonic icosahedral graph reusing the data + structures. + + >>> import itertools + >>> # You also have to explicitly import the function for + >>> # building the auxiliary digraph from the connectivity package + >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity + >>> H = build_auxiliary_edge_connectivity(G) + >>> # And the function for building the residual network from the + >>> # flow package + >>> from networkx.algorithms.flow import build_residual_network + >>> # Note that the auxiliary digraph has an edge attribute named capacity + >>> R = build_residual_network(H, "capacity") + >>> result = dict.fromkeys(G, dict()) + >>> # Reuse the auxiliary digraph and the residual network by passing them + >>> # as parameters + >>> for u, v in itertools.combinations(G, 2): + ... k = len(minimum_st_edge_cut(G, u, v, auxiliary=H, residual=R)) + ... 
result[u][v] = k + >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2)) + True + + You can also use alternative flow algorithms for computing edge + cuts. For instance, in dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better than + the default :meth:`edmonds_karp` which is faster for sparse + networks with highly skewed degree distributions. Alternative flow + functions have to be explicitly imported from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> len(minimum_st_edge_cut(G, 0, 6, flow_func=shortest_augmenting_path)) + 5 + + """ + if flow_func is None: + flow_func = default_flow_func + + if auxiliary is None: + H = build_auxiliary_edge_connectivity(G) + else: + H = auxiliary + + kwargs = {"capacity": "capacity", "flow_func": flow_func, "residual": residual} + + cut_value, partition = nx.minimum_cut(H, s, t, **kwargs) + reachable, non_reachable = partition + # Any edge in the original graph linking the two sets in the + # partition is part of the edge cutset + cutset = set() + for u, nbrs in ((n, G[n]) for n in reachable): + cutset.update((u, v) for v in nbrs if v in non_reachable) + + return cutset + + +@nx._dispatchable( + graphs={"G": 0, "auxiliary?": 4}, + preserve_node_attrs={"auxiliary": {"id": None}}, + preserve_graph_attrs={"auxiliary"}, +) +def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None): + r"""Returns a set of nodes of minimum cardinality that disconnect source + from target in G. + + This function returns the set of nodes of minimum cardinality that, + if removed, would destroy all paths among source and target in G. + + Parameters + ---------- + G : NetworkX graph + + s : node + Source node. + + t : node + Target node. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The choice + of the default function may change from version to version and + should not be relied on. Default value: None. + + auxiliary : NetworkX DiGraph + Auxiliary digraph to compute flow based node connectivity. It has + to have a graph attribute called mapping with a dictionary mapping + node names in G and in the auxiliary digraph. If provided + it will be reused instead of recreated. Default value: None. + + residual : NetworkX DiGraph + Residual network to compute maximum flow. If provided it will be + reused instead of recreated. Default value: None. + + Returns + ------- + cutset : set + Set of nodes that, if removed, would destroy all paths between + source and target in G. + + Returns an empty set if source and target are either in different + components or are directly connected by an edge, as no node removal + can destroy the path. + + Examples + -------- + This function is not imported in the base NetworkX namespace, so you + have to explicitly import it from the connectivity package: + + >>> from networkx.algorithms.connectivity import minimum_st_node_cut + + We use in this example the platonic icosahedral graph, which has node + connectivity 5. 
+ + >>> G = nx.icosahedral_graph() + >>> len(minimum_st_node_cut(G, 0, 6)) + 5 + + If you need to compute local st cuts between several pairs of + nodes in the same graph, it is recommended that you reuse the + data structures that NetworkX uses in the computation: the + auxiliary digraph for node connectivity and node cuts, and the + residual network for the underlying maximum flow computation. + + Example of how to compute local st node cuts reusing the data + structures: + + >>> # You also have to explicitly import the function for + >>> # building the auxiliary digraph from the connectivity package + >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity + >>> H = build_auxiliary_node_connectivity(G) + >>> # And the function for building the residual network from the + >>> # flow package + >>> from networkx.algorithms.flow import build_residual_network + >>> # Note that the auxiliary digraph has an edge attribute named capacity + >>> R = build_residual_network(H, "capacity") + >>> # Reuse the auxiliary digraph and the residual network by passing them + >>> # as parameters + >>> len(minimum_st_node_cut(G, 0, 6, auxiliary=H, residual=R)) + 5 + + You can also use alternative flow algorithms for computing minimum st + node cuts. For instance, in dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better than + the default :meth:`edmonds_karp` which is faster for sparse + networks with highly skewed degree distributions. Alternative flow + functions have to be explicitly imported from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> len(minimum_st_node_cut(G, 0, 6, flow_func=shortest_augmenting_path)) + 5 + + Notes + ----- + This is a flow based implementation of minimum node cut. The algorithm + is based in solving a number of maximum flow computations to determine + the capacity of the minimum cut on an auxiliary directed network that + corresponds to the minimum node cut of G. It handles both directed + and undirected graphs. This implementation is based on algorithm 11 + in [1]_. + + See also + -------- + :meth:`minimum_node_cut` + :meth:`minimum_edge_cut` + :meth:`stoer_wagner` + :meth:`node_connectivity` + :meth:`edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + if auxiliary is None: + H = build_auxiliary_node_connectivity(G) + else: + H = auxiliary + + mapping = H.graph.get("mapping", None) + if mapping is None: + raise nx.NetworkXError("Invalid auxiliary digraph.") + if G.has_edge(s, t) or G.has_edge(t, s): + return set() + kwargs = {"flow_func": flow_func, "residual": residual, "auxiliary": H} + + # The edge cut in the auxiliary digraph corresponds to the node cut in the + # original graph. + edge_cut = minimum_st_edge_cut(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) + # Each node in the original graph maps to two nodes of the auxiliary graph + node_cut = {H.nodes[node]["id"] for edge in edge_cut for node in edge} + return node_cut - {s, t} + + +@nx._dispatchable +def minimum_node_cut(G, s=None, t=None, flow_func=None): + r"""Returns a set of nodes of minimum cardinality that disconnects G. 
+ + If source and target nodes are provided, this function returns the + set of nodes of minimum cardinality that, if removed, would destroy + all paths among source and target in G. If not, it returns a set + of nodes of minimum cardinality that disconnects G. + + Parameters + ---------- + G : NetworkX graph + + s : node + Source node. Optional. Default value: None. + + t : node + Target node. Optional. Default value: None. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + Returns + ------- + cutset : set + Set of nodes that, if removed, would disconnect G. If source + and target nodes are provided, the set contains the nodes that + if removed, would destroy all paths between source and target. + + Examples + -------- + >>> # Platonic icosahedral graph has node connectivity 5 + >>> G = nx.icosahedral_graph() + >>> node_cut = nx.minimum_node_cut(G) + >>> len(node_cut) + 5 + + You can use alternative flow algorithms for the underlying maximum + flow computation. In dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better + than the default :meth:`edmonds_karp`, which is faster for + sparse networks with highly skewed degree distributions. Alternative + flow functions have to be explicitly imported from the flow package. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> node_cut == nx.minimum_node_cut(G, flow_func=shortest_augmenting_path) + True + + If you specify a pair of nodes (source and target) as parameters, + this function returns a local st node cut. + + >>> len(nx.minimum_node_cut(G, 3, 7)) + 5 + + If you need to perform several local st cuts among different + pairs of nodes on the same graph, it is recommended that you reuse + the data structures used in the maximum flow computations. See + :meth:`minimum_st_node_cut` for details. + + Notes + ----- + This is a flow based implementation of minimum node cut. The algorithm + is based in solving a number of maximum flow computations to determine + the capacity of the minimum cut on an auxiliary directed network that + corresponds to the minimum node cut of G. It handles both directed + and undirected graphs. This implementation is based on algorithm 11 + in [1]_. + + See also + -------- + :meth:`minimum_st_node_cut` + :meth:`minimum_cut` + :meth:`minimum_edge_cut` + :meth:`stoer_wagner` + :meth:`node_connectivity` + :meth:`edge_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + if (s is not None and t is None) or (s is None and t is not None): + raise nx.NetworkXError("Both source and target must be specified.") + + # Local minimum node cut. 
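+    # Validate the endpoints first so a missing node raises a clear
+    # NetworkXError rather than a KeyError from the auxiliary digraph's
+    # node mapping.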
+ if s is not None and t is not None: + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + return minimum_st_node_cut(G, s, t, flow_func=flow_func) + + # Global minimum node cut. + # Analog to the algorithm 11 for global node connectivity in [1]. + if G.is_directed(): + if not nx.is_weakly_connected(G): + raise nx.NetworkXError("Input graph is not connected") + iter_func = itertools.permutations + + def neighbors(v): + return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)]) + + else: + if not nx.is_connected(G): + raise nx.NetworkXError("Input graph is not connected") + iter_func = itertools.combinations + neighbors = G.neighbors + + # Reuse the auxiliary digraph and the residual network. + H = build_auxiliary_node_connectivity(G) + R = build_residual_network(H, "capacity") + kwargs = {"flow_func": flow_func, "auxiliary": H, "residual": R} + + # Choose a node with minimum degree. + v = min(G, key=G.degree) + # Initial node cutset is all neighbors of the node with minimum degree. + min_cut = set(G[v]) + # Compute st node cuts between v and all its non-neighbors nodes in G. + for w in set(G) - set(neighbors(v)) - {v}: + this_cut = minimum_st_node_cut(G, v, w, **kwargs) + if len(min_cut) >= len(this_cut): + min_cut = this_cut + # Also for non adjacent pairs of neighbors of v. + for x, y in iter_func(neighbors(v), 2): + if y in G[x]: + continue + this_cut = minimum_st_node_cut(G, x, y, **kwargs) + if len(min_cut) >= len(this_cut): + min_cut = this_cut + + return min_cut + + +@nx._dispatchable +def minimum_edge_cut(G, s=None, t=None, flow_func=None): + r"""Returns a set of edges of minimum cardinality that disconnects G. + + If source and target nodes are provided, this function returns the + set of edges of minimum cardinality that, if removed, would break + all paths among source and target in G. If not, it returns a set of + edges of minimum cardinality that disconnects G. + + Parameters + ---------- + G : NetworkX graph + + s : node + Source node. Optional. Default value: None. + + t : node + Target node. Optional. Default value: None. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The + choice of the default function may change from version + to version and should not be relied on. Default value: None. + + Returns + ------- + cutset : set + Set of edges that, if removed, would disconnect G. If source + and target nodes are provided, the set contains the edges that + if removed, would destroy all paths between source and target. + + Examples + -------- + >>> # Platonic icosahedral graph has edge connectivity 5 + >>> G = nx.icosahedral_graph() + >>> len(nx.minimum_edge_cut(G)) + 5 + + You can use alternative flow algorithms for the underlying + maximum flow computation. In dense networks the algorithm + :meth:`shortest_augmenting_path` will usually perform better + than the default :meth:`edmonds_karp`, which is faster for + sparse networks with highly skewed degree distributions. + Alternative flow functions have to be explicitly imported + from the flow package. 
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> len(nx.minimum_edge_cut(G, flow_func=shortest_augmenting_path))
+    5
+
+    If you specify a pair of nodes (source and target) as parameters,
+    this function returns a local st edge cut, whose cardinality equals
+    the local edge connectivity.
+
+    >>> nx.edge_connectivity(G, 3, 7)
+    5
+
+    If you need to perform several local computations among different
+    pairs of nodes on the same graph, it is recommended that you reuse
+    the data structures used in the maximum flow computations. See
+    :meth:`local_edge_connectivity` for details.
+
+    Notes
+    -----
+    This is a flow based implementation of minimum edge cut. For
+    undirected graphs the algorithm works by finding a 'small' dominating
+    set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
+    flow between an arbitrary node in the dominating set and the rest of
+    the nodes in it. This is an implementation of algorithm 6 in [1]_. For
+    directed graphs, the algorithm does n calls to the max flow function.
+    The function raises an error if the directed graph is not weakly
+    connected; it returns an empty set if the graph is weakly connected
+    but not strongly connected. It is an implementation of algorithm 8
+    in [1]_.
+
+    See also
+    --------
+    :meth:`minimum_st_edge_cut`
+    :meth:`minimum_node_cut`
+    :meth:`stoer_wagner`
+    :meth:`node_connectivity`
+    :meth:`edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    References
+    ----------
+    .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
+       http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
+
+    """
+    if (s is not None and t is None) or (s is None and t is not None):
+        raise nx.NetworkXError("Both source and target must be specified.")
+
+    # reuse auxiliary digraph and residual network
+    H = build_auxiliary_edge_connectivity(G)
+    R = build_residual_network(H, "capacity")
+    kwargs = {"flow_func": flow_func, "residual": R, "auxiliary": H}
+
+    # Local minimum edge cut if s and t are not None
+    if s is not None and t is not None:
+        if s not in G:
+            raise nx.NetworkXError(f"node {s} not in graph")
+        if t not in G:
+            raise nx.NetworkXError(f"node {t} not in graph")
+        return minimum_st_edge_cut(H, s, t, **kwargs)
+
+    # Global minimum edge cut
+    # Analog to the algorithm for global edge connectivity
+    if G.is_directed():
+        # Based on algorithm 8 in [1]
+        if not nx.is_weakly_connected(G):
+            raise nx.NetworkXError("Input graph is not connected")
+
+        # Initial cutset is all edges of a node with minimum degree
+        node = min(G, key=G.degree)
+        min_cut = set(G.edges(node))
+        nodes = list(G)
+        n = len(nodes)
+        for i in range(n):
+            try:
+                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i + 1], **kwargs)
+                if len(this_cut) <= len(min_cut):
+                    min_cut = this_cut
+            except IndexError:  # Last node!
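+                # Wrap around at the end of the node list: pair the last
+                # node with the first one.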
+                this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0], **kwargs)
+                if len(this_cut) <= len(min_cut):
+                    min_cut = this_cut
+
+        return min_cut
+
+    else:  # undirected
+        # Based on algorithm 6 in [1]
+        if not nx.is_connected(G):
+            raise nx.NetworkXError("Input graph is not connected")
+
+        # Initial cutset is all edges of a node with minimum degree
+        node = min(G, key=G.degree)
+        min_cut = set(G.edges(node))
+        # A dominating set is \lambda-covering
+        # We need a dominating set with at least two nodes
+        for node in G:
+            D = nx.dominating_set(G, start_with=node)
+            v = D.pop()
+            if D:
+                break
+        else:
+            # in complete graphs the dominating set will always consist of a
+            # single node, thus we return min_cut, which now contains the
+            # edges of a node with minimum degree
+            return min_cut
+        for w in D:
+            this_cut = minimum_st_edge_cut(H, v, w, **kwargs)
+            if len(this_cut) <= len(min_cut):
+                min_cut = this_cut
+
+        return min_cut
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py
new file mode 100644
index 0000000..fdd8522
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/disjoint_paths.py
@@ -0,0 +1,408 @@
+"""Flow based node and edge disjoint paths."""
+
+from itertools import filterfalse as _filterfalse
+
+import networkx as nx
+
+# Define the default maximum flow function to use for the underlying
+# maximum flow computations
+from networkx.algorithms.flow import (
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+from networkx.exception import NetworkXNoPath
+
+# Functions to build auxiliary data structures.
+from .utils import build_auxiliary_edge_connectivity, build_auxiliary_node_connectivity
+
+__all__ = ["edge_disjoint_paths", "node_disjoint_paths"]
+default_flow_func = edmonds_karp
+
+
+@nx._dispatchable(
+    graphs={"G": 0, "auxiliary?": 5},
+    preserve_edge_attrs={"auxiliary": {"capacity": float("inf")}},
+)
+def edge_disjoint_paths(
+    G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None
+):
+    """Returns the edge disjoint paths between source and target.
+
+    Edge disjoint paths are paths that do not share any edge. The
+    number of edge disjoint paths between source and target is equal
+    to their edge connectivity.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    flow_func : function
+        A function for computing the maximum flow between a pair of nodes.
+        The function has to accept at least three parameters: a Digraph,
+        a source node, and a target node. It must return a residual network
+        that follows NetworkX conventions (see :meth:`maximum_flow` for
+        details). If flow_func is None, the default maximum flow function
+        (:meth:`edmonds_karp`) is used. The choice of the default function
+        may change from version to version and should not be relied on.
+        Default value: None.
+
+    cutoff : integer or None (default: None)
+        Maximum number of paths to yield. If specified, the maximum flow
+        algorithm will terminate when the flow value reaches or exceeds the
+        cutoff. This only works for flows that support the cutoff parameter
+        (most do) and is ignored otherwise.
+
+    auxiliary : NetworkX DiGraph
+        Auxiliary digraph to compute flow based edge connectivity. It has
+        to have a graph attribute called mapping with a dictionary mapping
+        node names in G and in the auxiliary digraph. If provided
+        it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    Returns
+    -------
+    paths : generator
+        A generator of edge independent paths.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If there is no path between source and target.
+
+    NetworkXError
+        If source or target are not in the graph G.
+
+    See also
+    --------
+    :meth:`node_disjoint_paths`
+    :meth:`edge_connectivity`
+    :meth:`maximum_flow`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Examples
+    --------
+    We use in this example the platonic icosahedral graph, which has
+    edge connectivity 5, thus there are 5 edge disjoint paths between
+    any pair of nodes.
+
+    >>> G = nx.icosahedral_graph()
+    >>> len(list(nx.edge_disjoint_paths(G, 0, 6)))
+    5
+
+    If you need to compute edge disjoint paths on several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for edge connectivity, and the residual
+    network for the underlying maximum flow computation.
+
+    Example of how to compute edge disjoint paths among all pairs of
+    nodes of the platonic icosahedral graph reusing the data
+    structures.
+
+    >>> import itertools
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
+    >>> H = build_auxiliary_edge_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> result = {n: {} for n in G}
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as arguments
+    >>> for u, v in itertools.combinations(G, 2):
+    ...     k = len(list(nx.edge_disjoint_paths(G, u, v, auxiliary=H, residual=R)))
+    ...     result[u][v] = k
+    >>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
+    True
+
+    You can also use alternative flow algorithms for computing edge disjoint
+    paths. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp`, which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> len(list(nx.edge_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of edge disjoint paths. We compute
+    the maximum flow between source and target on an auxiliary directed
+    network. The saturated edges in the residual network after running the
+    maximum flow algorithm correspond to edge disjoint paths between source
+    and target in the original network. This function handles both directed
+    and undirected graphs, and can use all flow algorithms from the NetworkX
+    flow package.
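+
+    As an illustrative sketch (assuming the default :meth:`edmonds_karp`
+    flow function), the ``cutoff`` parameter stops the underlying maximum
+    flow computation early, so at most ``cutoff`` paths are yielded:
+
+    >>> len(list(nx.edge_disjoint_paths(G, 0, 6, cutoff=2)))
+    2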
+ + """ + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + + if flow_func is None: + flow_func = default_flow_func + + if auxiliary is None: + H = build_auxiliary_edge_connectivity(G) + else: + H = auxiliary + + # Maximum possible edge disjoint paths + possible = min(H.out_degree(s), H.in_degree(t)) + if not possible: + raise NetworkXNoPath + + if cutoff is None: + cutoff = possible + else: + cutoff = min(cutoff, possible) + + # Compute maximum flow between source and target. Flow functions in + # NetworkX return a residual network. + kwargs = { + "capacity": "capacity", + "residual": residual, + "cutoff": cutoff, + "value_only": True, + } + if flow_func is preflow_push: + del kwargs["cutoff"] + if flow_func is shortest_augmenting_path: + kwargs["two_phase"] = True + R = flow_func(H, s, t, **kwargs) + + if R.graph["flow_value"] == 0: + raise NetworkXNoPath + + # Saturated edges in the residual network form the edge disjoint paths + # between source and target + cutset = [ + (u, v) + for u, v, d in R.edges(data=True) + if d["capacity"] == d["flow"] and d["flow"] > 0 + ] + # This is equivalent of what flow.utils.build_flow_dict returns, but + # only for the nodes with saturated edges and without reporting 0 flows. + flow_dict = {n: {} for edge in cutset for n in edge} + for u, v in cutset: + flow_dict[u][v] = 1 + + # Rebuild the edge disjoint paths from the flow dictionary. + paths_found = 0 + for v in list(flow_dict[s]): + if paths_found >= cutoff: + # preflow_push does not support cutoff: we have to + # keep track of the paths founds and stop at cutoff. + break + path = [s] + if v == t: + path.append(v) + yield path + continue + u = v + while u != t: + path.append(u) + try: + u, _ = flow_dict[u].popitem() + except KeyError: + break + else: + path.append(t) + yield path + paths_found += 1 + + +@nx._dispatchable( + graphs={"G": 0, "auxiliary?": 5}, + preserve_node_attrs={"auxiliary": {"id": None}}, + preserve_graph_attrs={"auxiliary"}, +) +def node_disjoint_paths( + G, s, t, flow_func=None, cutoff=None, auxiliary=None, residual=None +): + r"""Computes node disjoint paths between source and target. + + Node disjoint paths are paths that only share their first and last + nodes. The number of node independent paths between two nodes is + equal to their local node connectivity. + + Parameters + ---------- + G : NetworkX graph + + s : node + Source node. + + t : node + Target node. + + flow_func : function + A function for computing the maximum flow among a pair of nodes. + The function has to accept at least three parameters: a Digraph, + a source node, and a target node. And return a residual network + that follows NetworkX conventions (see :meth:`maximum_flow` for + details). If flow_func is None, the default maximum flow function + (:meth:`edmonds_karp`) is used. See below for details. The choice + of the default function may change from version to version and + should not be relied on. Default value: None. + + cutoff : integer or None (default: None) + Maximum number of paths to yield. If specified, the maximum flow + algorithm will terminate when the flow value reaches or exceeds the + cutoff. This only works for flows that support the cutoff parameter + (most do) and is ignored otherwise. + + auxiliary : NetworkX DiGraph + Auxiliary digraph to compute flow based node connectivity. It has + to have a graph attribute called mapping with a dictionary mapping + node names in G and in the auxiliary digraph. 
+        it will be reused instead of recreated. Default value: None.
+
+    residual : NetworkX DiGraph
+        Residual network to compute maximum flow. If provided it will be
+        reused instead of recreated. Default value: None.
+
+    Returns
+    -------
+    paths : generator
+        Generator of node disjoint paths.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If there is no path between source and target.
+
+    NetworkXError
+        If source or target are not in the graph G.
+
+    Examples
+    --------
+    We use in this example the platonic icosahedral graph, which has node
+    connectivity 5, thus there are 5 node disjoint paths between any pair
+    of non-neighbor nodes.
+
+    >>> G = nx.icosahedral_graph()
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6)))
+    5
+
+    If you need to compute node disjoint paths between several pairs of
+    nodes in the same graph, it is recommended that you reuse the
+    data structures that NetworkX uses in the computation: the
+    auxiliary digraph for node connectivity and node cuts, and the
+    residual network for the underlying maximum flow computation.
+
+    Example of how to compute node disjoint paths reusing the data
+    structures:
+
+    >>> # You also have to explicitly import the function for
+    >>> # building the auxiliary digraph from the connectivity package
+    >>> from networkx.algorithms.connectivity import build_auxiliary_node_connectivity
+    >>> H = build_auxiliary_node_connectivity(G)
+    >>> # And the function for building the residual network from the
+    >>> # flow package
+    >>> from networkx.algorithms.flow import build_residual_network
+    >>> # Note that the auxiliary digraph has an edge attribute named capacity
+    >>> R = build_residual_network(H, "capacity")
+    >>> # Reuse the auxiliary digraph and the residual network by passing them
+    >>> # as arguments
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6, auxiliary=H, residual=R)))
+    5
+
+    You can also use alternative flow algorithms for computing node disjoint
+    paths. For instance, in dense networks the algorithm
+    :meth:`shortest_augmenting_path` will usually perform better than
+    the default :meth:`edmonds_karp`, which is faster for sparse
+    networks with highly skewed degree distributions. Alternative flow
+    functions have to be explicitly imported from the flow package.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6, flow_func=shortest_augmenting_path)))
+    5
+
+    Notes
+    -----
+    This is a flow based implementation of node disjoint paths. We compute
+    the maximum flow between source and target on an auxiliary directed
+    network. The saturated edges in the residual network after running the
+    maximum flow algorithm correspond to node disjoint paths between source
+    and target in the original network. This function handles both directed
+    and undirected graphs, and can use all flow algorithms from the NetworkX
+    flow package.
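+
+    As an illustrative sketch, ``cutoff`` bounds the number of paths that
+    are computed and yielded (here at most two of the five possible):
+
+    >>> len(list(nx.node_disjoint_paths(G, 0, 6, cutoff=2)))
+    2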
+ + See also + -------- + :meth:`edge_disjoint_paths` + :meth:`node_connectivity` + :meth:`maximum_flow` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + """ + if s not in G: + raise nx.NetworkXError(f"node {s} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {t} not in graph") + + if auxiliary is None: + H = build_auxiliary_node_connectivity(G) + else: + H = auxiliary + + mapping = H.graph.get("mapping", None) + if mapping is None: + raise nx.NetworkXError("Invalid auxiliary digraph.") + + # Maximum possible edge disjoint paths + possible = min(H.out_degree(f"{mapping[s]}B"), H.in_degree(f"{mapping[t]}A")) + if not possible: + raise NetworkXNoPath + + if cutoff is None: + cutoff = possible + else: + cutoff = min(cutoff, possible) + + kwargs = { + "flow_func": flow_func, + "residual": residual, + "auxiliary": H, + "cutoff": cutoff, + } + + # The edge disjoint paths in the auxiliary digraph correspond to the node + # disjoint paths in the original graph. + paths_edges = edge_disjoint_paths(H, f"{mapping[s]}B", f"{mapping[t]}A", **kwargs) + for path in paths_edges: + # Each node in the original graph maps to two nodes in auxiliary graph + yield list(_unique_everseen(H.nodes[node]["id"] for node in path)) + + +def _unique_everseen(iterable): + # Adapted from https://docs.python.org/3/library/itertools.html examples + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + seen = set() + seen_add = seen.add + for element in _filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py new file mode 100644 index 0000000..6dfe014 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_augmentation.py @@ -0,0 +1,1270 @@ +""" +Algorithms for finding k-edge-augmentations + +A k-edge-augmentation is a set of edges, that once added to a graph, ensures +that the graph is k-edge-connected; i.e. the graph cannot be disconnected +unless k or more edges are removed. Typically, the goal is to find the +augmentation with minimum weight. In general, it is not guaranteed that a +k-edge-augmentation exists. + +See Also +-------- +:mod:`edge_kcomponents` : algorithms for finding k-edge-connected components +:mod:`connectivity` : algorithms for determining edge connectivity. +""" + +import itertools as it +import math +from collections import defaultdict, namedtuple + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["k_edge_augmentation", "is_k_edge_connected", "is_locally_k_edge_connected"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_k_edge_connected(G, k): + """Tests to see if a graph is k-edge-connected. + + Is it impossible to disconnect the graph by removing fewer than k edges? + If so, then G is k-edge-connected. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + k : integer + edge connectivity to test for + + Returns + ------- + boolean + True if G is k-edge-connected. 
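+
+    Raises
+    ------
+    ValueError
+        If k is less than 1.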
+
+    See Also
+    --------
+    :func:`is_locally_k_edge_connected`
+
+    Examples
+    --------
+    >>> G = nx.barbell_graph(10, 0)
+    >>> nx.is_k_edge_connected(G, k=1)
+    True
+    >>> nx.is_k_edge_connected(G, k=2)
+    False
+    """
+    if k < 1:
+        raise ValueError(f"k must be positive, not {k}")
+    # First try to quickly determine if G is not k-edge-connected
+    if G.number_of_nodes() < k + 1:
+        return False
+    elif any(d < k for n, d in G.degree()):
+        return False
+    else:
+        # Otherwise perform the full check
+        if k == 1:
+            return nx.is_connected(G)
+        elif k == 2:
+            return nx.is_connected(G) and not nx.has_bridges(G)
+        else:
+            return nx.edge_connectivity(G, cutoff=k) >= k
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_locally_k_edge_connected(G, s, t, k):
+    """Tests to see if a pair of nodes in a graph is locally k-edge-connected.
+
+    Is it impossible to disconnect s and t by removing fewer than k edges?
+    If so, then s and t are locally k-edge-connected in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    s : node
+        Source node
+
+    t : node
+        Target node
+
+    k : integer
+        local edge connectivity for nodes s and t
+
+    Returns
+    -------
+    boolean
+        True if s and t are locally k-edge-connected in G.
+
+    See Also
+    --------
+    :func:`is_k_edge_connected`
+
+    Examples
+    --------
+    >>> from networkx.algorithms.connectivity import is_locally_k_edge_connected
+    >>> G = nx.barbell_graph(10, 0)
+    >>> is_locally_k_edge_connected(G, 5, 15, k=1)
+    True
+    >>> is_locally_k_edge_connected(G, 5, 15, k=2)
+    False
+    >>> is_locally_k_edge_connected(G, 1, 5, k=2)
+    True
+    """
+    if k < 1:
+        raise ValueError(f"k must be positive, not {k}")
+
+    # First try to quickly determine if s, t is not k-locally-edge-connected in G
+    if G.degree(s) < k or G.degree(t) < k:
+        return False
+    else:
+        # Otherwise perform the full check
+        if k == 1:
+            return nx.has_path(G, s, t)
+        else:
+            localk = nx.connectivity.local_edge_connectivity(G, s, t, cutoff=k)
+            return localk >= k
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def k_edge_augmentation(G, k, avail=None, weight=None, partial=False):
+    """Finds a set of edges to k-edge-connect G.
+
+    Adding edges from the augmentation to G makes it impossible to disconnect
+    G unless k or more edges are removed. This function uses the most efficient
+    function available (depending on the value of k and if the problem is
+    weighted or unweighted) to search for a minimum weight subset of available
+    edges that k-edge-connects G. In general, finding a k-edge-augmentation is
+    NP-hard, so solutions are not guaranteed to be minimal. Furthermore, a
+    k-edge-augmentation may not exist.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    k : integer
+        Desired edge connectivity
+
+    avail : dict or a set of 2 or 3 tuples
+        The available edges that can be used in the augmentation.
+
+        If unspecified, then all edges in the complement of G are available.
+        Otherwise, each item is an available edge (with an optional weight).
+
+        In the unweighted case, each item is an edge ``(u, v)``.
+
+        In the weighted case, each item is a 3-tuple ``(u, v, d)`` or a dict
+        with items ``(u, v): d``. The third item, ``d``, can be a dictionary
+        or a real number. If ``d`` is a dictionary ``d[weight]``
+        corresponds to the weight.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples where the
+        third item in each tuple is a dictionary.
+
+    partial : boolean
+        If partial is True and no feasible k-edge-augmentation exists, then
+        a partial k-edge-augmentation is generated. Adding the edges in a
+        partial augmentation to G minimizes the number of k-edge-connected
+        components and maximizes the edge connectivity between those
+        components. For details, see :func:`partial_k_edge_augmentation`.
+
+    Yields
+    ------
+    edge : tuple
+        Edges that, once added to G, would cause G to become k-edge-connected.
+        If partial is False, an error is raised if this is not possible.
+        Otherwise, generated edges form a partial augmentation, which
+        k-edge-connects any part of G where it is possible, and maximally
+        connects the remaining parts.
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If partial is False and no k-edge-augmentation exists.
+
+    NetworkXNotImplemented
+        If the input graph is directed or a multigraph.
+
+    ValueError
+        If k is less than 1
+
+    Notes
+    -----
+    When k=1 this returns an optimal solution.
+
+    When k=2 and ``avail`` is None, this returns an optimal solution.
+    Otherwise when k=2, this returns a 2-approximation of the optimal solution.
+
+    For k >= 3, this problem is NP-hard and this uses a randomized algorithm
+    that produces a feasible solution, but provides no guarantees on the
+    solution weight.
+
+    Examples
+    --------
+    >>> # Unweighted cases
+    >>> G = nx.path_graph((1, 2, 3, 4))
+    >>> G.add_node(5)
+    >>> sorted(nx.k_edge_augmentation(G, k=1))
+    [(1, 5)]
+    >>> sorted(nx.k_edge_augmentation(G, k=2))
+    [(1, 5), (5, 4)]
+    >>> sorted(nx.k_edge_augmentation(G, k=3))
+    [(1, 4), (1, 5), (2, 5), (3, 5), (4, 5)]
+    >>> complement = list(nx.k_edge_augmentation(G, k=5, partial=True))
+    >>> G.add_edges_from(complement)
+    >>> nx.edge_connectivity(G)
+    4
+
+    >>> # Weighted cases
+    >>> G = nx.path_graph((1, 2, 3, 4))
+    >>> G.add_node(5)
+    >>> # avail can be a tuple with a dict
+    >>> avail = [(1, 5, {"weight": 11}), (2, 5, {"weight": 10})]
+    >>> sorted(nx.k_edge_augmentation(G, k=1, avail=avail, weight="weight"))
+    [(2, 5)]
+    >>> # or avail can be a 3-tuple with a real number
+    >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)]
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
+    [(1, 5), (2, 5), (4, 5)]
+    >>> # or avail can be a dict
+    >>> avail = {(1, 5): 11, (2, 5): 10, (4, 3): 1, (4, 5): 51}
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail))
+    [(1, 5), (2, 5), (4, 5)]
+    >>> # If augmentation is infeasible, then a partial solution can be found
+    >>> avail = {(1, 5): 11}
+    >>> sorted(nx.k_edge_augmentation(G, k=2, avail=avail, partial=True))
+    [(1, 5)]
+    """
+    try:
+        if k <= 0:
+            raise ValueError(f"k must be a positive integer, not {k}")
+        elif G.number_of_nodes() < k + 1:
+            msg = f"impossible to {k} connect in graph with less than {k + 1} nodes"
+            raise nx.NetworkXUnfeasible(msg)
+        elif avail is not None and len(avail) == 0:
+            if not nx.is_k_edge_connected(G, k):
+                raise nx.NetworkXUnfeasible("no available edges")
+            aug_edges = []
+        elif k == 1:
+            aug_edges = one_edge_augmentation(
+                G, avail=avail, weight=weight, partial=partial
+            )
+        elif k == 2:
+            aug_edges = bridge_augmentation(G, avail=avail, weight=weight)
+        else:
+            # raise NotImplementedError(f'not implemented for k>2. k={k}')
+            aug_edges = greedy_k_edge_augmentation(
+                G, k=k, avail=avail, weight=weight, seed=0
+            )
+        # Do eager evaluation so we can catch any exceptions
+        # before executing partial code.
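+        # (Materializing the generator now means a NetworkXUnfeasible error
+        # is raised here, where the partial-augmentation fallback below can
+        # catch it, instead of surfacing mid-iteration at the caller.)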
+ yield from list(aug_edges) + except nx.NetworkXUnfeasible: + if partial: + # Return all available edges + if avail is None: + aug_edges = complement_edges(G) + else: + # If we can't k-edge-connect the entire graph, try to + # k-edge-connect as much as possible + aug_edges = partial_k_edge_augmentation( + G, k=k, avail=avail, weight=weight + ) + yield from aug_edges + else: + raise + + +@nx._dispatchable +def partial_k_edge_augmentation(G, k, avail, weight=None): + """Finds augmentation that k-edge-connects as much of the graph as possible. + + When a k-edge-augmentation is not possible, we can still try to find a + small set of edges that partially k-edge-connects as much of the graph as + possible. All possible edges are generated between remaining parts. + This minimizes the number of k-edge-connected subgraphs in the resulting + graph and maximizes the edge connectivity between those subgraphs. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + k : integer + Desired edge connectivity + + avail : dict or a set of 2 or 3 tuples + For more details, see :func:`k_edge_augmentation`. + + weight : string + key to use to find weights if ``avail`` is a set of 3-tuples. + For more details, see :func:`k_edge_augmentation`. + + Yields + ------ + edge : tuple + Edges in the partial augmentation of G. These edges k-edge-connect any + part of G where it is possible, and maximally connects the remaining + parts. In other words, all edges from avail are generated except for + those within subgraphs that have already become k-edge-connected. + + Notes + ----- + Construct H that augments G with all edges in avail. + Find the k-edge-subgraphs of H. + For each k-edge-subgraph, if the number of nodes is more than k, then find + the k-edge-augmentation of that graph and add it to the solution. Then add + all edges in avail between k-edge subgraphs to the solution. 
+ + See Also + -------- + :func:`k_edge_augmentation` + + Examples + -------- + >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7)) + >>> G.add_node(8) + >>> avail = [(1, 3), (1, 4), (1, 5), (2, 4), (2, 5), (3, 5), (1, 8)] + >>> sorted(partial_k_edge_augmentation(G, k=2, avail=avail)) + [(1, 5), (1, 8)] + """ + + def _edges_between_disjoint(H, only1, only2): + """finds edges between disjoint nodes""" + only1_adj = {u: set(H.adj[u]) for u in only1} + for u, neighbs in only1_adj.items(): + # Find the neighbors of u in only1 that are also in only2 + neighbs12 = neighbs.intersection(only2) + for v in neighbs12: + yield (u, v) + + avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G) + + # Find which parts of the graph can be k-edge-connected + H = G.copy() + H.add_edges_from( + ( + (u, v, {"weight": w, "generator": (u, v)}) + for (u, v), w in zip(avail, avail_w) + ) + ) + k_edge_subgraphs = list(nx.k_edge_subgraphs(H, k=k)) + + # Generate edges to k-edge-connect internal subgraphs + for nodes in k_edge_subgraphs: + if len(nodes) > 1: + # Get the k-edge-connected subgraph + C = H.subgraph(nodes).copy() + # Find the internal edges that were available + sub_avail = { + d["generator"]: d["weight"] + for (u, v, d) in C.edges(data=True) + if "generator" in d + } + # Remove potential augmenting edges + C.remove_edges_from(sub_avail.keys()) + # Find a subset of these edges that makes the component + # k-edge-connected and ignore the rest + yield from nx.k_edge_augmentation(C, k=k, avail=sub_avail) + + # Generate all edges between CCs that could not be k-edge-connected + for cc1, cc2 in it.combinations(k_edge_subgraphs, 2): + for u, v in _edges_between_disjoint(H, cc1, cc2): + d = H.get_edge_data(u, v) + edge = d.get("generator", None) + if edge is not None: + yield edge + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +@nx._dispatchable +def one_edge_augmentation(G, avail=None, weight=None, partial=False): + """Finds minimum weight set of edges to connect G. + + Equivalent to :func:`k_edge_augmentation` when k=1. Adding the resulting + edges to G will make it 1-edge-connected. The solution is optimal for both + weighted and non-weighted variants. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + avail : dict or a set of 2 or 3 tuples + For more details, see :func:`k_edge_augmentation`. + + weight : string + key to use to find weights if ``avail`` is a set of 3-tuples. + For more details, see :func:`k_edge_augmentation`. + + partial : boolean + If partial is True and no feasible k-edge-augmentation exists, then the + augmenting edges minimize the number of connected components. + + Yields + ------ + edge : tuple + Edges in the one-augmentation of G + + Raises + ------ + NetworkXUnfeasible + If partial is False and no one-edge-augmentation exists. + + Notes + ----- + Uses either :func:`unconstrained_one_edge_augmentation` or + :func:`weighted_one_edge_augmentation` depending on whether ``avail`` is + specified. Both algorithms are based on finding a minimum spanning tree. + As such both algorithms find optimal solutions and run in linear time. 
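+
+    Examples
+    --------
+    When ``avail`` is unspecified this reduces to
+    :func:`unconstrained_one_edge_augmentation`; for example:
+
+    >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
+    >>> G.add_nodes_from([6, 7, 8])
+    >>> sorted(one_edge_augmentation(G))
+    [(1, 4), (4, 6), (6, 7), (7, 8)]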
+
+    See Also
+    --------
+    :func:`k_edge_augmentation`
+    """
+    if avail is None:
+        return unconstrained_one_edge_augmentation(G)
+    else:
+        return weighted_one_edge_augmentation(
+            G, avail=avail, weight=weight, partial=partial
+        )
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("directed")
+@nx._dispatchable
+def bridge_augmentation(G, avail=None, weight=None):
+    """Finds a set of edges that bridge connects G.
+
+    Equivalent to :func:`k_edge_augmentation` when k=2, and partial=False.
+    Adding the resulting edges to G will make it 2-edge-connected. If no
+    constraints are specified the returned set of edges is minimal and
+    optimal, otherwise the solution is approximated.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    avail : dict or a set of 2 or 3 tuples
+        For more details, see :func:`k_edge_augmentation`.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples.
+        For more details, see :func:`k_edge_augmentation`.
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the bridge-augmentation of G
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If no bridge-augmentation exists.
+
+    Notes
+    -----
+    If there are no constraints the solution can be computed in linear time
+    using :func:`unconstrained_bridge_augmentation`. Otherwise, the problem
+    becomes NP-hard and the solution is approximated by
+    :func:`weighted_bridge_augmentation`.
+
+    See Also
+    --------
+    :func:`k_edge_augmentation`
+    """
+    if G.number_of_nodes() < 3:
+        raise nx.NetworkXUnfeasible("impossible to bridge connect less than 3 nodes")
+    if avail is None:
+        return unconstrained_bridge_augmentation(G)
+    else:
+        return weighted_bridge_augmentation(G, avail, weight=weight)
+
+
+# --- Algorithms and Helpers ---
+
+
+def _ordered(u, v):
+    """Returns the nodes in an undirected edge in lower-triangular order"""
+    return (u, v) if u < v else (v, u)
+
+
+def _unpack_available_edges(avail, weight=None, G=None):
+    """Helper to separate avail into edges and corresponding weights"""
+    if weight is None:
+        weight = "weight"
+    if isinstance(avail, dict):
+        avail_uv = list(avail.keys())
+        avail_w = list(avail.values())
+    else:
+
+        def _try_getitem(d):
+            try:
+                return d[weight]
+            except TypeError:
+                return d
+
+        avail_uv = [tup[0:2] for tup in avail]
+        avail_w = [1 if len(tup) == 2 else _try_getitem(tup[-1]) for tup in avail]
+
+    if G is not None:
+        # Edges already in the graph are filtered
+        flags = [not G.has_edge(u, v) for u, v in avail_uv]
+        avail_uv = list(it.compress(avail_uv, flags))
+        avail_w = list(it.compress(avail_w, flags))
+    return avail_uv, avail_w
+
+
+MetaEdge = namedtuple("MetaEdge", ("meta_uv", "uv", "w"))
+
+
+def _lightest_meta_edges(mapping, avail_uv, avail_w):
+    """Maps available edges in the original graph to edges in the metagraph.
+
+    Parameters
+    ----------
+    mapping : dict
+        mapping produced by :func:`collapse`, that maps each node in the
+        original graph to a node in the meta graph
+
+    avail_uv : list
+        list of edges
+
+    avail_w : list
+        list of edge weights
+
+    Notes
+    -----
+    Each node in the metagraph is a k-edge-connected component in the original
+    graph. We don't care about any edge within the same k-edge-connected
+    component, so we ignore self edges. We also are only interested in the
+    minimum weight edge bridging each k-edge-connected component, so we group
+    the edges by meta-edge and take the lightest in each group.
+ + Examples + -------- + >>> # Each group represents a meta-node + >>> groups = ([1, 2, 3], [4, 5], [6]) + >>> mapping = {n: meta_n for meta_n, ns in enumerate(groups) for n in ns} + >>> avail_uv = [(1, 2), (3, 6), (1, 4), (5, 2), (6, 1), (2, 6), (3, 1)] + >>> avail_w = [20, 99, 20, 15, 50, 99, 20] + >>> sorted(_lightest_meta_edges(mapping, avail_uv, avail_w)) + [MetaEdge(meta_uv=(0, 1), uv=(5, 2), w=15), MetaEdge(meta_uv=(0, 2), uv=(6, 1), w=50)] + """ + grouped_wuv = defaultdict(list) + for w, (u, v) in zip(avail_w, avail_uv): + # Order the meta-edge so it can be used as a dict key + meta_uv = _ordered(mapping[u], mapping[v]) + # Group each available edge using the meta-edge as a key + grouped_wuv[meta_uv].append((w, u, v)) + + # Now that all available edges are grouped, choose one per group + for (mu, mv), choices_wuv in grouped_wuv.items(): + # Ignore available edges within the same meta-node + if mu != mv: + # Choose the lightest available edge belonging to each meta-edge + w, u, v = min(choices_wuv) + yield MetaEdge((mu, mv), (u, v), w) + + +@nx._dispatchable +def unconstrained_one_edge_augmentation(G): + """Finds the smallest set of edges to connect G. + + This is a variant of the unweighted MST problem. + If G is not empty, a feasible solution always exists. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + Yields + ------ + edge : tuple + Edges in the one-edge-augmentation of G + + See Also + -------- + :func:`one_edge_augmentation` + :func:`k_edge_augmentation` + + Examples + -------- + >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)]) + >>> G.add_nodes_from([6, 7, 8]) + >>> sorted(unconstrained_one_edge_augmentation(G)) + [(1, 4), (4, 6), (6, 7), (7, 8)] + """ + ccs1 = list(nx.connected_components(G)) + C = collapse(G, ccs1) + # When we are not constrained, we can just make a meta graph tree. + meta_nodes = list(C.nodes()) + # build a path in the metagraph + meta_aug = list(zip(meta_nodes, meta_nodes[1:])) + # map that path to the original graph + inverse = defaultdict(list) + for k, v in C.graph["mapping"].items(): + inverse[v].append(k) + for mu, mv in meta_aug: + yield (inverse[mu][0], inverse[mv][0]) + + +@nx._dispatchable +def weighted_one_edge_augmentation(G, avail, weight=None, partial=False): + """Finds the minimum weight set of edges to connect G if one exists. + + This is a variant of the weighted MST problem. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + avail : dict or a set of 2 or 3 tuples + For more details, see :func:`k_edge_augmentation`. + + weight : string + key to use to find weights if ``avail`` is a set of 3-tuples. + For more details, see :func:`k_edge_augmentation`. + + partial : boolean + If partial is True and no feasible k-edge-augmentation exists, then the + augmenting edges minimize the number of connected components. + + Yields + ------ + edge : tuple + Edges in the subset of avail chosen to connect G. 
+
+    See Also
+    --------
+    :func:`one_edge_augmentation`
+    :func:`k_edge_augmentation`
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)])
+    >>> G.add_nodes_from([6, 7, 8])
+    >>> # any edge not in avail has an implicit weight of infinity
+    >>> avail = [(1, 3), (1, 5), (4, 7), (4, 8), (6, 1), (8, 1), (8, 2)]
+    >>> sorted(weighted_one_edge_augmentation(G, avail))
+    [(1, 5), (4, 7), (6, 1), (8, 1)]
+    >>> # find another solution by giving large weights to edges in the
+    >>> # previous solution (note some of the old edges must be used)
+    >>> avail = [(1, 3), (1, 5, 99), (4, 7, 9), (6, 1, 99), (8, 1, 99), (8, 2)]
+    >>> sorted(weighted_one_edge_augmentation(G, avail))
+    [(1, 5), (4, 7), (6, 1), (8, 2)]
+    """
+    avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)
+    # Collapse CCs in the original graph into nodes in a metagraph
+    # Then find an MST of the metagraph instead of the original graph
+    C = collapse(G, nx.connected_components(G))
+    mapping = C.graph["mapping"]
+    # Assign each available edge to an edge in the metagraph
+    candidate_mapping = _lightest_meta_edges(mapping, avail_uv, avail_w)
+    # nx.set_edge_attributes(C, name='weight', values=0)
+    C.add_edges_from(
+        (mu, mv, {"weight": w, "generator": uv})
+        for (mu, mv), uv, w in candidate_mapping
+    )
+    # Find MST of the meta graph
+    meta_mst = nx.minimum_spanning_tree(C)
+    if not partial and not nx.is_connected(meta_mst):
+        raise nx.NetworkXUnfeasible("Not possible to connect G with available edges")
+    # Yield the edge that generated the meta-edge
+    for mu, mv, d in meta_mst.edges(data=True):
+        if "generator" in d:
+            edge = d["generator"]
+            yield edge
+
+
+@nx._dispatchable
+def unconstrained_bridge_augmentation(G):
+    """Finds an optimal 2-edge-augmentation of G using the fewest edges.
+
+    This is an implementation of the algorithm detailed in [1]_.
+    The basic idea is to construct a meta-graph of bridge-ccs, connect leaf
+    nodes of the trees to connect the entire graph, and finally connect the
+    leafs of the tree in dfs-preorder to bridge connect the entire graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the bridge augmentation of G
+
+    Notes
+    -----
+    Input: a graph G.
+    First find the bridge components of G and collapse each bridge-cc into a
+    node of a metagraph C, which is guaranteed to be a forest of trees.
+
+    C contains p "leafs" --- nodes with exactly one incident edge.
+    C contains q "isolated nodes" --- nodes with no incident edges.
+
+    Theorem: If p + q > 1, then at least :math:`ceil(p / 2) + q` edges are
+    needed to bridge connect C. This algorithm achieves this min number.
+
+    The method first adds enough edges to make G into a tree and then pairs
+    leafs in a simple fashion.
+
+    Let n be the number of trees in C. Let v(i) be an isolated vertex in the
+    i-th tree if one exists, otherwise it is a pair of distinct leaf nodes
+    in the i-th tree. Alternating edges from these sets (i.e. adding edges
+    A1 = [(v(i)[0], v(i + 1)[1]), (v(i + 1)[0], v(i + 2)[1]), ...]) connects C
+    into a tree T. This tree has p' = p + 2q - 2(n - 1) leafs and no isolated
+    vertices. A1 has n - 1 edges. The next step finds ceil(p' / 2) edges to
+    biconnect any tree with p' leafs.
+
+    Convert T into an arborescence T' by picking an arbitrary root node with
+    degree >= 2 and directing all edges away from the root. Note the
+    implementation implicitly constructs T'.
+
+    The leafs of T' are the nodes with no outgoing edges in T'.
+    Order the leafs of T' by DFS preorder. Then break this list in half
+    and add the zipped pairs to A2.
+
+    The set A = A1 + A2 is the minimum augmentation in the metagraph.
+
+    To convert this to edges in the original graph, each meta-edge is
+    mapped back to an arbitrary pair of nodes in the bridge components
+    it connects (see the final loop in the implementation).
+
+    References
+    ----------
+    .. [1] Eswaran, Kapali P., and R. Endre Tarjan. (1975) Augmentation problems.
+       http://epubs.siam.org/doi/abs/10.1137/0205044
+
+    See Also
+    --------
+    :func:`bridge_augmentation`
+    :func:`k_edge_augmentation`
+
+    Examples
+    --------
+    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
+    >>> sorted(unconstrained_bridge_augmentation(G))
+    [(1, 7)]
+    >>> G = nx.path_graph((1, 2, 3, 2, 4, 5, 6, 7))
+    >>> sorted(unconstrained_bridge_augmentation(G))
+    [(1, 3), (3, 7)]
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)])
+    >>> G.add_node(4)
+    >>> sorted(unconstrained_bridge_augmentation(G))
+    [(1, 4), (4, 0)]
+    """
+    # -----
+    # Mapping of terms from (Eswaran and Tarjan):
+    # G = G_0 - the input graph
+    # C = G_0' - the bridge condensation of G. (This is a forest of trees)
+    # A1 = A_1 - the edges to connect the forest into a tree
+    # leaf = pendant - a node with degree of 1
+
+    # alpha(v) = maps the node v in G to its meta-node in C
+    # beta(x) = maps the meta-node x in C to any node in the bridge
+    #   component of G corresponding to x.
+
+    # find the 2-edge-connected components of G
+    bridge_ccs = list(nx.connectivity.bridge_components(G))
+    # condense G into a forest C
+    C = collapse(G, bridge_ccs)
+
+    # Choose pairs of distinct leaf nodes in each tree. If this is not
+    # possible then make a pair using the single isolated node in the tree.
+    vset1 = [
+        tuple(cc) * 2  # case1: an isolated node
+        if len(cc) == 1
+        else sorted(cc, key=C.degree)[0:2]  # case2: pair of leaf nodes
+        for cc in nx.connected_components(C)
+    ]
+    if len(vset1) > 1:
+        # Use this set to construct edges that connect C into a tree.
+        nodes1 = [vs[0] for vs in vset1]
+        nodes2 = [vs[1] for vs in vset1]
+        A1 = list(zip(nodes1[1:], nodes2))
+    else:
+        A1 = []
+    # Connect each tree in the forest to construct an arborescence
+    T = C.copy()
+    T.add_edges_from(A1)
+
+    # If there are only two leaf nodes, we simply connect them.
+    leafs = [n for n, d in T.degree() if d == 1]
+    if len(leafs) == 1:
+        A2 = []
+    if len(leafs) == 2:
+        A2 = [tuple(leafs)]
+    else:
+        # Choose an arbitrary non-leaf root
+        try:
+            root = next(n for n, d in T.degree() if d > 1)
+        except StopIteration:  # no nodes found with degree > 1
+            return
+        # order the leaves of C by (induced directed) preorder
+        v2 = [n for n in nx.dfs_preorder_nodes(T, root) if T.degree(n) == 1]
+        # connecting first half of the leafs in pre-order to the second
+        # half will bridge connect the tree with the fewest edges.
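+        # e.g. preorder leafs [a, b, c, d] get paired as (a, c) and (b, d).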
+ half = math.ceil(len(v2) / 2) + A2 = list(zip(v2[:half], v2[-half:])) + + # collect the edges used to augment the original forest + aug_tree_edges = A1 + A2 + + # Construct the mapping (beta) from meta-nodes to regular nodes + inverse = defaultdict(list) + for k, v in C.graph["mapping"].items(): + inverse[v].append(k) + # sort so we choose minimum degree nodes first + inverse = { + mu: sorted(mapped, key=lambda u: (G.degree(u), u)) + for mu, mapped in inverse.items() + } + + # For each meta-edge, map back to an arbitrary pair in the original graph + G2 = G.copy() + for mu, mv in aug_tree_edges: + # Find the first available edge that doesn't exist and return it + for u, v in it.product(inverse[mu], inverse[mv]): + if not G2.has_edge(u, v): + G2.add_edge(u, v) + yield u, v + break + + +@nx._dispatchable +def weighted_bridge_augmentation(G, avail, weight=None): + """Finds an approximate min-weight 2-edge-augmentation of G. + + This is an implementation of the approximation algorithm detailed in [1]_. + It chooses a set of edges from avail to add to G that renders it + 2-edge-connected if such a subset exists. This is done by finding a + minimum spanning arborescence of a specially constructed metagraph. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + avail : set of 2 or 3 tuples. + candidate edges (with optional weights) to choose from + + weight : string + key to use to find weights if avail is a set of 3-tuples where the + third item in each tuple is a dictionary. + + Yields + ------ + edge : tuple + Edges in the subset of avail chosen to bridge augment G. + + Notes + ----- + Finding a weighted 2-edge-augmentation is NP-hard. + Any edge not in ``avail`` is considered to have a weight of infinity. + The approximation factor is 2 if ``G`` is connected and 3 if it is not. + Runs in :math:`O(m + n log(n))` time + + References + ---------- + .. [1] Khuller, Samir, and Ramakrishna Thurimella. (1993) Approximation + algorithms for graph augmentation. + http://www.sciencedirect.com/science/article/pii/S0196677483710102 + + See Also + -------- + :func:`bridge_augmentation` + :func:`k_edge_augmentation` + + Examples + -------- + >>> G = nx.path_graph((1, 2, 3, 4)) + >>> # When the weights are equal, (1, 4) is the best + >>> avail = [(1, 4, 1), (1, 3, 1), (2, 4, 1)] + >>> sorted(weighted_bridge_augmentation(G, avail)) + [(1, 4)] + >>> # Giving (1, 4) a high weight makes the two edge solution the best. + >>> avail = [(1, 4, 1000), (1, 3, 1), (2, 4, 1)] + >>> sorted(weighted_bridge_augmentation(G, avail)) + [(1, 3), (2, 4)] + >>> # ------ + >>> G = nx.path_graph((1, 2, 3, 4)) + >>> G.add_node(5) + >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 1)] + >>> sorted(weighted_bridge_augmentation(G, avail=avail)) + [(1, 5), (4, 5)] + >>> avail = [(1, 5, 11), (2, 5, 10), (4, 3, 1), (4, 5, 51)] + >>> sorted(weighted_bridge_augmentation(G, avail=avail)) + [(1, 5), (2, 5), (4, 5)] + """ + + if weight is None: + weight = "weight" + + # If input G is not connected the approximation factor increases to 3 + if not nx.is_connected(G): + H = G.copy() + connectors = list(one_edge_augmentation(H, avail=avail, weight=weight)) + H.add_edges_from(connectors) + + yield from connectors + else: + connectors = [] + H = G + + if len(avail) == 0: + if nx.has_bridges(H): + raise nx.NetworkXUnfeasible("no augmentation possible") + + avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=H) + + # Collapse input into a metagraph. 
Meta nodes are bridge-ccs
+    bridge_ccs = nx.connectivity.bridge_components(H)
+    C = collapse(H, bridge_ccs)
+
+    # Use the meta graph to shrink avail to a small feasible subset
+    mapping = C.graph["mapping"]
+    # Choose the minimum weight feasible edge in each group
+    meta_to_wuv = {
+        (mu, mv): (w, uv)
+        for (mu, mv), uv, w in _lightest_meta_edges(mapping, avail_uv, avail_w)
+    }
+
+    # Mapping of terms from (Khuller and Thurimella):
+    # C : G_0 = (V, E^0)
+    #   This is the metagraph where each node is a 2-edge-cc in G.
+    #   The edges in C represent bridges in the original graph.
+    # (mu, mv) : E - E^0  # they group both avail and given edges in E
+    # T : \Gamma
+    # D : G^D = (V, E_D)
+
+    # The paper uses ancestor because children point to parents, which is
+    # contrary to networkx standards. So, we actually need to run
+    # nx.least_common_ancestor on the reversed Tree.
+
+    # Pick an arbitrary leaf from C as the root
+    try:
+        root = next(n for n, d in C.degree() if d == 1)
+    except StopIteration:  # no nodes found with degree == 1
+        return
+    # Root C into a tree TR by directing all edges away from the root
+    # Note in their paper T directs edges towards the root
+    TR = nx.dfs_tree(C, root)
+
+    # Add to D the directed edges of T and set their weight to zero
+    # This indicates that it costs nothing to use edges that were given.
+    D = nx.reverse(TR).copy()
+
+    nx.set_edge_attributes(D, name="weight", values=0)
+
+    # The LCA of mu and mv in T is the shared ancestor of mu and mv that is
+    # located farthest from the root.
+    lca_gen = nx.tree_all_pairs_lowest_common_ancestor(
+        TR, root=root, pairs=meta_to_wuv.keys()
+    )
+
+    for (mu, mv), lca in lca_gen:
+        w, uv = meta_to_wuv[(mu, mv)]
+        if lca == mu:
+            # If u is an ancestor of v in TR, then add edge u->v to D
+            D.add_edge(lca, mv, weight=w, generator=uv)
+        elif lca == mv:
+            # If v is an ancestor of u in TR, then add edge v->u to D
+            D.add_edge(lca, mu, weight=w, generator=uv)
+        else:
+            # If neither u nor v is an ancestor of the other in TR
+            # let t = lca(TR, u, v) and add edges t->u and t->v
+            # Track the original edge that GENERATED these edges.
+            D.add_edge(lca, mu, weight=w, generator=uv)
+            D.add_edge(lca, mv, weight=w, generator=uv)
+
+    # Then compute a minimum rooted branching
+    try:
+        # Note the original edges must be directed towards the root for the
+        # branching to give us a bridge-augmentation.
+        A = _minimum_rooted_branching(D, root)
+    except nx.NetworkXException as err:
+        # If there is no branching then augmentation is not possible
+        raise nx.NetworkXUnfeasible("no 2-edge-augmentation possible") from err
+
+    # For each edge e, in the branching that did not belong to the directed
+    # tree T, add the corresponding edge that **GENERATED** it (this is not
+    # necessarily e itself!)
+
+    # ensure the third case does not generate edges twice
+    bridge_connectors = set()
+    for mu, mv in A.edges():
+        data = D.get_edge_data(mu, mv)
+        if "generator" in data:
+            # Add the avail edge that generated the branching edge.
+            edge = data["generator"]
+            bridge_connectors.add(edge)
+
+    yield from bridge_connectors
+
+
+def _minimum_rooted_branching(D, root):
+    """Helper function to compute a minimum rooted branching (aka rooted
+    arborescence)
+
+    Before the branching can be computed, the directed graph must be rooted by
+    removing the predecessors of root.
+
+    A branching / arborescence of a rooted graph G is a subgraph that contains
+    a directed path from the root to every other vertex. It is the directed
+    analog of the minimum spanning tree problem.
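+
+    As a minimal sketch (assuming ``root`` is a node of ``D`` and a spanning
+    arborescence exists), a 3-cycle rooted at 0 loses the edge into the root
+    and yields the remaining path:
+
+    >>> D = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    >>> sorted(_minimum_rooted_branching(D, 0).edges())
+    [(0, 1), (1, 2)]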
+ + References + ---------- + [1] Khuller, Samir (2002) Advanced Algorithms Lecture 24 Notes. + https://web.archive.org/web/20121030033722/https://www.cs.umd.edu/class/spring2011/cmsc651/lec07.pdf + """ + rooted = D.copy() + # root the graph by removing all predecessors to `root`. + rooted.remove_edges_from([(u, root) for u in D.predecessors(root)]) + # Then compute the branching / arborescence. + A = nx.minimum_spanning_arborescence(rooted) + return A + + +@nx._dispatchable(returns_graph=True) +def collapse(G, grouped_nodes): + """Collapses each group of nodes into a single node. + + This is similar to condensation, but works on undirected graphs. + + Parameters + ---------- + G : NetworkX Graph + + grouped_nodes: list or generator + Grouping of nodes to collapse. The grouping must be disjoint. + If grouped_nodes are strongly_connected_components then this is + equivalent to :func:`condensation`. + + Returns + ------- + C : NetworkX Graph + The collapsed graph C of G with respect to the node grouping. The node + labels are integers corresponding to the index of the component in the + list of grouped_nodes. C has a graph attribute named 'mapping' with a + dictionary mapping the original nodes to the nodes in C to which they + belong. Each node in C also has a node attribute 'members' with the set + of original nodes in G that form the group that the node in C + represents. + + Examples + -------- + >>> # Collapses a graph using disjoint groups, but not necessarily connected + >>> G = nx.Graph([(1, 0), (2, 3), (3, 1), (3, 4), (4, 5), (5, 6), (5, 7)]) + >>> G.add_node("A") + >>> grouped_nodes = [{0, 1, 2, 3}, {5, 6, 7}] + >>> C = collapse(G, grouped_nodes) + >>> members = nx.get_node_attributes(C, "members") + >>> sorted(members.keys()) + [0, 1, 2, 3] + >>> member_values = set(map(frozenset, members.values())) + >>> assert {0, 1, 2, 3} in member_values + >>> assert {4} in member_values + >>> assert {5, 6, 7} in member_values + >>> assert {"A"} in member_values + """ + mapping = {} + members = {} + C = G.__class__() + i = 0 # required if G is empty + remaining = set(G.nodes()) + for i, group in enumerate(grouped_nodes): + group = set(group) + assert remaining.issuperset(group), ( + "grouped nodes must exist in G and be disjoint" + ) + remaining.difference_update(group) + members[i] = group + mapping.update((n, i) for n in group) + # remaining nodes are in their own group + for i, node in enumerate(remaining, start=i + 1): + group = {node} + members[i] = group + mapping.update((n, i) for n in group) + number_of_groups = i + 1 + C.add_nodes_from(range(number_of_groups)) + C.add_edges_from( + (mapping[u], mapping[v]) for u, v in G.edges() if mapping[u] != mapping[v] + ) + # Add a list of members (ie original nodes) to each node (ie scc) in C. 
+
+
+@nx._dispatchable
+def complement_edges(G):
+    """Returns only the edges in the complement of G
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the complement of G
+
+    Examples
+    --------
+    >>> G = nx.path_graph((1, 2, 3, 4))
+    >>> sorted(complement_edges(G))
+    [(1, 3), (1, 4), (2, 4)]
+    >>> G = nx.path_graph((1, 2, 3, 4), nx.DiGraph())
+    >>> sorted(complement_edges(G))
+    [(1, 3), (1, 4), (2, 1), (2, 4), (3, 1), (3, 2), (4, 1), (4, 2), (4, 3)]
+    >>> G = nx.complete_graph(1000)
+    >>> sorted(complement_edges(G))
+    []
+    """
+    G_adj = G._adj  # Store as a variable to eliminate attribute lookup
+    if G.is_directed():
+        for u, v in it.combinations(G.nodes(), 2):
+            if v not in G_adj[u]:
+                yield (u, v)
+            if u not in G_adj[v]:
+                yield (v, u)
+    else:
+        for u, v in it.combinations(G.nodes(), 2):
+            if v not in G_adj[u]:
+                yield (u, v)
+
+
+def _compat_shuffle(rng, input):
+    """wrapper around rng.shuffle for python 2 compatibility reasons"""
+    rng.shuffle(input)
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("directed")
+@py_random_state(4)
+@nx._dispatchable
+def greedy_k_edge_augmentation(G, k, avail=None, weight=None, seed=None):
+    """Greedy algorithm for finding a k-edge-augmentation
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    k : integer
+        Desired edge connectivity
+
+    avail : dict or a set of 2 or 3 tuples
+        For more details, see :func:`k_edge_augmentation`.
+
+    weight : string
+        key to use to find weights if ``avail`` is a set of 3-tuples.
+        For more details, see :func:`k_edge_augmentation`.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Yields
+    ------
+    edge : tuple
+        Edges in the greedy augmentation of G
+
+    Notes
+    -----
+    The algorithm is simple. Edges are incrementally added between parts of the
+    graph that are not yet locally k-edge-connected. Then edges from the
+    augmenting set are pruned as long as local-edge-connectivity is not broken.
+
+    This algorithm is greedy and does not provide optimality guarantees. It
+    exists only to provide :func:`k_edge_augmentation` with the ability to
+    generate a feasible solution for arbitrary k.
+
+    See Also
+    --------
+    :func:`k_edge_augmentation`
+
+    Examples
+    --------
+    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
+    >>> sorted(greedy_k_edge_augmentation(G, k=2))
+    [(1, 7)]
+    >>> sorted(greedy_k_edge_augmentation(G, k=1, avail=[]))
+    []
+    >>> G = nx.path_graph((1, 2, 3, 4, 5, 6, 7))
+    >>> avail = {(u, v): 1 for (u, v) in complement_edges(G)}
+    >>> # randomized pruning process can produce different solutions
+    >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=2))
+    [(1, 3), (1, 4), (1, 5), (1, 6), (1, 7), (2, 4), (2, 6), (3, 7), (5, 7)]
+    >>> sorted(greedy_k_edge_augmentation(G, k=4, avail=avail, seed=3))
+    [(1, 3), (1, 5), (1, 6), (2, 4), (2, 6), (3, 7), (4, 7), (5, 7)]
+    """
+    # Result set
+    aug_edges = []
+
+    done = is_k_edge_connected(G, k)
+    if done:
+        return
+    if avail is None:
+        # all edges are available
+        avail_uv = list(complement_edges(G))
+        avail_w = [1] * len(avail_uv)
+    else:
+        # Get the unique set of unweighted edges
+        avail_uv, avail_w = _unpack_available_edges(avail, weight=weight, G=G)
+
+    # Greedy: order lightest edges. Use degree sum to tie-break
+    tiebreaker = [sum(map(G.degree, uv)) for uv in avail_uv]
+    avail_wduv = sorted(zip(avail_w, tiebreaker, avail_uv))
+    avail_uv = [uv for w, d, uv in avail_wduv]
+
+    # Incrementally add edges until we are k-connected
+    H = G.copy()
+    for u, v in avail_uv:
+        done = False
+        if not is_locally_k_edge_connected(H, u, v, k=k):
+            # Only add edges in parts that are not yet locally k-edge-connected
+            aug_edges.append((u, v))
+            H.add_edge(u, v)
+            # Did adding this edge help?
+            if H.degree(u) >= k and H.degree(v) >= k:
+                done = is_k_edge_connected(H, k)
+        if done:
+            break
+
+    # Check for feasibility
+    if not done:
+        raise nx.NetworkXUnfeasible("not able to k-edge-connect with available edges")
+
+    # Randomized attempt to reduce the size of the solution
+    _compat_shuffle(seed, aug_edges)
+    for u, v in list(aug_edges):
+        # Don't remove if we know it would break connectivity
+        if H.degree(u) <= k or H.degree(v) <= k:
+            continue
+        H.remove_edge(u, v)
+        aug_edges.remove((u, v))
+        if not is_k_edge_connected(H, k=k):
+            # If removing this edge breaks feasibility, undo
+            H.add_edge(u, v)
+            aug_edges.append((u, v))
+
+    # Generate results
+    yield from aug_edges
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py
new file mode 100644
index 0000000..96886f2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/edge_kcomponents.py
@@ -0,0 +1,592 @@
+"""
+Algorithms for finding k-edge-connected components and subgraphs.
+
+A k-edge-connected component (k-edge-cc) is a maximal set of nodes in G, such
+that all pairs of nodes have an edge-connectivity of at least k.
+
+A k-edge-connected subgraph (k-edge-subgraph) is a maximal set of nodes in G,
+such that the subgraph of G defined by the nodes has an edge-connectivity at
+least k.
+"""
+
+import itertools as it
+from functools import partial
+
+import networkx as nx
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = [
+    "k_edge_components",
+    "k_edge_subgraphs",
+    "bridge_components",
+    "EdgeComponentAuxGraph",
+]
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def k_edge_components(G, k):
+    """Generates nodes in each maximal k-edge-connected component in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    k : Integer
+        Desired edge connectivity
+
+    Returns
+    -------
+    k_edge_components : a generator of k-edge-ccs. Each set of returned nodes
+       will have k-edge-connectivity in the graph G.
+
+    See Also
+    --------
+    :func:`local_edge_connectivity`
+    :func:`k_edge_subgraphs` : similar to this function, but the subgraph
+        defined by the nodes must also have k-edge-connectivity.
+    :func:`k_components` : similar to this function, but uses node-connectivity
+        instead of edge-connectivity
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is a multigraph.
+
+    ValueError:
+        If k is less than 1
+
+    Notes
+    -----
+    Attempts to use the most efficient implementation available based on k.
+    If k=1, this is simply strongly connected components for directed graphs
+    and connected components for undirected graphs.
+    If k=2 on an undirected graph, an efficient bridge connected component
+    algorithm from [1]_ is run, based on the chain decomposition.
+    Otherwise, the algorithm from [2]_ is used.
+
+    Examples
+    --------
+    >>> import itertools as it
+    >>> from networkx.utils import pairwise
+    >>> paths = [
+    ...     (1, 2, 4, 3, 1, 4),
+    ...     (5, 6, 7, 8, 5, 7, 8, 6),
+    ... ]
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from(it.chain(*paths))
+    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
+    >>> # note this returns {1, 4} unlike k_edge_subgraphs
+    >>> sorted(map(sorted, nx.k_edge_components(G, k=3)))
+    [[1, 4], [2], [3], [5, 6, 7, 8]]
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29
+    .. [2] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
+       k-edge-connected components.
+       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
+    """
+    # Compute k-edge-ccs using the most efficient algorithms available.
+    if k < 1:
+        raise ValueError("k cannot be less than 1")
+    if G.is_directed():
+        if k == 1:
+            return nx.strongly_connected_components(G)
+        else:
+            # TODO: investigate https://arxiv.org/abs/1412.6466 for k=2
+            aux_graph = EdgeComponentAuxGraph.construct(G)
+            return aux_graph.k_edge_components(k)
+    else:
+        if k == 1:
+            return nx.connected_components(G)
+        elif k == 2:
+            return bridge_components(G)
+        else:
+            aux_graph = EdgeComponentAuxGraph.construct(G)
+            return aux_graph.k_edge_components(k)
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def k_edge_subgraphs(G, k):
+    """Generates nodes in each maximal k-edge-connected subgraph in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    k : Integer
+        Desired edge connectivity
+
+    Returns
+    -------
+    k_edge_subgraphs : a generator of k-edge-subgraphs
+        Each k-edge-subgraph is a maximal set of nodes that defines a subgraph
+        of G that is k-edge-connected.
+
+    See Also
+    --------
+    :func:`edge_connectivity`
+    :func:`k_edge_components` : similar to this function, but nodes only
+        need to have k-edge-connectivity within the graph G and the subgraphs
+        might not be k-edge-connected.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is a multigraph.
+
+    ValueError:
+        If k is less than 1
+
+    Notes
+    -----
+    Attempts to use the most efficient implementation available based on k.
+    If k=1, or k=2 and the graph is undirected, then this simply calls
+    `k_edge_components`. Otherwise the algorithm from [1]_ is used.
+
+    Examples
+    --------
+    >>> import itertools as it
+    >>> from networkx.utils import pairwise
+    >>> paths = [
+    ...     (1, 2, 4, 3, 1, 4),
+    ...     (5, 6, 7, 8, 5, 7, 8, 6),
+    ... ]
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from(it.chain(*paths))
+    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
+    >>> # note this does not return {1, 4} unlike k_edge_components
+    >>> sorted(map(sorted, nx.k_edge_subgraphs(G, k=3)))
+    [[1], [2], [3], [4], [5, 6, 7, 8]]
+
+    References
+    ----------
+    .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs
+       from a large graph. ACM International Conference on Extending Database
+       Technology 2012 480--491.
+       https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf
+    """
+    if k < 1:
+        raise ValueError("k cannot be less than 1")
+    if G.is_directed():
+        if k <= 1:
+            # For directed graphs,
+            # when k == 1, k-edge-ccs and k-edge-subgraphs are the same
+            return k_edge_components(G, k)
+        else:
+            return _k_edge_subgraphs_nodes(G, k)
+    else:
+        if k <= 2:
+            # For undirected graphs,
+            # when k <= 2, k-edge-ccs and k-edge-subgraphs are the same
+            return k_edge_components(G, k)
+        else:
+            return _k_edge_subgraphs_nodes(G, k)
+
+
+def _k_edge_subgraphs_nodes(G, k):
+    """Helper to get the nodes from the subgraphs.
+
+    This allows k_edge_subgraphs to return a generator.
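+
+    For example (illustrative only):
+
+    >>> G = nx.complete_graph(3)
+    >>> sorted(map(sorted, _k_edge_subgraphs_nodes(G, k=2)))
+    [[0, 1, 2]]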
+    """
+    for C in general_k_edge_subgraphs(G, k):
+        yield set(C.nodes())
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def bridge_components(G):
+    """Finds all bridge-connected components of G.
+
+    Parameters
+    ----------
+    G : NetworkX undirected graph
+
+    Returns
+    -------
+    bridge_components : a generator of 2-edge-connected components
+
+    See Also
+    --------
+    :func:`k_edge_subgraphs` : this function is a special case for an
+        undirected graph where k=2.
+    :func:`biconnected_components` : similar to this function, but is defined
+        using 2-node-connectivity instead of 2-edge-connectivity.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the input graph is directed or a multigraph.
+
+    Notes
+    -----
+    Bridge-connected components are also known as 2-edge-connected components.
+
+    Examples
+    --------
+    >>> # The barbell graph with parameter zero has a single bridge
+    >>> G = nx.barbell_graph(5, 0)
+    >>> from networkx.algorithms.connectivity.edge_kcomponents import bridge_components
+    >>> sorted(map(sorted, bridge_components(G)))
+    [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]
+    """
+    H = G.copy()
+    H.remove_edges_from(nx.bridges(G))
+    yield from nx.connected_components(H)
+
+
+class EdgeComponentAuxGraph:
+    r"""A simple algorithm to find all k-edge-connected components in a graph.
+
+    Constructing the auxiliary graph (which may take some time) allows for the
+    k-edge-ccs to be found in linear time for arbitrary k.
+
+    Notes
+    -----
+    This implementation is based on [1]_. The idea is to construct an auxiliary
+    graph from which the k-edge-ccs can be extracted in linear time. The
+    auxiliary graph is constructed in $O(|V|\cdot F)$ operations, where F is the
+    complexity of max flow. Querying the components takes an additional $O(|V|)$
+    operations. This algorithm can be slow for large graphs, but it handles an
+    arbitrary k and works for both directed and undirected inputs.
+
+    The undirected case for k=1 is exactly connected components.
+    The undirected case for k=2 is exactly bridge connected components.
+    The directed case for k=1 is exactly strongly connected components.
+
+    References
+    ----------
+    .. [1] Wang, Tianhao, et al. (2015) A simple algorithm for finding all
+       k-edge-connected components.
+       http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264
+
+    Examples
+    --------
+    >>> import itertools as it
+    >>> from networkx.utils import pairwise
+    >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
+    >>> # Build an interesting graph with multiple levels of k-edge-ccs
+    >>> paths = [
+    ...     (1, 2, 3, 4, 1, 3, 4, 2),  # a 3-edge-cc (a 4 clique)
+    ...     (5, 6, 7, 5),  # a 2-edge-cc (a 3 clique)
+    ...     (1, 5),  # combine first two ccs into a 1-edge-cc
+    ...     (0,),  # add an additional disconnected 1-edge-cc
+    ... ]
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from(it.chain(*paths))
+    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
+    >>> # Constructing the AuxGraph takes about O(n ** 4)
+    >>> aux_graph = EdgeComponentAuxGraph.construct(G)
+    >>> # Once constructed, querying takes O(n)
+    >>> sorted(map(sorted, aux_graph.k_edge_components(k=1)))
+    [[0], [1, 2, 3, 4, 5, 6, 7]]
+    >>> sorted(map(sorted, aux_graph.k_edge_components(k=2)))
+    [[0], [1, 2, 3, 4], [5, 6, 7]]
+    >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
+    [[0], [1, 2, 3, 4], [5], [6], [7]]
+    >>> sorted(map(sorted, aux_graph.k_edge_components(k=4)))
+    [[0], [1], [2], [3], [4], [5], [6], [7]]
+
+    The auxiliary graph is primarily used for k-edge-ccs but it
+    can also speed up the queries of k-edge-subgraphs by refining the
+    search space.
+
+    >>> import itertools as it
+    >>> from networkx.utils import pairwise
+    >>> from networkx.algorithms.connectivity import EdgeComponentAuxGraph
+    >>> paths = [
+    ...     (1, 2, 4, 3, 1, 4),
+    ... ]
+    >>> G = nx.Graph()
+    >>> G.add_nodes_from(it.chain(*paths))
+    >>> G.add_edges_from(it.chain(*[pairwise(path) for path in paths]))
+    >>> aux_graph = EdgeComponentAuxGraph.construct(G)
+    >>> sorted(map(sorted, aux_graph.k_edge_subgraphs(k=3)))
+    [[1], [2], [3], [4]]
+    >>> sorted(map(sorted, aux_graph.k_edge_components(k=3)))
+    [[1, 4], [2], [3]]
+    """
+
+    # @not_implemented_for('multigraph')  # TODO: fix decor for classmethods
+    @classmethod
+    def construct(EdgeComponentAuxGraph, G):
+        """Builds an auxiliary graph encoding edge-connectivity between nodes.
+
+        Notes
+        -----
+        Given G=(V, E), initialize an empty auxiliary graph A.
+        Choose an arbitrary source node s. Initialize a set N of available
+        nodes (that can be used as the sink). The algorithm picks an
+        arbitrary node t from N - {s}, and then computes the minimum st-cut
+        (S, T) with value w. If G is directed the minimum of the st-cut or
+        the ts-cut is used instead. Then, the edge (s, t) is added to the
+        auxiliary graph with weight w. The algorithm is called recursively
+        first using S as the available nodes and s as the source, and then
+        using T and t. Recursion stops when the source is the only available
+        node.
+
+        Parameters
+        ----------
+        G : NetworkX graph
+        """
+        # workaround for classmethod decorator
+        not_implemented_for("multigraph")(lambda G: G)(G)
+
+        def _recursive_build(H, A, source, avail):
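+            # A rough picture of one step (an illustrative sketch, not from
+            # the paper): on nx.path_graph(3) with source 0 and sink 2, the
+            # minimum cut has value 1 and may split S={0, 1}, T={2}; the
+            # auxiliary tree A then gains the edge (0, 2) with weight 1 and
+            # the recursion continues on S (source 0) and on T (source 2).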
+            # Terminate once the flow has been computed to every node.
+            if {source} == avail:
+                return
+            # pick an arbitrary node as the sink
+            sink = arbitrary_element(avail - {source})
+            # find the minimum cut and its weight
+            value, (S, T) = nx.minimum_cut(H, source, sink)
+            if H.is_directed():
+                # check if the reverse direction has a smaller cut
+                value_, (T_, S_) = nx.minimum_cut(H, sink, source)
+                if value_ < value:
+                    value, S, T = value_, S_, T_
+            # add edge with weight of cut to the aux graph
+            A.add_edge(source, sink, weight=value)
+            # recursively call until all but one node is used
+            _recursive_build(H, A, source, avail.intersection(S))
+            _recursive_build(H, A, sink, avail.intersection(T))
+
+        # Copy input to ensure all edges have unit capacity
+        H = G.__class__()
+        H.add_nodes_from(G.nodes())
+        H.add_edges_from(G.edges(), capacity=1)
+
+        # A is the auxiliary graph to be constructed
+        # It is a weighted undirected tree
+        A = nx.Graph()
+
+        # Pick an arbitrary node as the source
+        if H.number_of_nodes() > 0:
+            source = arbitrary_element(H.nodes())
+            # Initialize a set of elements that can be chosen as the sink
+            avail = set(H.nodes())
+
+            # This constructs A
+            _recursive_build(H, A, source, avail)
+
+        # This class is a container that holds the auxiliary graph A and
+        # provides access to the k_edge_components function.
+        self = EdgeComponentAuxGraph()
+        self.A = A
+        self.H = H
+        return self
+
+    def k_edge_components(self, k):
+        """Queries the auxiliary graph for k-edge-connected components.
+
+        Parameters
+        ----------
+        k : Integer
+            Desired edge connectivity
+
+        Returns
+        -------
+        k_edge_components : a generator of k-edge-ccs
+
+        Notes
+        -----
+        Given the auxiliary graph, the k-edge-connected components can be
+        determined in linear time by removing all edges with weights less than
+        k from the auxiliary graph. The resulting connected components are the
+        k-edge-ccs in the original graph.
+        """
+        if k < 1:
+            raise ValueError("k cannot be less than 1")
+        A = self.A
+        # "traverse the auxiliary graph A and delete all edges with weights less
+        # than k"
+        aux_weights = nx.get_edge_attributes(A, "weight")
+        # Create a relevant graph with the auxiliary edges with weights >= k
+        R = nx.Graph()
+        R.add_nodes_from(A.nodes())
+        R.add_edges_from(e for e, w in aux_weights.items() if w >= k)
+
+        # Return the nodes that are k-edge-connected in the original graph
+        yield from nx.connected_components(R)
+
+    def k_edge_subgraphs(self, k):
+        """Queries the auxiliary graph for k-edge-connected subgraphs.
+
+        Parameters
+        ----------
+        k : Integer
+            Desired edge connectivity
+
+        Returns
+        -------
+        k_edge_subgraphs : a generator of k-edge-subgraphs
+
+        Notes
+        -----
+        Refines the k-edge-ccs into k-edge-subgraphs. The running time is more
+        than $O(|V|)$.
+
+        For single values of k it is faster to use `nx.k_edge_subgraphs`.
+        But for multiple values of k, it can be faster to build AuxGraph and
+        then use this method.
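+
+        A small illustration (the barbell graph here is chosen only as an
+        example):
+
+        >>> G = nx.barbell_graph(4, 0)
+        >>> aux_graph = EdgeComponentAuxGraph.construct(G)
+        >>> sorted(map(sorted, aux_graph.k_edge_subgraphs(k=3)))
+        [[0, 1, 2, 3], [4, 5, 6, 7]]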
+ """ + if k < 1: + raise ValueError("k cannot be less than 1") + H = self.H + A = self.A + # "traverse the auxiliary graph A and delete all edges with weights less + # than k" + aux_weights = nx.get_edge_attributes(A, "weight") + # Create a relevant graph with the auxiliary edges with weights >= k + R = nx.Graph() + R.add_nodes_from(A.nodes()) + R.add_edges_from(e for e, w in aux_weights.items() if w >= k) + + # Return the components whose subgraphs are k-edge-connected + for cc in nx.connected_components(R): + if len(cc) < k: + # Early return optimization + for node in cc: + yield {node} + else: + # Call subgraph solution to refine the results + C = H.subgraph(cc) + yield from k_edge_subgraphs(C, k) + + +def _low_degree_nodes(G, k, nbunch=None): + """Helper for finding nodes with degree less than k.""" + # Nodes with degree less than k cannot be k-edge-connected. + if G.is_directed(): + # Consider both in and out degree in the directed case + seen = set() + for node, degree in G.out_degree(nbunch): + if degree < k: + seen.add(node) + yield node + for node, degree in G.in_degree(nbunch): + if node not in seen and degree < k: + seen.add(node) + yield node + else: + # Only the degree matters in the undirected case + for node, degree in G.degree(nbunch): + if degree < k: + yield node + + +def _high_degree_components(G, k): + """Helper for filtering components that can't be k-edge-connected. + + Removes and generates each node with degree less than k. Then generates + remaining components where all nodes have degree at least k. + """ + # Iteratively remove parts of the graph that are not k-edge-connected + H = G.copy() + singletons = set(_low_degree_nodes(H, k)) + while singletons: + # Only search neighbors of removed nodes + nbunch = set(it.chain.from_iterable(map(H.neighbors, singletons))) + nbunch.difference_update(singletons) + H.remove_nodes_from(singletons) + for node in singletons: + yield {node} + singletons = set(_low_degree_nodes(H, k, nbunch)) + + # Note: remaining connected components may not be k-edge-connected + if G.is_directed(): + yield from nx.strongly_connected_components(H) + else: + yield from nx.connected_components(H) + + +@nx._dispatchable(returns_graph=True) +def general_k_edge_subgraphs(G, k): + """General algorithm to find all maximal k-edge-connected subgraphs in `G`. + + Parameters + ---------- + G : nx.Graph + Graph in which all maximal k-edge-connected subgraphs will be found. + + k : int + + Yields + ------ + k_edge_subgraphs : Graph instances that are k-edge-subgraphs + Each k-edge-subgraph contains a maximal set of nodes that defines a + subgraph of `G` that is k-edge-connected. + + Notes + ----- + Implementation of the basic algorithm from [1]_. The basic idea is to find + a global minimum cut of the graph. If the cut value is at least k, then the + graph is a k-edge-connected subgraph and can be added to the results. + Otherwise, the cut is used to split the graph in two and the procedure is + applied recursively. If the graph is just a single node, then it is also + added to the results. At the end, each result is either guaranteed to be + a single node or a subgraph of G that is k-edge-connected. + + This implementation contains optimizations for reducing the number of calls + to max-flow, but there are other optimizations in [1]_ that could be + implemented. + + References + ---------- + .. [1] Zhou, Liu, et al. (2012) Finding maximal k-edge-connected subgraphs + from a large graph. 
ACM International Conference on Extending Database + Technology 2012 480-–491. + https://openproceedings.org/2012/conf/edbt/ZhouLYLCL12.pdf + + Examples + -------- + >>> from networkx.utils import pairwise + >>> paths = [ + ... (11, 12, 13, 14, 11, 13, 14, 12), # a 4-clique + ... (21, 22, 23, 24, 21, 23, 24, 22), # another 4-clique + ... # connect the cliques with high degree but low connectivity + ... (50, 13), + ... (12, 50, 22), + ... (13, 102, 23), + ... (14, 101, 24), + ... ] + >>> G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) + >>> sorted(len(k_sg) for k_sg in k_edge_subgraphs(G, k=3)) + [1, 1, 1, 4, 4] + """ + if k < 1: + raise ValueError("k cannot be less than 1") + + # Node pruning optimization (incorporates early return) + # find_ccs is either connected_components/strongly_connected_components + find_ccs = partial(_high_degree_components, k=k) + + # Quick return optimization + if G.number_of_nodes() < k: + for node in G.nodes(): + yield G.subgraph([node]).copy() + return + + # Intermediate results + R0 = {G.subgraph(cc).copy() for cc in find_ccs(G)} + # Subdivide CCs in the intermediate results until they are k-conn + while R0: + G1 = R0.pop() + if G1.number_of_nodes() == 1: + yield G1 + else: + # Find a global minimum cut + cut_edges = nx.minimum_edge_cut(G1) + cut_value = len(cut_edges) + if cut_value < k: + # G1 is not k-edge-connected, so subdivide it + G1.remove_edges_from(cut_edges) + for cc in find_ccs(G1): + R0.add(G1.subgraph(cc).copy()) + else: + # Otherwise we found a k-edge-connected subgraph + yield G1 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py new file mode 100644 index 0000000..e2f1ba2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcomponents.py @@ -0,0 +1,223 @@ +""" +Moody and White algorithm for k-components +""" + +from collections import defaultdict +from itertools import combinations +from operator import itemgetter + +import networkx as nx + +# Define the default maximum flow function. +from networkx.algorithms.flow import edmonds_karp +from networkx.utils import not_implemented_for + +default_flow_func = edmonds_karp + +__all__ = ["k_components"] + + +@not_implemented_for("directed") +@nx._dispatchable +def k_components(G, flow_func=None): + r"""Returns the k-component structure of a graph G. + + A `k`-component is a maximal subgraph of a graph G that has, at least, + node connectivity `k`: we need to remove at least `k` nodes to break it + into more components. `k`-components have an inherent hierarchical + structure because they are nested in terms of connectivity: a connected + graph can contain several 2-components, each of which can contain + one or more 3-components, and so forth. + + Parameters + ---------- + G : NetworkX graph + + flow_func : function + Function to perform the underlying flow computations. Default value + :meth:`edmonds_karp`. This function performs better in sparse graphs with + right tailed degree distributions. :meth:`shortest_augmenting_path` will + perform better in denser graphs. + + Returns + ------- + k_components : dict + Dictionary with all connectivity levels `k` in the input Graph as keys + and a list of sets of nodes that form a k-component of level `k` as + values. + + Raises + ------ + NetworkXNotImplemented + If the input graph is directed. 
+ + Examples + -------- + >>> # Petersen graph has 10 nodes and it is triconnected, thus all + >>> # nodes are in a single component on all three connectivity levels + >>> G = nx.petersen_graph() + >>> k_components = nx.k_components(G) + + Notes + ----- + Moody and White [1]_ (appendix A) provide an algorithm for identifying + k-components in a graph, which is based on Kanevsky's algorithm [2]_ + for finding all minimum-size node cut-sets of a graph (implemented in + :meth:`all_node_cuts` function): + + 1. Compute node connectivity, k, of the input graph G. + + 2. Identify all k-cutsets at the current level of connectivity using + Kanevsky's algorithm. + + 3. Generate new graph components based on the removal of + these cutsets. Nodes in a cutset belong to both sides + of the induced cut. + + 4. If the graph is neither complete nor trivial, return to 1; + else end. + + This implementation also uses some heuristics (see [3]_ for details) + to speed up the computation. + + See also + -------- + node_connectivity + all_node_cuts + biconnected_components : special case of this function when k=2 + k_edge_components : similar to this function, but uses edge-connectivity + instead of node-connectivity + + References + ---------- + .. [1] Moody, J. and D. White (2003). Social cohesion and embeddedness: + A hierarchical conception of social groups. + American Sociological Review 68(1), 103--28. + http://www2.asanet.org/journals/ASRFeb03MoodyWhite.pdf + + .. [2] Kanevsky, A. (1993). Finding all minimum-size separating vertex + sets in a graph. Networks 23(6), 533--541. + http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract + + .. [3] Torrents, J. and F. Ferraro (2015). Structural Cohesion: + Visualization and Heuristics for Fast Computation. + https://arxiv.org/pdf/1503.04476v1 + + """ + # Dictionary with connectivity level (k) as keys and a list of + # sets of nodes that form a k-component as values. Note that + # k-components can overlap (but only k - 1 nodes). + k_components = defaultdict(list) + # Define default flow function + if flow_func is None: + flow_func = default_flow_func + # Bicomponents as a base to check for higher order k-components + for component in nx.connected_components(G): + # isolated nodes have connectivity 0 + comp = set(component) + if len(comp) > 1: + k_components[1].append(comp) + bicomponents = [G.subgraph(c) for c in nx.biconnected_components(G)] + for bicomponent in bicomponents: + bicomp = set(bicomponent) + # avoid considering dyads as bicomponents + if len(bicomp) > 2: + k_components[2].append(bicomp) + for B in bicomponents: + if len(B) <= 2: + continue + k = nx.node_connectivity(B, flow_func=flow_func) + if k > 2: + k_components[k].append(set(B)) + # Perform cuts in a DFS like order. + cuts = list(nx.all_node_cuts(B, k=k, flow_func=flow_func)) + stack = [(k, _generate_partition(B, cuts, k))] + while stack: + (parent_k, partition) = stack[-1] + try: + nodes = next(partition) + C = B.subgraph(nodes) + this_k = nx.node_connectivity(C, flow_func=flow_func) + if this_k > parent_k and this_k > 2: + k_components[this_k].append(set(C)) + cuts = list(nx.all_node_cuts(C, k=this_k, flow_func=flow_func)) + if cuts: + stack.append((this_k, _generate_partition(C, cuts, this_k))) + except StopIteration: + stack.pop() + + # This is necessary because k-components may only be reported at their + # maximum k level. 
But we want to return a dictionary in which keys are
+    # connectivity levels and values are lists of sets of components, without
+    # skipping any connectivity level. Also, it's possible that subsets of
+    # an already detected k-component appear at a level k. Checking for this
+    # in the while loop above penalizes the common case. Thus we also have to
+    # _consolidate all connectivity levels in _reconstruct_k_components.
+    return _reconstruct_k_components(k_components)
+
+
+def _consolidate(sets, k):
+    """Merge sets that share k or more elements.
+
+    See: http://rosettacode.org/wiki/Set_consolidation
+
+    The iterative python implementation posted there is
+    faster than this because of the overhead of building a
+    Graph and calling nx.connected_components, but it is not
+    clear whether we can use it in NetworkX because there
+    is no licence for the code.
+    """
+    G = nx.Graph()
+    nodes = dict(enumerate(sets))
+    G.add_nodes_from(nodes)
+    G.add_edges_from(
+        (u, v) for u, v in combinations(nodes, 2) if len(nodes[u] & nodes[v]) >= k
+    )
+    for component in nx.connected_components(G):
+        yield set.union(*[nodes[n] for n in component])
+
+
+def _generate_partition(G, cuts, k):
+    def has_nbrs_in_partition(G, node, partition):
+        return any(n in partition for n in G[node])
+
+    components = []
+    nodes = {n for n, d in G.degree() if d > k} - {n for cut in cuts for n in cut}
+    H = G.subgraph(nodes)
+    for cc in nx.connected_components(H):
+        component = set(cc)
+        for cut in cuts:
+            for node in cut:
+                if has_nbrs_in_partition(G, node, cc):
+                    component.add(node)
+        if len(component) < G.order():
+            components.append(component)
+    yield from _consolidate(components, k + 1)
+
+
+def _reconstruct_k_components(k_comps):
+    result = {}
+    max_k = max(k_comps)
+    for k in reversed(range(1, max_k + 1)):
+        if k == max_k:
+            result[k] = list(_consolidate(k_comps[k], k))
+        elif k not in k_comps:
+            result[k] = list(_consolidate(result[k + 1], k))
+        else:
+            nodes_at_k = set.union(*k_comps[k])
+            to_add = [c for c in result[k + 1] if any(n not in nodes_at_k for n in c)]
+            if to_add:
+                result[k] = list(_consolidate(k_comps[k] + to_add, k))
+            else:
+                result[k] = list(_consolidate(k_comps[k], k))
+    return result
+
+
+def build_k_number_dict(kcomps):
+    result = {}
+    for k, comps in sorted(kcomps.items(), key=itemgetter(0)):
+        for comp in comps:
+            for node in comp:
+                result[node] = k
+    return result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py
new file mode 100644
index 0000000..de26f4c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/kcutsets.py
@@ -0,0 +1,235 @@
+"""
+Kanevsky all minimum node k cutsets algorithm.
+"""
+
+import copy
+from collections import defaultdict
+from itertools import combinations
+from operator import itemgetter
+
+import networkx as nx
+from networkx.algorithms.flow import (
+    build_residual_network,
+    edmonds_karp,
+    shortest_augmenting_path,
+)
+
+from .utils import build_auxiliary_node_connectivity
+
+default_flow_func = edmonds_karp
+
+
+__all__ = ["all_node_cuts"]
+
+
+@nx._dispatchable
+def all_node_cuts(G, k=None, flow_func=None):
+    r"""Returns all minimum k cutsets of an undirected graph G.
+
+    This implementation is based on Kanevsky's algorithm [1]_ for finding all
+    minimum-size node cut-sets of an undirected graph G; i.e., the set (or
+    sets) of nodes of cardinality equal to the node connectivity of G
that, if removed, break G into two or more connected components.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph
+
+    k : Integer
+        Node connectivity of the input graph. If k is None, then it is
+        computed. Default value: None.
+
+    flow_func : function
+        Function to perform the underlying flow computations. Default value is
+        :func:`~networkx.algorithms.flow.edmonds_karp`. This function performs
+        better in sparse graphs with right tailed degree distributions.
+        :func:`~networkx.algorithms.flow.shortest_augmenting_path` will
+        perform better in denser graphs.
+
+    Returns
+    -------
+    cuts : a generator of node cutsets
+        Each node cutset has cardinality equal to the node connectivity of
+        the input graph.
+
+    Examples
+    --------
+    >>> # A two-dimensional grid graph has 4 cutsets of cardinality 2
+    >>> G = nx.grid_2d_graph(5, 5)
+    >>> cutsets = list(nx.all_node_cuts(G))
+    >>> len(cutsets)
+    4
+    >>> all(2 == len(cutset) for cutset in cutsets)
+    True
+    >>> nx.node_connectivity(G)
+    2
+
+    Notes
+    -----
+    This implementation is based on the sequential algorithm for finding all
+    minimum-size separating vertex sets in a graph [1]_. The main idea is to
+    compute minimum cuts using local maximum flow computations among a set
+    of nodes of highest degree and all other non-adjacent nodes in the Graph.
+    Once we find a minimum cut, we add an edge between the high degree
+    node and the target node of the local maximum flow computation to make
+    sure that we will not find that minimum cut again.
+
+    See also
+    --------
+    node_connectivity
+    edmonds_karp
+    shortest_augmenting_path
+
+    References
+    ----------
+    .. [1] Kanevsky, A. (1993). Finding all minimum-size separating vertex
+       sets in a graph. Networks 23(6), 533--541.
+       http://onlinelibrary.wiley.com/doi/10.1002/net.3230230604/abstract
+
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Input graph is disconnected.")
+
+    # Address some corner cases first.
+    # For complete Graphs
+
+    if nx.density(G) == 1:
+        yield from ()
+        return
+
+    # Initialize data structures.
+    # Keep track of the cuts already computed so we do not repeat them.
+    seen = []
+    # Even-Tarjan reduction is what we call auxiliary digraph
+    # for node connectivity.
+    H = build_auxiliary_node_connectivity(G)
+    H_nodes = H.nodes  # for speed
+    mapping = H.graph["mapping"]
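+    # Node-splitting convention used by the auxiliary digraph H (a brief
+    # sketch for orientation; see build_auxiliary_node_connectivity for the
+    # authoritative details): every original node n is split into
+    # f"{mapping[n]}A" -> f"{mapping[n]}B" with an internal edge of capacity
+    # 1, so routing flow through that edge models "using" node n, and each
+    # original edge (u, v) becomes external edges between "B" and "A" copies.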
+    # Keep a copy of original predecessors, H will be modified later.
+    # Shallow copy is enough.
+    original_H_pred = copy.copy(H._pred)
+    R = build_residual_network(H, "capacity")
+    kwargs = {"capacity": "capacity", "residual": R}
+    # Define default flow function
+    if flow_func is None:
+        flow_func = default_flow_func
+    if flow_func is shortest_augmenting_path:
+        kwargs["two_phase"] = True
+    # Begin the actual algorithm
+    # step 1: Find node connectivity k of G
+    if k is None:
+        k = nx.node_connectivity(G, flow_func=flow_func)
+    # step 2:
+    # Find k nodes with top degree, call it X:
+    X = {n for n, d in sorted(G.degree(), key=itemgetter(1), reverse=True)[:k]}
+    # Check if X is a k-node-cutset
+    if _is_separating_set(G, X):
+        seen.append(X)
+        yield X
+
+    for x in X:
+        # step 3: Compute local connectivity flow of x with all other
+        # non adjacent nodes in G
+        non_adjacent = set(G) - {x} - set(G[x])
+        for v in non_adjacent:
+            # step 4: compute maximum flow in an Even-Tarjan reduction H of G
+            # and step 5: build the associated residual network R
+            R = flow_func(H, f"{mapping[x]}B", f"{mapping[v]}A", **kwargs)
+            flow_value = R.graph["flow_value"]
+
+            if flow_value == k:
+                # Find the nodes incident to the flow.
+                E1 = flowed_edges = [
+                    (u, w) for (u, w, d) in R.edges(data=True) if d["flow"] != 0
+                ]
+                VE1 = incident_nodes = {n for edge in E1 for n in edge}
+                # Remove saturated edges from the residual network.
+                # Note that reversed edges are introduced with capacity 0
+                # in the residual graph and they need to be removed too.
+                saturated_edges = [
+                    (u, w, d)
+                    for (u, w, d) in R.edges(data=True)
+                    if d["capacity"] == d["flow"] or d["capacity"] == 0
+                ]
+                R.remove_edges_from(saturated_edges)
+                R_closure = nx.transitive_closure(R)
+                # step 6: shrink the strongly connected components of
+                # residual flow network R and call it L.
+                L = nx.condensation(R)
+                cmap = L.graph["mapping"]
+                inv_cmap = defaultdict(list)
+                for n, scc in cmap.items():
+                    inv_cmap[scc].append(n)
+                # Find the incident nodes in the condensed graph.
+                VE1 = {cmap[n] for n in VE1}
+                # step 7: Compute all antichains of L;
+                # they map to closed sets in H.
+                # Any edge in H that links a closed set is part of a cutset.
+                for antichain in nx.antichains(L):
+                    # Only antichains that are subsets of incident nodes count.
+                    # Lemma 8 in reference.
+                    if not set(antichain).issubset(VE1):
+                        continue
+                    # Nodes in an antichain of the condensation graph of
+                    # the residual network map to a closed set of nodes that
+                    # define a node partition of the auxiliary digraph H
+                    # through taking all of antichain's predecessors in the
+                    # transitive closure.
+                    S = set()
+                    for scc in antichain:
+                        S.update(inv_cmap[scc])
+                    S_ancestors = set()
+                    for n in S:
+                        S_ancestors.update(R_closure._pred[n])
+                    S.update(S_ancestors)
+                    if f"{mapping[x]}B" not in S or f"{mapping[v]}A" in S:
+                        continue
+                    # Find the cutset that links the node partition (S, ~S) in H
+                    cutset = set()
+                    for u in S:
+                        cutset.update((u, w) for w in original_H_pred[u] if w not in S)
+                    # The edges in H that form the cutset are internal edges
+                    # (ie edges that represent a node of the original graph G)
+                    if any(H_nodes[u]["id"] != H_nodes[w]["id"] for u, w in cutset):
+                        continue
+                    node_cut = {H_nodes[u]["id"] for u, _ in cutset}
+
+                    if len(node_cut) == k:
+                        # The cut is invalid if it includes internal edges of
+                        # end nodes. The other half of Lemma 8 in ref.
+                        if x in node_cut or v in node_cut:
+                            continue
+                        if node_cut not in seen:
+                            yield node_cut
+                            seen.append(node_cut)
+
+                # Add an edge (x, v) to make sure that we do not
+                # find this cutset again.
This is equivalent + # of adding the edge in the input graph + # G.add_edge(x, v) and then regenerate H and R: + # Add edges to the auxiliary digraph. + # See build_residual_network for convention we used + # in residual graphs. + H.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + H.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) + # Add edges to the residual network. + R.add_edge(f"{mapping[x]}B", f"{mapping[v]}A", capacity=1) + R.add_edge(f"{mapping[v]}A", f"{mapping[x]}B", capacity=0) + R.add_edge(f"{mapping[v]}B", f"{mapping[x]}A", capacity=1) + R.add_edge(f"{mapping[x]}A", f"{mapping[v]}B", capacity=0) + + # Add again the saturated edges to reuse the residual network + R.add_edges_from(saturated_edges) + + +def _is_separating_set(G, cut): + """Assumes that the input graph is connected""" + if len(cut) == len(G) - 1: + return True + + H = nx.restricted_view(G, cut, []) + if nx.is_connected(H): + return False + return True diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py new file mode 100644 index 0000000..29604b1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/stoerwagner.py @@ -0,0 +1,152 @@ +""" +Stoer-Wagner minimum cut algorithm. +""" + +from itertools import islice + +import networkx as nx + +from ...utils import BinaryHeap, arbitrary_element, not_implemented_for + +__all__ = ["stoer_wagner"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def stoer_wagner(G, weight="weight", heap=BinaryHeap): + r"""Returns the weighted minimum edge cut using the Stoer-Wagner algorithm. + + Determine the minimum edge cut of a connected graph using the + Stoer-Wagner algorithm. In weighted cases, all weights must be + nonnegative. + + The running time of the algorithm depends on the type of heaps used: + + ============== ============================================= + Type of heap Running time + ============== ============================================= + Binary heap $O(n (m + n) \log n)$ + Fibonacci heap $O(nm + n^2 \log n)$ + Pairing heap $O(2^{2 \sqrt{\log \log n}} nm + n^2 \log n)$ + ============== ============================================= + + Parameters + ---------- + G : NetworkX graph + Edges of the graph are expected to have an attribute named by the + weight parameter below. If this attribute is not present, the edge is + considered to have unit weight. + + weight : string + Name of the weight attribute of the edges. If the attribute is not + present, unit weight is assumed. Default value: 'weight'. + + heap : class + Type of heap to be used in the algorithm. It should be a subclass of + :class:`MinHeap` or implement a compatible interface. + + If a stock heap implementation is to be used, :class:`BinaryHeap` is + recommended over :class:`PairingHeap` for Python implementations without + optimized attribute accesses (e.g., CPython) despite a slower + asymptotic running time. For Python implementations with optimized + attribute accesses (e.g., PyPy), :class:`PairingHeap` provides better + performance. Default value: :class:`BinaryHeap`. + + Returns + ------- + cut_value : integer or float + The sum of weights of edges in a minimum cut. + + partition : pair of node lists + A partitioning of the nodes that defines a minimum cut. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or a multigraph. 
+
+    NetworkXError
+        If the graph has fewer than two nodes, is not connected, or has a
+        negative-weighted edge.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edge("x", "a", weight=3)
+    >>> G.add_edge("x", "b", weight=1)
+    >>> G.add_edge("a", "c", weight=3)
+    >>> G.add_edge("b", "c", weight=5)
+    >>> G.add_edge("b", "d", weight=4)
+    >>> G.add_edge("d", "e", weight=2)
+    >>> G.add_edge("c", "y", weight=2)
+    >>> G.add_edge("e", "y", weight=3)
+    >>> cut_value, partition = nx.stoer_wagner(G)
+    >>> cut_value
+    4
+    """
+    n = len(G)
+    if n < 2:
+        raise nx.NetworkXError("graph has less than two nodes.")
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("graph is not connected.")
+
+    # Make a copy of the graph for internal use.
+    G = nx.Graph(
+        (u, v, {"weight": e.get(weight, 1)}) for u, v, e in G.edges(data=True) if u != v
+    )
+    G.__networkx_cache__ = None  # Disable caching
+
+    for u, v, e in G.edges(data=True):
+        if e["weight"] < 0:
+            raise nx.NetworkXError("graph has a negative-weighted edge.")
+
+    cut_value = float("inf")
+    nodes = set(G)
+    contractions = []  # contracted node pairs
+
+    # Repeatedly pick a pair of nodes to contract until only one node is left.
+    for i in range(n - 1):
+        # Pick an arbitrary node u and create a set A = {u}.
+        u = arbitrary_element(G)
+        A = {u}
+        # Repeatedly pick the node "most tightly connected" to A and add it to
+        # A. The tightness of connectivity of a node not in A is defined by
+        # the sum of the weights of the edges connecting it to nodes in A.
+        h = heap()  # min-heap emulating a max-heap
+        for v, e in G[u].items():
+            h.insert(v, -e["weight"])
+        # Repeat until all but one node has been added to A.
+        for j in range(n - i - 2):
+            u = h.pop()[0]
+            A.add(u)
+            for v, e in G[u].items():
+                if v not in A:
+                    h.insert(v, h.get(v, 0) - e["weight"])
+        # A and the remaining node v define a "cut of the phase". There is a
+        # minimum cut of the original graph that is also a cut of the phase.
+        # Due to contractions in earlier phases, v may in fact represent
+        # multiple nodes in the original graph.
+        v, w = h.min()
+        w = -w
+        if w < cut_value:
+            cut_value = w
+            best_phase = i
+        # Contract v and the last node added to A.
+        contractions.append((u, v))
+        for w, e in G[v].items():
+            if w != u:
+                if w not in G[u]:
+                    G.add_edge(u, w, weight=e["weight"])
+                else:
+                    G[u][w]["weight"] += e["weight"]
+        G.remove_node(v)
+
+    # Recover the optimal partitioning from the contractions.
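+    # Recovery in miniature (an illustrative note on the code below, with
+    # hypothetical node names): if contractions == [("a", "b"), ("a", "c")]
+    # and best_phase == 0, only the contractions *before* phase 0 are
+    # replayed (none), so the cut side containing that phase's last node is
+    # just {"b"}; every other original node falls on the other side.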
+ G = nx.Graph(islice(contractions, best_phase)) + v = contractions[best_phase][1] + G.add_node(v) + reachable = set(nx.single_source_shortest_path_length(G, v)) + partition = (list(reachable), list(nodes - reachable)) + + return cut_value, partition diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..28b7cde Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_connectivity.cpython-312.pyc new file mode 100644 index 0000000..a3997b6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_cuts.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_cuts.cpython-312.pyc new file mode 100644 index 0000000..9e0bdbb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_cuts.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-312.pyc new file mode 100644 index 0000000..2176140 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_disjoint_paths.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-312.pyc new file mode 100644 index 0000000..e8d510e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_augmentation.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-312.pyc new file mode 100644 index 0000000..922ec56 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_edge_kcomponents.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-312.pyc new file mode 100644 index 0000000..f0d4bf2 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcomponents.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcutsets.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcutsets.cpython-312.pyc new file mode 100644 index 0000000..9c0a4c6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_kcutsets.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-312.pyc new file mode 100644 index 0000000..f1551d7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/__pycache__/test_stoer_wagner.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py new file mode 100644 index 0000000..7aef247 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_connectivity.py @@ -0,0 +1,421 @@ +import itertools + +import pytest + +import networkx as nx +from networkx.algorithms import flow +from networkx.algorithms.connectivity import ( + local_edge_connectivity, + local_node_connectivity, +) + +flow_funcs = [ + flow.boykov_kolmogorov, + flow.dinitz, + flow.edmonds_karp, + flow.preflow_push, + flow.shortest_augmenting_path, +] + + +# helper functions for tests + + +def _generate_no_biconnected(max_attempts=50): + attempts = 0 + while True: + G = nx.fast_gnp_random_graph(100, 0.0575, seed=42) + if nx.is_connected(G) and not nx.is_biconnected(G): + attempts = 0 + yield G + else: + if attempts >= max_attempts: + msg = f"Tried {max_attempts} times: no suitable Graph." + raise Exception(msg) + else: + attempts += 1 + + +def test_average_connectivity(): + # figure 1 from: + # Beineke, L., O. Oellermann, and R. Pippert (2002). The average + # connectivity of a graph. 
Discrete mathematics 252(1-3), 31-45 + # http://www.sciencedirect.com/science/article/pii/S0012365X01001807 + G1 = nx.path_graph(3) + G1.add_edges_from([(1, 3), (1, 4)]) + G2 = nx.path_graph(3) + G2.add_edges_from([(1, 3), (1, 4), (0, 3), (0, 4), (3, 4)]) + G3 = nx.Graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.average_node_connectivity(G1, **kwargs) == 1, errmsg + assert nx.average_node_connectivity(G2, **kwargs) == 2.2, errmsg + assert nx.average_node_connectivity(G3, **kwargs) == 0, errmsg + + +def test_average_connectivity_directed(): + G = nx.DiGraph([(1, 3), (1, 4), (1, 5)]) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.average_node_connectivity(G) == 0.25, errmsg + + +def test_articulation_points(): + Ggen = _generate_no_biconnected() + for flow_func in flow_funcs: + for i in range(3): + G = next(Ggen) + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G, flow_func=flow_func) == 1, errmsg + + +def test_brandes_erlebach(): + # Figure 1 chapter 7: Connectivity + # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == local_edge_connectivity(G, 1, 11, **kwargs), errmsg + assert 3 == nx.edge_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == local_node_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == nx.node_connectivity(G, 1, 11, **kwargs), errmsg + assert 2 == nx.edge_connectivity(G, **kwargs), errmsg + assert 2 == nx.node_connectivity(G, **kwargs), errmsg + if flow_func is flow.preflow_push: + assert 3 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg + else: + assert 2 == nx.edge_connectivity(G, 1, 11, cutoff=2, **kwargs), errmsg + + +def test_white_harary_1(): + # Figure 1b white and harary (2001) + # https://doi.org/10.1111/0081-1750.00098 + # A graph with high adhesion (edge connectivity) and low cohesion + # (vertex connectivity) + G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) + G.remove_node(7) + for i in range(4, 7): + G.add_edge(0, i) + G = nx.disjoint_union(G, nx.complete_graph(4)) + G.remove_node(G.order() - 1) + for i in range(7, 10): + G.add_edge(0, i) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_white_harary_2(): + # Figure 8 white and harary (2001) + # https://doi.org/10.1111/0081-1750.00098 + G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) + G.add_edge(0, 4) + # kappa <= lambda <= delta + assert 3 == min(nx.core_number(G).values()) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_complete_graphs(): + for n in range(5, 20, 5): + for flow_func in flow_funcs: + G = nx.complete_graph(n) + errmsg = f"Assertion 
failed in function: {flow_func.__name__}" + assert n - 1 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert n - 1 == nx.node_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg + assert n - 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + assert n - 1 == nx.edge_connectivity( + G.to_directed(), flow_func=flow_func + ), errmsg + + +def test_empty_graphs(): + for k in range(5, 25, 5): + G = nx.empty_graph(k) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 0 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 0 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_petersen(): + G = nx.petersen_graph() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_tutte(): + G = nx.tutte_graph() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_dodecahedral(): + G = nx.dodecahedral_graph() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 3 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 3 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_octahedral(): + G = nx.octahedral_graph() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 4 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 4 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_icosahedral(): + G = nx.icosahedral_graph() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 5 == nx.node_connectivity(G, flow_func=flow_func), errmsg + assert 5 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + + +def test_missing_source(): + G = nx.path_graph(4) + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 10, 1, flow_func=flow_func + ) + + +def test_missing_target(): + G = nx.path_graph(4) + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, nx.node_connectivity, G, 1, 10, flow_func=flow_func + ) + + +def test_edge_missing_source(): + G = nx.path_graph(4) + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 10, 1, flow_func=flow_func + ) + + +def test_edge_missing_target(): + G = nx.path_graph(4) + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, nx.edge_connectivity, G, 1, 10, flow_func=flow_func + ) + + +def test_not_weakly_connected(): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G) == 0, errmsg + assert nx.edge_connectivity(G) == 0, errmsg + + +def test_not_connected(): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert nx.node_connectivity(G) == 0, errmsg + assert nx.edge_connectivity(G) == 0, errmsg + + +def test_directed_edge_connectivity(): + G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction + D = 
nx.cycle_graph(10).to_directed() # 2 reciprocal edges + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert 1 == nx.edge_connectivity(G, flow_func=flow_func), errmsg + assert 1 == local_edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg + assert 1 == nx.edge_connectivity(G, 1, 4, flow_func=flow_func), errmsg + assert 2 == nx.edge_connectivity(D, flow_func=flow_func), errmsg + assert 2 == local_edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg + assert 2 == nx.edge_connectivity(D, 1, 4, flow_func=flow_func), errmsg + + +def test_cutoff(): + G = nx.complete_graph(5) + for local_func in [local_edge_connectivity, local_node_connectivity]: + for flow_func in flow_funcs: + if flow_func is flow.preflow_push: + # cutoff is not supported by preflow_push + continue + for cutoff in [3, 2, 1]: + result = local_func(G, 0, 4, flow_func=flow_func, cutoff=cutoff) + assert cutoff == result, f"cutoff error in {flow_func.__name__}" + + +def test_invalid_auxiliary(): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, local_node_connectivity, G, 0, 3, auxiliary=G) + + +def test_interface_only_source(): + G = nx.complete_graph(5) + for interface_func in [nx.node_connectivity, nx.edge_connectivity]: + pytest.raises(nx.NetworkXError, interface_func, G, s=0) + + +def test_interface_only_target(): + G = nx.complete_graph(5) + for interface_func in [nx.node_connectivity, nx.edge_connectivity]: + pytest.raises(nx.NetworkXError, interface_func, G, t=3) + + +def test_edge_connectivity_flow_vs_stoer_wagner(): + graph_funcs = [nx.icosahedral_graph, nx.octahedral_graph, nx.dodecahedral_graph] + for graph_func in graph_funcs: + G = graph_func() + assert nx.stoer_wagner(G)[0] == nx.edge_connectivity(G) + + +class TestAllPairsNodeConnectivity: + @classmethod + def setup_class(cls): + cls.path = nx.path_graph(7) + cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph()) + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.gnp = nx.gnp_random_graph(30, 0.1, seed=42) + cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True, seed=42) + cls.K20 = nx.complete_graph(20) + cls.K10 = nx.complete_graph(10) + cls.K5 = nx.complete_graph(5) + cls.G_list = [ + cls.path, + cls.directed_path, + cls.cycle, + cls.directed_cycle, + cls.gnp, + cls.directed_gnp, + cls.K10, + cls.K5, + cls.K20, + ] + + def test_cycles(self): + K_undir = nx.all_pairs_node_connectivity(self.cycle) + for source in K_undir: + for target, k in K_undir[source].items(): + assert k == 2 + K_dir = nx.all_pairs_node_connectivity(self.directed_cycle) + for source in K_dir: + for target, k in K_dir[source].items(): + assert k == 1 + + def test_complete(self): + for G in [self.K10, self.K5, self.K20]: + K = nx.all_pairs_node_connectivity(G) + for source in K: + for target, k in K[source].items(): + assert k == len(G) - 1 + + def test_paths(self): + K_undir = nx.all_pairs_node_connectivity(self.path) + for source in K_undir: + for target, k in K_undir[source].items(): + assert k == 1 + K_dir = nx.all_pairs_node_connectivity(self.directed_path) + for source in K_dir: + for target, k in K_dir[source].items(): + if source < target: + assert k == 1 + else: + assert k == 0 + + def test_all_pairs_connectivity_nbunch(self): + G = nx.complete_graph(5) + nbunch = [0, 2, 3] + C = nx.all_pairs_node_connectivity(G, nbunch=nbunch) + assert len(C) == len(nbunch) + + def test_all_pairs_connectivity_icosahedral(self): + G = nx.icosahedral_graph() + C = 
nx.all_pairs_node_connectivity(G) + assert all(5 == C[u][v] for u, v in itertools.combinations(G, 2)) + + def test_all_pairs_connectivity(self): + G = nx.Graph() + nodes = [0, 1, 2, 3] + nx.add_path(G, nodes) + A = {n: {} for n in G} + for u, v in itertools.combinations(nodes, 2): + A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) + C = nx.all_pairs_node_connectivity(G) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) + + def test_all_pairs_connectivity_directed(self): + G = nx.DiGraph() + nodes = [0, 1, 2, 3] + nx.add_path(G, nodes) + A = {n: {} for n in G} + for u, v in itertools.permutations(nodes, 2): + A[u][v] = nx.node_connectivity(G, u, v) + C = nx.all_pairs_node_connectivity(G) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) + + def test_all_pairs_connectivity_nbunch_combinations(self): + G = nx.complete_graph(5) + nbunch = [0, 2, 3] + A = {n: {} for n in nbunch} + for u, v in itertools.combinations(nbunch, 2): + A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) + C = nx.all_pairs_node_connectivity(G, nbunch=nbunch) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) + + def test_all_pairs_connectivity_nbunch_iter(self): + G = nx.complete_graph(5) + nbunch = [0, 2, 3] + A = {n: {} for n in nbunch} + for u, v in itertools.combinations(nbunch, 2): + A[u][v] = A[v][u] = nx.node_connectivity(G, u, v) + C = nx.all_pairs_node_connectivity(G, nbunch=iter(nbunch)) + assert sorted((k, sorted(v)) for k, v in A.items()) == sorted( + (k, sorted(v)) for k, v in C.items() + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py new file mode 100644 index 0000000..964aff9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_cuts.py @@ -0,0 +1,309 @@ +import pytest + +import networkx as nx +from networkx.algorithms import flow +from networkx.algorithms.connectivity import minimum_st_edge_cut, minimum_st_node_cut +from networkx.utils import arbitrary_element + +flow_funcs = [ + flow.boykov_kolmogorov, + flow.dinitz, + flow.edmonds_karp, + flow.preflow_push, + flow.shortest_augmenting_path, +] + +# Tests for node and edge cutsets + + +def _generate_no_biconnected(max_attempts=50): + attempts = 0 + while True: + G = nx.fast_gnp_random_graph(100, 0.0575, seed=42) + if nx.is_connected(G) and not nx.is_biconnected(G): + attempts = 0 + yield G + else: + if attempts >= max_attempts: + msg = f"Tried {attempts} times: no suitable Graph." + raise Exception(msg) + else: + attempts += 1 + + +def test_articulation_points(): + Ggen = _generate_no_biconnected() + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + for i in range(1): # change 1 to 3 or more for more realizations. 
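+ # A connected graph that is not biconnected has node connectivity 1, so each + # minimum node cut found below is a single articulation point.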
+ G = next(Ggen) + cut = nx.minimum_node_cut(G, flow_func=flow_func) + assert len(cut) == 1, errmsg + assert cut.pop() in set(nx.articulation_points(G)), errmsg + + +def test_brandes_erlebach_book(): + # Figure 1 chapter 7: Connectivity + # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf + G = nx.Graph() + G.add_edges_from( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (2, 3), + (2, 6), + (3, 4), + (3, 6), + (4, 6), + (4, 7), + (5, 7), + (6, 8), + (6, 9), + (7, 8), + (7, 10), + (8, 11), + (9, 10), + (9, 11), + (10, 11), + ] + ) + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge cutsets + assert 3 == len(nx.minimum_edge_cut(G, 1, 11, **kwargs)), errmsg + edge_cut = nx.minimum_edge_cut(G, **kwargs) + # Node 5 has only two edges + assert 2 == len(edge_cut), errmsg + H = G.copy() + H.remove_edges_from(edge_cut) + assert not nx.is_connected(H), errmsg + # node cuts + assert {6, 7} == minimum_st_node_cut(G, 1, 11, **kwargs), errmsg + assert {6, 7} == nx.minimum_node_cut(G, 1, 11, **kwargs), errmsg + node_cut = nx.minimum_node_cut(G, **kwargs) + assert 2 == len(node_cut), errmsg + H = G.copy() + H.remove_nodes_from(node_cut) + assert not nx.is_connected(H), errmsg + + +def test_white_harary_paper(): + # Figure 1b white and harary (2001) + # https://doi.org/10.1111/0081-1750.00098 + # A graph with high adhesion (edge connectivity) and low cohesion + # (node connectivity) + G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) + G.remove_node(7) + for i in range(4, 7): + G.add_edge(0, i) + G = nx.disjoint_union(G, nx.complete_graph(4)) + G.remove_node(G.order() - 1) + for i in range(7, 10): + G.add_edge(0, i) + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge cuts + edge_cut = nx.minimum_edge_cut(G, **kwargs) + assert 3 == len(edge_cut), errmsg + H = G.copy() + H.remove_edges_from(edge_cut) + assert not nx.is_connected(H), errmsg + # node cuts + node_cut = nx.minimum_node_cut(G, **kwargs) + assert {0} == node_cut, errmsg + H = G.copy() + H.remove_nodes_from(node_cut) + assert not nx.is_connected(H), errmsg + + +def test_petersen_cutset(): + G = nx.petersen_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge cuts + edge_cut = nx.minimum_edge_cut(G, **kwargs) + assert 3 == len(edge_cut), errmsg + H = G.copy() + H.remove_edges_from(edge_cut) + assert not nx.is_connected(H), errmsg + # node cuts + node_cut = nx.minimum_node_cut(G, **kwargs) + assert 3 == len(node_cut), errmsg + H = G.copy() + H.remove_nodes_from(node_cut) + assert not nx.is_connected(H), errmsg + + +def test_octahedral_cutset(): + G = nx.octahedral_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge cuts + edge_cut = nx.minimum_edge_cut(G, **kwargs) + assert 4 == len(edge_cut), errmsg + H = G.copy() + H.remove_edges_from(edge_cut) + assert not nx.is_connected(H), errmsg + # node cuts + node_cut = nx.minimum_node_cut(G, **kwargs) + assert 4 == len(node_cut), errmsg + H = G.copy() + H.remove_nodes_from(node_cut) + assert not nx.is_connected(H), errmsg + + +def test_icosahedral_cutset(): + G = nx.icosahedral_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: 
{flow_func.__name__}" + # edge cuts + edge_cut = nx.minimum_edge_cut(G, **kwargs) + assert 5 == len(edge_cut), errmsg + H = G.copy() + H.remove_edges_from(edge_cut) + assert not nx.is_connected(H), errmsg + # node cuts + node_cut = nx.minimum_node_cut(G, **kwargs) + assert 5 == len(node_cut), errmsg + H = G.copy() + H.remove_nodes_from(node_cut) + assert not nx.is_connected(H), errmsg + + +def test_node_cutset_exception(): + G = nx.Graph() + G.add_edges_from([(1, 2), (3, 4)]) + for flow_func in flow_funcs: + pytest.raises(nx.NetworkXError, nx.minimum_node_cut, G, flow_func=flow_func) + + +def test_node_cutset_random_graphs(): + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + for i in range(3): + G = nx.fast_gnp_random_graph(50, 0.25, seed=42) + if not nx.is_connected(G): + ccs = iter(nx.connected_components(G)) + start = arbitrary_element(next(ccs)) + G.add_edges_from((start, arbitrary_element(c)) for c in ccs) + cutset = nx.minimum_node_cut(G, flow_func=flow_func) + assert nx.node_connectivity(G) == len(cutset), errmsg + G.remove_nodes_from(cutset) + assert not nx.is_connected(G), errmsg + + +def test_edge_cutset_random_graphs(): + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + for i in range(3): + G = nx.fast_gnp_random_graph(50, 0.25, seed=42) + if not nx.is_connected(G): + ccs = iter(nx.connected_components(G)) + start = arbitrary_element(next(ccs)) + G.add_edges_from((start, arbitrary_element(c)) for c in ccs) + cutset = nx.minimum_edge_cut(G, flow_func=flow_func) + assert nx.edge_connectivity(G) == len(cutset), errmsg + G.remove_edges_from(cutset) + assert not nx.is_connected(G), errmsg + + +def test_empty_graphs(): + G = nx.Graph() + D = nx.DiGraph() + for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXPointlessConcept, interface_func, G, flow_func=flow_func + ) + pytest.raises( + nx.NetworkXPointlessConcept, interface_func, D, flow_func=flow_func + ) + + +def test_unbounded(): + G = nx.complete_graph(5) + for flow_func in flow_funcs: + assert 4 == len(minimum_st_edge_cut(G, 1, 4, flow_func=flow_func)) + + +def test_missing_source(): + G = nx.path_graph(4) + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, interface_func, G, 10, 1, flow_func=flow_func + ) + + +def test_missing_target(): + G = nx.path_graph(4) + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + pytest.raises( + nx.NetworkXError, interface_func, G, 1, 10, flow_func=flow_func + ) + + +def test_not_weakly_connected(): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) + + +def test_not_connected(): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + pytest.raises(nx.NetworkXError, interface_func, G, flow_func=flow_func) + + +def tests_min_cut_complete(): + G = nx.complete_graph(5) + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + assert 4 == len(interface_func(G, flow_func=flow_func)) + + +def tests_min_cut_complete_directed(): + G = nx.complete_graph(5) + G = 
G.to_directed() + for interface_func in [nx.minimum_edge_cut, nx.minimum_node_cut]: + for flow_func in flow_funcs: + assert 4 == len(interface_func(G, flow_func=flow_func)) + + +def tests_minimum_st_node_cut(): + G = nx.Graph() + G.add_nodes_from([0, 1, 2, 3, 7, 8, 11, 12]) + G.add_edges_from([(7, 11), (1, 11), (1, 12), (12, 8), (0, 1)]) + nodelist = minimum_st_node_cut(G, 7, 11) + assert nodelist == set() + + +def test_invalid_auxiliary(): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, minimum_st_node_cut, G, 0, 3, auxiliary=G) + + +def test_interface_only_source(): + G = nx.complete_graph(5) + for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: + pytest.raises(nx.NetworkXError, interface_func, G, s=0) + + +def test_interface_only_target(): + G = nx.complete_graph(5) + for interface_func in [nx.minimum_node_cut, nx.minimum_edge_cut]: + pytest.raises(nx.NetworkXError, interface_func, G, t=3) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py new file mode 100644 index 0000000..0c0fad9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_disjoint_paths.py @@ -0,0 +1,249 @@ +import pytest + +import networkx as nx +from networkx.algorithms import flow +from networkx.utils import pairwise + +flow_funcs = [ + flow.boykov_kolmogorov, + flow.edmonds_karp, + flow.dinitz, + flow.preflow_push, + flow.shortest_augmenting_path, +] + + +def is_path(G, path): + return all(v in G[u] for u, v in pairwise(path)) + + +def are_edge_disjoint_paths(G, paths): + if not paths: + return False + for path in paths: + assert is_path(G, path) + paths_edges = [list(pairwise(p)) for p in paths] + num_of_edges = sum(len(e) for e in paths_edges) + num_unique_edges = len(set.union(*[set(es) for es in paths_edges])) + if num_of_edges == num_unique_edges: + return True + return False + + +def are_node_disjoint_paths(G, paths): + if not paths: + return False + for path in paths: + assert is_path(G, path) + # first and last nodes are source and target + st = {paths[0][0], paths[0][-1]} + num_of_nodes = len([n for path in paths for n in path if n not in st]) + num_unique_nodes = len({n for path in paths for n in path if n not in st}) + if num_of_nodes == num_unique_nodes: + return True + return False + + +def test_graph_from_pr_2053(): + G = nx.Graph() + G.add_edges_from( + [ + ("A", "B"), + ("A", "D"), + ("A", "F"), + ("A", "G"), + ("B", "C"), + ("B", "D"), + ("B", "G"), + ("C", "D"), + ("C", "E"), + ("C", "Z"), + ("D", "E"), + ("D", "F"), + ("E", "F"), + ("E", "Z"), + ("F", "Z"), + ("G", "Z"), + ] + ) + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_paths = list(nx.edge_disjoint_paths(G, "A", "Z", **kwargs)) + assert are_edge_disjoint_paths(G, edge_paths), errmsg + assert nx.edge_connectivity(G, "A", "Z") == len(edge_paths), errmsg + # node disjoint paths + node_paths = list(nx.node_disjoint_paths(G, "A", "Z", **kwargs)) + assert are_node_disjoint_paths(G, node_paths), errmsg + assert nx.node_connectivity(G, "A", "Z") == len(node_paths), errmsg + + +def test_florentine_families(): + G = nx.florentine_families_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_dpaths = 
list(nx.edge_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert nx.edge_connectivity(G, "Medici", "Strozzi") == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, "Medici", "Strozzi", **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert nx.node_connectivity(G, "Medici", "Strozzi") == len(node_dpaths), errmsg + + +def test_karate(): + G = nx.karate_club_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 33, **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert nx.edge_connectivity(G, 0, 33) == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, 0, 33, **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert nx.node_connectivity(G, 0, 33) == len(node_dpaths), errmsg + + +def test_petersen_disjoint_paths(): + G = nx.petersen_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 3 == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 3 == len(node_dpaths), errmsg + + +def test_octahedral_disjoint_paths(): + G = nx.octahedral_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 5, **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 4 == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, 0, 5, **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 4 == len(node_dpaths), errmsg + + +def test_icosahedral_disjoint_paths(): + G = nx.icosahedral_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + # edge disjoint paths + edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert 5 == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert 5 == len(node_dpaths), errmsg + + +def test_cutoff_disjoint_paths(): + G = nx.icosahedral_graph() + for flow_func in flow_funcs: + kwargs = {"flow_func": flow_func} + errmsg = f"Assertion failed in function: {flow_func.__name__}" + for cutoff in [2, 4]: + kwargs["cutoff"] = cutoff + # edge disjoint paths + edge_dpaths = list(nx.edge_disjoint_paths(G, 0, 6, **kwargs)) + assert are_edge_disjoint_paths(G, edge_dpaths), errmsg + assert cutoff == len(edge_dpaths), errmsg + # node disjoint paths + node_dpaths = list(nx.node_disjoint_paths(G, 0, 6, **kwargs)) + assert are_node_disjoint_paths(G, node_dpaths), errmsg + assert cutoff == len(node_dpaths), errmsg + + +def test_missing_source_edge_paths(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.edge_disjoint_paths(G, 10, 1)) + + +def 
test_missing_source_node_paths(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.node_disjoint_paths(G, 10, 1)) + + +def test_missing_target_edge_paths(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.edge_disjoint_paths(G, 1, 10)) + + +def test_missing_target_node_paths(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + list(nx.node_disjoint_paths(G, 1, 10)) + + +def test_not_weakly_connected_edges(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) + + +def test_not_weakly_connected_nodes(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) + + +def test_not_connected_edges(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) + + +def test_not_connected_nodes(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) + + +def test_isolated_edges(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + G.add_node(1) + nx.add_path(G, [4, 5]) + list(nx.edge_disjoint_paths(G, 1, 5)) + + +def test_isolated_nodes(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + G.add_node(1) + nx.add_path(G, [4, 5]) + list(nx.node_disjoint_paths(G, 1, 5)) + + +def test_invalid_auxiliary(): + with pytest.raises(nx.NetworkXError): + G = nx.complete_graph(5) + list(nx.node_disjoint_paths(G, 0, 3, auxiliary=G)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py new file mode 100644 index 0000000..b580e5b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_augmentation.py @@ -0,0 +1,502 @@ +import itertools as it +import random + +import pytest + +import networkx as nx +from networkx.algorithms.connectivity import k_edge_augmentation +from networkx.algorithms.connectivity.edge_augmentation import ( + _unpack_available_edges, + collapse, + complement_edges, + is_k_edge_connected, + is_locally_k_edge_connected, +) +from networkx.utils import pairwise + +# This should be set to the largest k for which an efficient algorithm is +# explicitly defined. +MAX_EFFICIENT_K = 2 + + +def tarjan_bridge_graph(): + # graph from tarjan paper + # RE Tarjan - "A note on finding the bridges of a graph" + # Information Processing Letters, 1974 - Elsevier + # doi:10.1016/0020-0190(74)90003-9. 
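+ # pairwise() expands each tuple below into a chain of edges: the closed walks + # in ccs form the 2-edge-connected blocks, which the bridge edges then join.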
+ # define 2-connected components and bridges + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] + bridges = [(4, 8), (3, 5), (3, 17)] + G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) + return G + + +def test_weight_key(): + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4, 5, 6, 7, 8, 9]) + G.add_edges_from([(3, 8), (1, 2), (2, 3)]) + impossible = {(3, 6), (3, 9)} + rng = random.Random(0) + avail_uv = list(set(complement_edges(G)) - impossible) + avail = [(u, v, {"cost": rng.random()}) for u, v in avail_uv] + + _augment_and_check(G, k=1) + _augment_and_check(G, k=1, avail=avail_uv) + _augment_and_check(G, k=1, avail=avail, weight="cost") + + _check_augmentations(G, avail, weight="cost") + + +def test_is_k_edge_connected_exceptions(): + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.DiGraph(), k=0) + pytest.raises(nx.NetworkXNotImplemented, is_k_edge_connected, nx.MultiGraph(), k=0) + pytest.raises(ValueError, is_k_edge_connected, nx.Graph(), k=0) + + +def test_is_k_edge_connected(): + G = nx.barbell_graph(10, 0) + assert is_k_edge_connected(G, k=1) + assert not is_k_edge_connected(G, k=2) + + G = nx.Graph() + G.add_nodes_from([5, 15]) + assert not is_k_edge_connected(G, k=1) + assert not is_k_edge_connected(G, k=2) + + G = nx.complete_graph(5) + assert is_k_edge_connected(G, k=1) + assert is_k_edge_connected(G, k=2) + assert is_k_edge_connected(G, k=3) + assert is_k_edge_connected(G, k=4) + + G = nx.compose(nx.complete_graph([0, 1, 2]), nx.complete_graph([3, 4, 5])) + assert not is_k_edge_connected(G, k=1) + assert not is_k_edge_connected(G, k=2) + assert not is_k_edge_connected(G, k=3) + + +def test_is_locally_k_edge_connected_exceptions(): + pytest.raises( + nx.NetworkXNotImplemented, is_locally_k_edge_connected, nx.DiGraph(), 1, 2, k=0 + ) + pytest.raises( + nx.NetworkXNotImplemented, + is_locally_k_edge_connected, + nx.MultiGraph(), + 1, + 2, + k=0, + ) + pytest.raises(ValueError, is_locally_k_edge_connected, nx.Graph(), 1, 2, k=0) + + +def test_is_locally_k_edge_connected(): + G = nx.barbell_graph(10, 0) + assert is_locally_k_edge_connected(G, 5, 15, k=1) + assert not is_locally_k_edge_connected(G, 5, 15, k=2) + + G = nx.Graph() + G.add_nodes_from([5, 15]) + assert not is_locally_k_edge_connected(G, 5, 15, k=2) + + +def test_null_graph(): + G = nx.Graph() + _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2) + + +def test_cliques(): + for n in range(1, 10): + G = nx.complete_graph(n) + _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2) + + +def test_clique_and_node(): + for n in range(1, 10): + G = nx.complete_graph(n) + G.add_node(n + 1) + _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2) + + +def test_point_graph(): + G = nx.Graph() + G.add_node(1) + _check_augmentations(G, max_k=MAX_EFFICIENT_K + 2) + + +def test_edgeless_graph(): + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4]) + _check_augmentations(G) + + +def test_invalid_k(): + G = nx.Graph() + pytest.raises(ValueError, list, k_edge_augmentation(G, k=-1)) + pytest.raises(ValueError, list, k_edge_augmentation(G, k=0)) + + +def test_unfeasible(): + G = tarjan_bridge_graph() + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=1, avail=[])) + + pytest.raises(nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[])) + + pytest.raises( + nx.NetworkXUnfeasible, list, k_edge_augmentation(G, k=2, avail=[(7, 9)]) + ) + + # partial solutions should not error if real solutions are infeasible + aug_edges = 
list(k_edge_augmentation(G, k=2, avail=[(7, 9)], partial=True)) + assert aug_edges == [(7, 9)] + + _check_augmentations(G, avail=[], max_k=MAX_EFFICIENT_K + 2) + + _check_augmentations(G, avail=[(7, 9)], max_k=MAX_EFFICIENT_K + 2) + + +def test_tarjan(): + G = tarjan_bridge_graph() + + aug_edges = set(_augment_and_check(G, k=2)[0]) + print(f"aug_edges = {aug_edges!r}") + # can't assert exact edge equality due to nondeterministic edge order, + # but we do know the size of the solution must be 3 + assert len(aug_edges) == 3 + + avail = [ + (9, 7), + (8, 5), + (2, 10), + (6, 13), + (11, 18), + (1, 17), + (2, 3), + (16, 17), + (18, 14), + (15, 14), + ] + aug_edges = set(_augment_and_check(G, avail=avail, k=2)[0]) + + # Can't assert exact length since approximation depends on the order of a + # dict traversal. + assert len(aug_edges) <= 3 * 2 + + _check_augmentations(G, avail) + + +def test_configuration(): + # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + seeds = [1001, 1002, 1003, 1004] + for seed in seeds: + deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) + G = nx.Graph(nx.configuration_model(deg_seq, seed=seed)) + G.remove_edges_from(nx.selfloop_edges(G)) + _check_augmentations(G) + + +def test_shell(): + # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425] + seeds = [18] + for seed in seeds: + constructor = [(12, 70, 0.8), (15, 40, 0.6)] + G = nx.random_shell_graph(constructor, seed=seed) + _check_augmentations(G) + + +def test_karate(): + G = nx.karate_club_graph() + _check_augmentations(G) + + +def test_star(): + G = nx.star_graph(3) + _check_augmentations(G) + + G = nx.star_graph(5) + _check_augmentations(G) + + G = nx.star_graph(10) + _check_augmentations(G) + + +def test_barbell(): + G = nx.barbell_graph(5, 0) + _check_augmentations(G) + + G = nx.barbell_graph(5, 2) + _check_augmentations(G) + + G = nx.barbell_graph(5, 3) + _check_augmentations(G) + + G = nx.barbell_graph(5, 4) + _check_augmentations(G) + + +def test_bridge(): + G = nx.Graph([(2393, 2257), (2393, 2685), (2685, 2257), (1758, 2257)]) + _check_augmentations(G) + + +def test_gnp_augmentation(): + rng = random.Random(0) + G = nx.gnp_random_graph(30, 0.005, seed=0) + # Randomly make edges available + avail = { + (u, v): 1 + rng.random() for u, v in complement_edges(G) if rng.random() < 0.25 + } + _check_augmentations(G, avail) + + +def _assert_solution_properties(G, aug_edges, avail_dict=None): + """Checks that aug_edges are consistently formatted""" + if avail_dict is not None: + assert all(e in avail_dict for e in aug_edges), ( + "when avail is specified aug-edges should be in avail" + ) + + # normalize each edge to a sorted tuple so uniqueness is order-insensitive + unique_aug = set(map(tuple, map(sorted, aug_edges))) + assert len(aug_edges) == len(unique_aug), "edges should be unique" + + assert not any(u == v for u, v in unique_aug), "should be no self-edges" + + assert not any(G.has_edge(u, v) for u, v in unique_aug), ( + "aug edges and G.edges should be disjoint" + ) + + +def _augment_and_check( + G, k, avail=None, weight=None, verbose=False, orig_k=None, max_aug_k=None +): + """ + Does one specific augmentation and checks for properties of the result + """ + if orig_k is None: + try: + orig_k = nx.edge_connectivity(G) + except nx.NetworkXPointlessConcept: + orig_k = 0 + info = {} + try: + if avail is not None: + # ensure avail is in dict form + avail_dict = dict(zip(*_unpack_available_edges(avail, weight=weight))) + else: + avail_dict = None + try: + # Find the augmentation 
if possible + generator = nx.k_edge_augmentation(G, k=k, weight=weight, avail=avail) + assert not isinstance(generator, list), "should always return an iter" + aug_edges = [] + for edge in generator: + aug_edges.append(edge) + except nx.NetworkXUnfeasible: + infeasible = True + info["infeasible"] = True + assert len(aug_edges) == 0, "should not generate anything if unfeasible" + + if avail is None: + n_nodes = G.number_of_nodes() + assert n_nodes <= k, ( + "unconstrained cases are only unfeasible if |V| <= k. " + f"Got |V|={n_nodes} and k={k}" + ) + else: + if max_aug_k is None: + G_aug_all = G.copy() + G_aug_all.add_edges_from(avail_dict.keys()) + try: + max_aug_k = nx.edge_connectivity(G_aug_all) + except nx.NetworkXPointlessConcept: + max_aug_k = 0 + + assert max_aug_k < k, ( + "avail should only be unfeasible if using all edges " + "does not achieve k-edge-connectivity" + ) + + # Test for a partial solution + partial_edges = list( + nx.k_edge_augmentation(G, k=k, weight=weight, partial=True, avail=avail) + ) + + info["n_partial_edges"] = len(partial_edges) + + if avail_dict is None: + assert set(partial_edges) == set(complement_edges(G)), ( + "unweighted partial solutions should be the complement" + ) + elif len(avail_dict) > 0: + H = G.copy() + + # Find the partial / full augmented connectivity + H.add_edges_from(partial_edges) + partial_conn = nx.edge_connectivity(H) + + H.add_edges_from(set(avail_dict.keys())) + full_conn = nx.edge_connectivity(H) + + # Full connectivity should be no better than our partial + # solution. + assert partial_conn == full_conn, ( + "adding more edges should not increase k-conn" + ) + + # Find the new edge-connectivity after adding the augmenting edges + aug_edges = partial_edges + else: + infeasible = False + + # Find the weight of the augmentation + num_edges = len(aug_edges) + if avail is not None: + total_weight = sum(avail_dict[e] for e in aug_edges) + else: + total_weight = num_edges + + info["total_weight"] = total_weight + info["num_edges"] = num_edges + + # Find the new edge-connectivity after adding the augmenting edges + G_aug = G.copy() + G_aug.add_edges_from(aug_edges) + try: + aug_k = nx.edge_connectivity(G_aug) + except nx.NetworkXPointlessConcept: + aug_k = 0 + info["aug_k"] = aug_k + + # Do checks + if not infeasible and orig_k < k: + assert info["aug_k"] >= k, f"connectivity should increase to k={k} or more" + + assert info["aug_k"] >= orig_k, "augmenting should never reduce connectivity" + + _assert_solution_properties(G, aug_edges, avail_dict) + + except Exception: + info["failed"] = True + print(f"edges = {list(G.edges())}") + print(f"nodes = {list(G.nodes())}") + print(f"aug_edges = {list(aug_edges)}") + print(f"info = {info}") + raise + else: + if verbose: + print(f"info = {info}") + + if infeasible: + aug_edges = None + return aug_edges, info + + +def _check_augmentations(G, avail=None, max_k=None, weight=None, verbose=False): + """Helper to check weighted/unweighted cases with multiple values of k""" + # Using all available edges, find the maximum edge-connectivity + try: + orig_k = nx.edge_connectivity(G) + except nx.NetworkXPointlessConcept: + orig_k = 0 + + if avail is not None: + all_aug_edges = _unpack_available_edges(avail, weight=weight)[0] + G_aug_all = G.copy() + G_aug_all.add_edges_from(all_aug_edges) + try: + max_aug_k = nx.edge_connectivity(G_aug_all) + except nx.NetworkXPointlessConcept: + max_aug_k = 0 + else: + max_aug_k = G.number_of_nodes() - 1 + + if max_k is None: + max_k = min(4, max_aug_k) + + avail_uniform = 
dict.fromkeys(complement_edges(G), 1) + + if verbose: + print("\n=== CHECK_AUGMENTATION ===") + print(f"G.number_of_nodes = {G.number_of_nodes()!r}") + print(f"G.number_of_edges = {G.number_of_edges()!r}") + print(f"max_k = {max_k!r}") + print(f"max_aug_k = {max_aug_k!r}") + print(f"orig_k = {orig_k!r}") + + # check augmentation for multiple values of k + for k in range(1, max_k + 1): + if verbose: + print("---------------") + print(f"Checking k = {k}") + + # Check the unweighted version + if verbose: + print("unweighted case") + aug_edges1, info1 = _augment_and_check(G, k=k, verbose=verbose, orig_k=orig_k) + + # Check that the weighted version with all available edges and uniform + # weights gives a similar solution to the unweighted case. + if verbose: + print("weighted uniform case") + aug_edges2, info2 = _augment_and_check( + G, + k=k, + avail=avail_uniform, + verbose=verbose, + orig_k=orig_k, + max_aug_k=G.number_of_nodes() - 1, + ) + + # Check the weighted version + if avail is not None: + if verbose: + print("weighted case") + aug_edges3, info3 = _augment_and_check( + G, + k=k, + avail=avail, + weight=weight, + verbose=verbose, + max_aug_k=max_aug_k, + orig_k=orig_k, + ) + + if aug_edges1 is not None: + # Check approximation ratios + if k == 1: + # when k=1, both solutions should be optimal + assert info2["total_weight"] == info1["total_weight"] + if k == 2: + # when k=2, the weighted version is an approximation + if orig_k == 0: + # the approximation ratio is 3 if G is not connected + assert info2["total_weight"] <= info1["total_weight"] * 3 + else: + # the approximation ratio is 2 if G was originally connected + assert info2["total_weight"] <= info1["total_weight"] * 2 + _check_unconstrained_bridge_property(G, info1) + + +def _check_unconstrained_bridge_property(G, info1): + # Check Theorem 5 from Eswaran and Tarjan (1975), Augmentation problems: + # a condensed forest with p leaves and q isolated nodes needs exactly + # ceil(p / 2) + q new edges to become bridge-connected (when p + q > 1). + import math + + bridge_ccs = list(nx.connectivity.bridge_components(G)) + # condense G into a forest C + C = collapse(G, bridge_ccs) + + p = len([n for n, d in C.degree() if d == 1])  # leaves + q = len([n for n, d in C.degree() if d == 0])  # isolated nodes + if p + q > 1: + size_target = math.ceil(p / 2) + q + size_aug = info1["num_edges"] + assert size_aug == size_target, ( + "augmentation size is different from what theory predicts" + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py new file mode 100644 index 0000000..f14ed64 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_edge_kcomponents.py @@ -0,0 +1,488 @@ +import itertools as it + +import pytest + +import networkx as nx +from networkx.algorithms.connectivity import EdgeComponentAuxGraph, bridge_components +from networkx.algorithms.connectivity.edge_kcomponents import general_k_edge_subgraphs +from networkx.utils import pairwise + +# ---------------- +# Helper functions +# ---------------- + + +def fset(list_of_sets): + """allows == to be used for lists of sets""" + return set(map(frozenset, list_of_sets)) + + +def _assert_subgraph_edge_connectivity(G, ccs_subgraph, k): + """ + tests properties of k-edge-connected subgraphs + + the actual edge connectivity should be no less than k unless the cc is a + single node. 
+ """ + for cc in ccs_subgraph: + C = G.subgraph(cc) + if len(cc) > 1: + connectivity = nx.edge_connectivity(C) + assert connectivity >= k + + +def _memo_connectivity(G, u, v, memo): + edge = (u, v) + if edge in memo: + return memo[edge] + if not G.is_directed(): + redge = (v, u) + if redge in memo: + return memo[redge] + memo[edge] = nx.edge_connectivity(G, *edge) + return memo[edge] + + +def _all_pairs_connectivity(G, cc, k, memo): + # Brute force check + for u, v in it.combinations(cc, 2): + # Use a memoization dict to save on computation + connectivity = _memo_connectivity(G, u, v, memo) + if G.is_directed(): + connectivity = min(connectivity, _memo_connectivity(G, v, u, memo)) + assert connectivity >= k + + +def _assert_local_cc_edge_connectivity(G, ccs_local, k, memo): + """ + tests properties of k-edge-connected components + + the local edge connectivity between each pair of nodes in the original + graph should be no less than k unless the cc is a single node. + """ + for cc in ccs_local: + if len(cc) > 1: + # Strategy for testing a bit faster: If the subgraph has high edge + # connectivity then it must have local connectivity + C = G.subgraph(cc) + connectivity = nx.edge_connectivity(C) + if connectivity < k: + # Otherwise do the brute force (with memoization) check + _all_pairs_connectivity(G, cc, k, memo) + + +# Helper function +def _check_edge_connectivity(G): + """ + Helper - generates all k-edge-components using the aux graph. Checks the + both local and subgraph edge connectivity of each cc. Also checks that + alternate methods of computing the k-edge-ccs generate the same result. + """ + # Construct the auxiliary graph that can be used to make each k-cc or k-sub + aux_graph = EdgeComponentAuxGraph.construct(G) + + # memoize the local connectivity in this graph + memo = {} + + for k in it.count(1): + # Test "local" k-edge-components and k-edge-subgraphs + ccs_local = fset(aux_graph.k_edge_components(k)) + ccs_subgraph = fset(aux_graph.k_edge_subgraphs(k)) + + # Check connectivity properties that should be guaranteed by the + # algorithms. + _assert_local_cc_edge_connectivity(G, ccs_local, k, memo) + _assert_subgraph_edge_connectivity(G, ccs_subgraph, k) + + if k == 1 or k == 2 and not G.is_directed(): + assert ccs_local == ccs_subgraph, ( + "Subgraphs and components should be the same when k == 1 or (k == 2 and not G.directed())" + ) + + if G.is_directed(): + # Test special case methods are the same as the aux graph + if k == 1: + alt_sccs = fset(nx.strongly_connected_components(G)) + assert alt_sccs == ccs_local, "k=1 failed alt" + assert alt_sccs == ccs_subgraph, "k=1 failed alt" + else: + # Test special case methods are the same as the aux graph + if k == 1: + alt_ccs = fset(nx.connected_components(G)) + assert alt_ccs == ccs_local, "k=1 failed alt" + assert alt_ccs == ccs_subgraph, "k=1 failed alt" + elif k == 2: + alt_bridge_ccs = fset(bridge_components(G)) + assert alt_bridge_ccs == ccs_local, "k=2 failed alt" + assert alt_bridge_ccs == ccs_subgraph, "k=2 failed alt" + # if new methods for k == 3 or k == 4 are implemented add them here + + # Check the general subgraph method works by itself + alt_subgraph_ccs = fset( + [set(C.nodes()) for C in general_k_edge_subgraphs(G, k=k)] + ) + assert alt_subgraph_ccs == ccs_subgraph, "alt subgraph method failed" + + # Stop once k is larger than all special case methods + # and we cannot break down ccs any further. 
+ if k > 2 and all(len(cc) == 1 for cc in ccs_local): + break + + +# ---------------- +# Misc tests +# ---------------- + + +def test_zero_k_exception(): + G = nx.Graph() + # functions that return generators error immediately + pytest.raises(ValueError, nx.k_edge_components, G, k=0) + pytest.raises(ValueError, nx.k_edge_subgraphs, G, k=0) + + # actual generators only error when you get the first item + aux_graph = EdgeComponentAuxGraph.construct(G) + pytest.raises(ValueError, list, aux_graph.k_edge_components(k=0)) + pytest.raises(ValueError, list, aux_graph.k_edge_subgraphs(k=0)) + + pytest.raises(ValueError, list, general_k_edge_subgraphs(G, k=0)) + + +def test_empty_input(): + G = nx.Graph() + assert [] == list(nx.k_edge_components(G, k=5)) + assert [] == list(nx.k_edge_subgraphs(G, k=5)) + + G = nx.DiGraph() + assert [] == list(nx.k_edge_components(G, k=5)) + assert [] == list(nx.k_edge_subgraphs(G, k=5)) + + +def test_not_implemented(): + G = nx.MultiGraph() + pytest.raises(nx.NetworkXNotImplemented, EdgeComponentAuxGraph.construct, G) + pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_components, G, k=2) + pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_subgraphs, G, k=2) + with pytest.raises(nx.NetworkXNotImplemented): + next(bridge_components(G)) + with pytest.raises(nx.NetworkXNotImplemented): + next(bridge_components(nx.DiGraph())) + + +def test_general_k_edge_subgraph_quick_return(): + # tests quick return optimization + G = nx.Graph() + G.add_node(0) + subgraphs = list(general_k_edge_subgraphs(G, k=1)) + assert len(subgraphs) == 1 + for subgraph in subgraphs: + assert subgraph.number_of_nodes() == 1 + + G.add_node(1) + subgraphs = list(general_k_edge_subgraphs(G, k=1)) + assert len(subgraphs) == 2 + for subgraph in subgraphs: + assert subgraph.number_of_nodes() == 1 + + +# ---------------- +# Undirected tests +# ---------------- + + +def test_random_gnp(): + # seeds = [1550709854, 1309423156, 4208992358, 2785630813, 1915069929] + seeds = [12, 13] + + for seed in seeds: + G = nx.gnp_random_graph(20, 0.2, seed=seed) + _check_edge_connectivity(G) + + +def test_configuration(): + # seeds = [2718183590, 2470619828, 1694705158, 3001036531, 2401251497] + seeds = [14, 15] + for seed in seeds: + deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) + G = nx.Graph(nx.configuration_model(deg_seq, seed=seed)) + G.remove_edges_from(nx.selfloop_edges(G)) + _check_edge_connectivity(G) + + +def test_shell(): + # seeds = [2057382236, 3331169846, 1840105863, 476020778, 2247498425] + seeds = [20] + for seed in seeds: + constructor = [(12, 70, 0.8), (15, 40, 0.6)] + G = nx.random_shell_graph(constructor, seed=seed) + _check_edge_connectivity(G) + + +def test_karate(): + G = nx.karate_club_graph() + _check_edge_connectivity(G) + + +def test_tarjan_bridge(): + # graph from tarjan paper + # RE Tarjan - "A note on finding the bridges of a graph" + # Information Processing Letters, 1974 - Elsevier + # doi:10.1016/0020-0190(74)90003-9. 
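+ # pairwise() expands each closed walk in ccs into a chain of edges; the + # entries of bridges become the bridge edges joining those blocks.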
+ # define 2-connected components and bridges + ccs = [ + (1, 2, 4, 3, 1, 4), + (5, 6, 7, 5), + (8, 9, 10, 8), + (17, 18, 16, 15, 17), + (11, 12, 14, 13, 11, 14), + ] + bridges = [(4, 8), (3, 5), (3, 17)] + G = nx.Graph(it.chain(*(pairwise(path) for path in ccs + bridges))) + _check_edge_connectivity(G) + + +def test_bridge_cc(): + # define 2-connected components and bridges + cc2 = [(1, 2, 4, 3, 1, 4), (8, 9, 10, 8), (11, 12, 13, 11)] + bridges = [(4, 8), (3, 5), (20, 21), (22, 23, 24)] + G = nx.Graph(it.chain(*(pairwise(path) for path in cc2 + bridges))) + bridge_ccs = fset(bridge_components(G)) + target_ccs = fset( + [{1, 2, 3, 4}, {5}, {8, 9, 10}, {11, 12, 13}, {20}, {21}, {22}, {23}, {24}] + ) + assert bridge_ccs == target_ccs + _check_edge_connectivity(G) + + +def test_undirected_aux_graph(): + # Graph similar to the one in + # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 + a, b, c, d, e, f, g, h, i = "abcdefghi" + paths = [ + (a, d, b, f, c), + (a, e, b), + (a, e, b, c, g, b, a), + (c, b), + (f, g, f), + (h, i), + ] + G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) + aux_graph = EdgeComponentAuxGraph.construct(G) + + components_1 = fset(aux_graph.k_edge_subgraphs(k=1)) + target_1 = fset([{a, b, c, d, e, f, g}, {h, i}]) + assert target_1 == components_1 + + # Check that the undirected case for k=1 agrees with CCs + alt_1 = fset(nx.k_edge_subgraphs(G, k=1)) + assert alt_1 == components_1 + + components_2 = fset(aux_graph.k_edge_subgraphs(k=2)) + target_2 = fset([{a, b, c, d, e, f, g}, {h}, {i}]) + assert target_2 == components_2 + + # Check that the undirected case for k=2 agrees with bridge components + alt_2 = fset(nx.k_edge_subgraphs(G, k=2)) + assert alt_2 == components_2 + + components_3 = fset(aux_graph.k_edge_subgraphs(k=3)) + target_3 = fset([{a}, {b, c, f, g}, {d}, {e}, {h}, {i}]) + assert target_3 == components_3 + + components_4 = fset(aux_graph.k_edge_subgraphs(k=4)) + target_4 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}]) + assert target_4 == components_4 + + _check_edge_connectivity(G) + + +def test_local_subgraph_difference(): + paths = [ + (11, 12, 13, 14, 11, 13, 14, 12), # first 4-clique + (21, 22, 23, 24, 21, 23, 24, 22), # second 4-clique + # paths connecting each node of the 4 cliques + (11, 101, 21), + (12, 102, 22), + (13, 103, 23), + (14, 104, 24), + ] + G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) + aux_graph = EdgeComponentAuxGraph.construct(G) + + # Each clique is returned separately in k-edge-subgraphs + subgraph_ccs = fset(aux_graph.k_edge_subgraphs(3)) + subgraph_target = fset( + [{101}, {102}, {103}, {104}, {21, 22, 23, 24}, {11, 12, 13, 14}] + ) + assert subgraph_ccs == subgraph_target + + # But in k-edge-ccs they are returned together + # because they are locally 3-edge-connected + local_ccs = fset(aux_graph.k_edge_components(3)) + local_target = fset([{101}, {102}, {103}, {104}, {11, 12, 13, 14, 21, 22, 23, 24}]) + assert local_ccs == local_target + + +def test_local_subgraph_difference_directed(): + dipaths = [(1, 2, 3, 4, 1), (1, 3, 1)] + G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) + + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) + + # Unlike undirected graphs, when k=2, for directed graphs there is a case + # where the k-edge-ccs are not the same as the k-edge-subgraphs. 
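+ # k-edge-components group nodes by local edge connectivity measured in the + # whole graph, while k-edge-subgraphs require the induced subgraph itself to + # be k-edge-connected.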
+ # (in undirected graphs, ccs and subgraphs are the same when k=2) + assert fset(nx.k_edge_components(G, k=2)) != fset(nx.k_edge_subgraphs(G, k=2)) + + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) + + _check_edge_connectivity(G) + + +def test_triangles(): + paths = [ + (11, 12, 13, 11),  # first 3-clique + (21, 22, 23, 21),  # second 3-clique + (11, 21),  # connected by an edge + ] + G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) + + # subgraph and ccs are the same in all cases here + assert fset(nx.k_edge_components(G, k=1)) == fset(nx.k_edge_subgraphs(G, k=1)) + + assert fset(nx.k_edge_components(G, k=2)) == fset(nx.k_edge_subgraphs(G, k=2)) + + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) + + _check_edge_connectivity(G) + + +def test_four_clique(): + paths = [ + (11, 12, 13, 14, 11, 13, 14, 12),  # first 4-clique + (21, 22, 23, 24, 21, 23, 24, 22),  # second 4-clique + # paths connecting the 4-cliques such that they are + # 3-connected in G, but not in the subgraph. + # Case where the nodes bridging them do not have degree less than 3. + (100, 13), + (12, 100, 22), + (13, 200, 23), + (14, 300, 24), + ] + G = nx.Graph(it.chain(*[pairwise(path) for path in paths])) + + # The subgraphs and ccs are different for k=3 + local_ccs = fset(nx.k_edge_components(G, k=3)) + subgraphs = fset(nx.k_edge_subgraphs(G, k=3)) + assert local_ccs != subgraphs + + # The cliques are in the same cc + clique1 = frozenset(paths[0]) + clique2 = frozenset(paths[1]) + assert clique1.union(clique2).union({100}) in local_ccs + + # but different subgraphs + assert clique1 in subgraphs + assert clique2 in subgraphs + + assert G.degree(100) == 3 + + _check_edge_connectivity(G) + + +def test_five_clique(): + # Make a graph that can be disconnected by removing fewer than 4 edges, + # but in which no node has degree less than 4.
G = nx.disjoint_union(nx.complete_graph(5), nx.complete_graph(5)) + paths = [ + # add aux-connections + (1, 100, 6), + (2, 100, 7), + (3, 200, 8), + (4, 200, 100), + ] + G.add_edges_from(it.chain(*[pairwise(path) for path in paths])) + assert min(dict(nx.degree(G)).values()) == 4 + + # For k=3 they are the same + assert fset(nx.k_edge_components(G, k=3)) == fset(nx.k_edge_subgraphs(G, k=3)) + + # For k=4 they are different + # the aux nodes are in the same CC as clique 1 but not in the same subgraph + assert fset(nx.k_edge_components(G, k=4)) != fset(nx.k_edge_subgraphs(G, k=4)) + + # For k=5 they are not the same + assert fset(nx.k_edge_components(G, k=5)) != fset(nx.k_edge_subgraphs(G, k=5)) + + # For k=6 they are the same + assert fset(nx.k_edge_components(G, k=6)) == fset(nx.k_edge_subgraphs(G, k=6)) + _check_edge_connectivity(G) + + +# ---------------- +# Directed tests +# ---------------- + + +def test_directed_aux_graph(): + # Graph similar to the one in + # http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0136264 + a, b, c, d, e, f, g, h, i = "abcdefghi" + dipaths = [ + (a, d, b, f, c), + (a, e, b), + (a, e, b, c, g, b, a), + (c, b), + (f, g, f), + (h, i), + ] + G = nx.DiGraph(it.chain(*[pairwise(path) for path in dipaths])) + aux_graph = EdgeComponentAuxGraph.construct(G) + + components_1 = fset(aux_graph.k_edge_subgraphs(k=1)) + target_1 = fset([{a, b, c, d, e, f, g}, {h}, {i}]) + assert target_1 == components_1 + + # Check that the directed case for k=1 agrees with SCCs + alt_1 = fset(nx.strongly_connected_components(G)) + assert alt_1 == components_1 + + components_2 = fset(aux_graph.k_edge_subgraphs(k=2)) + target_2 = fset([{i}, {e}, {d}, {b, c, f, g}, {h}, {a}]) + assert target_2 == components_2 + + components_3 = fset(aux_graph.k_edge_subgraphs(k=3)) + target_3 = fset([{a}, {b}, {c}, {d}, {e}, {f}, {g}, {h}, {i}]) + assert target_3 == components_3 + + +def test_random_gnp_directed(): + # seeds = [3894723670, 500186844, 267231174, 2181982262, 1116750056] + seeds = [21] + for seed in seeds: + G = nx.gnp_random_graph(20, 0.2, directed=True, seed=seed) + _check_edge_connectivity(G) + + +def test_configuration_directed(): + # seeds = [671221681, 2403749451, 124433910, 672335939, 1193127215] + seeds = [67] + for seed in seeds: + deg_seq = nx.random_powerlaw_tree_sequence(20, seed=seed, tries=5000) + G = nx.DiGraph(nx.configuration_model(deg_seq, seed=seed)) + G.remove_edges_from(nx.selfloop_edges(G)) + _check_edge_connectivity(G) + + +def test_shell_directed(): + # seeds = [3134027055, 4079264063, 1350769518, 1405643020, 530038094] + seeds = [31] + for seed in seeds: + constructor = [(12, 70, 0.8), (15, 40, 0.6)] + G = nx.random_shell_graph(constructor, seed=seed).to_directed() + _check_edge_connectivity(G) + + +def test_karate_directed(): + G = nx.karate_club_graph().to_directed() + _check_edge_connectivity(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py new file mode 100644 index 0000000..f4436ac --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcomponents.py @@ -0,0 +1,296 @@ +# Test for Moody and White k-components algorithm +import pytest + +import networkx as nx +from networkx.algorithms.connectivity.kcomponents import ( + _consolidate, + build_k_number_dict, +) + +## +# A nice synthetic graph +## + + +def torrents_and_ferraro_graph(): + # Graph from 
https://arxiv.org/pdf/1503.04476v1 p.26 + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") + labels = {v: k for k, v in rlabels.items()} + + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing a node + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + # This edge makes the graph biconnected; it's + # needed because K5s share only one node. + G.add_edge(new_node + 16, new_node + 8) + + for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing two nodes + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + nbrs2 = G[new_node + 9] + G.remove_node(new_node + 9) + for nbr in nbrs2: + G.add_edge(new_node + 18, nbr) + return G + + +def test_directed(): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.gnp_random_graph(10, 0.2, directed=True, seed=42) + nx.k_components(G) + + +# Helper function +def _check_connectivity(G, k_components): + for k, components in k_components.items(): + if k < 3: + continue + # check that k-components have node connectivity >= k. + for component in components: + C = G.subgraph(component) + K = nx.node_connectivity(C) + assert K >= k + + +@pytest.mark.slow +def test_torrents_and_ferraro_graph(): + G = torrents_and_ferraro_graph() + result = nx.k_components(G) + _check_connectivity(G, result) + + # In this example graph there are 8 3-components, 4 with 15 nodes + # and 4 with 5 nodes. + assert len(result[3]) == 8 + assert len([c for c in result[3] if len(c) == 15]) == 4 + assert len([c for c in result[3] if len(c) == 5]) == 4 + # There are also 8 4-components all with 5 nodes. 
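+ # These 4-components are the eight copies of K5 glued onto the graph by the + # construction in torrents_and_ferraro_graph above.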
+ assert len(result[4]) == 8 + assert all(len(c) == 5 for c in result[4]) + + +@pytest.mark.slow +def test_random_gnp(): + G = nx.gnp_random_graph(50, 0.2, seed=42) + result = nx.k_components(G) + _check_connectivity(G, result) + + +@pytest.mark.slow +def test_shell(): + constructor = [(20, 80, 0.8), (80, 180, 0.6)] + G = nx.random_shell_graph(constructor, seed=42) + result = nx.k_components(G) + _check_connectivity(G, result) + + +def test_configuration(): + deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72) + G = nx.Graph(nx.configuration_model(deg_seq)) + G.remove_edges_from(nx.selfloop_edges(G)) + result = nx.k_components(G) + _check_connectivity(G, result) + + +def test_karate(): + G = nx.karate_club_graph() + result = nx.k_components(G) + _check_connectivity(G, result) + + +def test_karate_component_number(): + karate_k_num = { + 0: 4, + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 3, + 7: 4, + 8: 4, + 9: 2, + 10: 3, + 11: 1, + 12: 2, + 13: 4, + 14: 2, + 15: 2, + 16: 2, + 17: 2, + 18: 2, + 19: 3, + 20: 2, + 21: 2, + 22: 2, + 23: 3, + 24: 3, + 25: 3, + 26: 2, + 27: 3, + 28: 3, + 29: 3, + 30: 4, + 31: 3, + 32: 4, + 33: 4, + } + G = nx.karate_club_graph() + k_components = nx.k_components(G) + k_num = build_k_number_dict(k_components) + assert karate_k_num == k_num + + +def test_davis_southern_women(): + G = nx.davis_southern_women_graph() + result = nx.k_components(G) + _check_connectivity(G, result) + + +def test_davis_southern_women_detail_3_and_4(): + solution = { + 3: [ + { + "Nora Fayette", + "E10", + "Myra Liddel", + "E12", + "E14", + "Frances Anderson", + "Evelyn Jefferson", + "Ruth DeSand", + "Helen Lloyd", + "Eleanor Nye", + "E9", + "E8", + "E5", + "E4", + "E7", + "E6", + "E1", + "Verne Sanderson", + "E3", + "E2", + "Theresa Anderson", + "Pearl Oglethorpe", + "Katherina Rogers", + "Brenda Rogers", + "E13", + "Charlotte McDowd", + "Sylvia Avondale", + "Laura Mandeville", + } + ], + 4: [ + { + "Nora Fayette", + "E10", + "Verne Sanderson", + "E12", + "Frances Anderson", + "Evelyn Jefferson", + "Ruth DeSand", + "Helen Lloyd", + "Eleanor Nye", + "E9", + "E8", + "E5", + "E4", + "E7", + "E6", + "Myra Liddel", + "E3", + "Theresa Anderson", + "Katherina Rogers", + "Brenda Rogers", + "Charlotte McDowd", + "Sylvia Avondale", + "Laura Mandeville", + } + ], + } + G = nx.davis_southern_women_graph() + result = nx.k_components(G) + for k, components in result.items(): + if k < 3: + continue + assert len(components) == len(solution[k]) + for component in components: + assert component in solution[k] + + +def test_set_consolidation_rosettacode(): + # Tests from http://rosettacode.org/wiki/Set_consolidation + def list_of_sets_equal(result, solution): + assert {frozenset(s) for s in result} == {frozenset(s) for s in solution} + + question = [{"A", "B"}, {"C", "D"}] + solution = [{"A", "B"}, {"C", "D"}] + list_of_sets_equal(_consolidate(question, 1), solution) + question = [{"A", "B"}, {"B", "C"}] + solution = [{"A", "B", "C"}] + list_of_sets_equal(_consolidate(question, 1), solution) + question = [{"A", "B"}, {"C", "D"}, {"D", "B"}] + solution = [{"A", "C", "B", "D"}] + list_of_sets_equal(_consolidate(question, 1), solution) + question = [{"H", "I", "K"}, {"A", "B"}, {"C", "D"}, {"D", "B"}, {"F", "G", "H"}] + solution = [{"A", "C", "B", "D"}, {"G", "F", "I", "H", "K"}] + list_of_sets_equal(_consolidate(question, 1), solution) + question = [ + {"A", "H"}, + {"H", "I", "K"}, + {"A", "B"}, + {"C", "D"}, + {"D", "B"}, + {"F", "G", "H"}, + ] + solution = [{"A", "C", "B", "D", "G", "F", 
"I", "H", "K"}] + list_of_sets_equal(_consolidate(question, 1), solution) + question = [ + {"H", "I", "K"}, + {"A", "B"}, + {"C", "D"}, + {"D", "B"}, + {"F", "G", "H"}, + {"A", "H"}, + ] + solution = [{"A", "C", "B", "D", "G", "F", "I", "H", "K"}] + list_of_sets_equal(_consolidate(question, 1), solution) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py new file mode 100644 index 0000000..67c0bfd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_kcutsets.py @@ -0,0 +1,270 @@ +# Jordi Torrents +# Test for k-cutsets +import itertools + +import pytest + +import networkx as nx +from networkx.algorithms import flow +from networkx.algorithms.connectivity.kcutsets import _is_separating_set + +MAX_CUTSETS_TO_TEST = 4 # originally 100. cut to decrease testing time + +flow_funcs = [ + flow.boykov_kolmogorov, + flow.dinitz, + flow.edmonds_karp, + flow.preflow_push, + flow.shortest_augmenting_path, +] + + +## +# Some nice synthetic graphs +## +def graph_example_1(): + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") + labels = {v: k for k, v in rlabels.items()} + + for nodes in [ + (labels[(0, 0)], labels[(1, 0)]), + (labels[(0, 4)], labels[(1, 4)]), + (labels[(3, 0)], labels[(4, 0)]), + (labels[(3, 4)], labels[(4, 4)]), + ]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing a node + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + G.add_edge(new_node + 16, new_node + 5) + return G + + +def torrents_and_ferraro_graph(): + G = nx.convert_node_labels_to_integers( + nx.grid_graph([5, 5]), label_attribute="labels" + ) + rlabels = nx.get_node_attributes(G, "labels") + labels = {v: k for k, v in rlabels.items()} + + for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]: + new_node = G.order() + 1 + # Petersen graph is triconnected + P = nx.petersen_graph() + G = nx.disjoint_union(G, P) + # Add two edges between the grid and P + G.add_edge(new_node + 1, nodes[0]) + G.add_edge(new_node, nodes[1]) + # K5 is 4-connected + K = nx.complete_graph(5) + G = nx.disjoint_union(G, K) + # Add three edges between P and K5 + G.add_edge(new_node + 2, new_node + 11) + G.add_edge(new_node + 3, new_node + 12) + G.add_edge(new_node + 4, new_node + 13) + # Add another K5 sharing a node + G = nx.disjoint_union(G, K) + nbrs = G[new_node + 10] + G.remove_node(new_node + 10) + for nbr in nbrs: + G.add_edge(new_node + 17, nbr) + # Commenting this makes the graph not biconnected !! 
+    # This stupid mistake made one reviewer very angry :P
+    G.add_edge(new_node + 16, new_node + 8)
+
+    for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
+        new_node = G.order() + 1
+        # Petersen graph is triconnected
+        P = nx.petersen_graph()
+        G = nx.disjoint_union(G, P)
+        # Add two edges between the grid and P
+        G.add_edge(new_node + 1, nodes[0])
+        G.add_edge(new_node, nodes[1])
+        # K5 is 4-connected
+        K = nx.complete_graph(5)
+        G = nx.disjoint_union(G, K)
+        # Add three edges between P and K5
+        G.add_edge(new_node + 2, new_node + 11)
+        G.add_edge(new_node + 3, new_node + 12)
+        G.add_edge(new_node + 4, new_node + 13)
+        # Add another K5 sharing two nodes
+        G = nx.disjoint_union(G, K)
+        nbrs = G[new_node + 10]
+        G.remove_node(new_node + 10)
+        for nbr in nbrs:
+            G.add_edge(new_node + 17, nbr)
+        nbrs2 = G[new_node + 9]
+        G.remove_node(new_node + 9)
+        for nbr in nbrs2:
+            G.add_edge(new_node + 18, nbr)
+    return G
+
+
+# Helper function
+def _check_separating_sets(G):
+    for cc in nx.connected_components(G):
+        if len(cc) < 3:
+            continue
+        Gc = G.subgraph(cc)
+        node_conn = nx.node_connectivity(Gc)
+        all_cuts = nx.all_node_cuts(Gc)
+        # Only test a limited number of cut sets to reduce test time.
+        for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST):
+            assert node_conn == len(cut)
+            assert not nx.is_connected(nx.restricted_view(G, cut, []))
+
+
+@pytest.mark.slow
+def test_torrents_and_ferraro_graph():
+    G = torrents_and_ferraro_graph()
+    _check_separating_sets(G)
+
+
+def test_example_1():
+    G = graph_example_1()
+    _check_separating_sets(G)
+
+
+def test_random_gnp():
+    G = nx.gnp_random_graph(100, 0.1, seed=42)
+    _check_separating_sets(G)
+
+
+def test_shell():
+    constructor = [(20, 80, 0.8), (80, 180, 0.6)]
+    G = nx.random_shell_graph(constructor, seed=42)
+    _check_separating_sets(G)
+
+
+def test_configuration():
+    deg_seq = nx.random_powerlaw_tree_sequence(100, tries=5, seed=72)
+    G = nx.Graph(nx.configuration_model(deg_seq))
+    G.remove_edges_from(nx.selfloop_edges(G))
+    _check_separating_sets(G)
+
+
+def test_karate():
+    G = nx.karate_club_graph()
+    _check_separating_sets(G)
+
+
+def _generate_no_biconnected(max_attempts=50):
+    attempts = 0
+    while True:
+        G = nx.fast_gnp_random_graph(100, 0.0575, seed=42)
+        if nx.is_connected(G) and not nx.is_biconnected(G):
+            attempts = 0
+            yield G
+        else:
+            if attempts >= max_attempts:
+                msg = f"Tried {attempts} times: no suitable graph."
+                raise Exception(msg)
+            else:
+                attempts += 1
+
+
+def test_articulation_points():
+    Ggen = _generate_no_biconnected()
+    for i in range(1):  # change 1 to 3 or more for more realizations.
+        G = next(Ggen)
+        articulation_points = [{a} for a in nx.articulation_points(G)]
+        for cut in nx.all_node_cuts(G):
+            assert cut in articulation_points
+
+
+def test_grid_2d_graph():
+    # All minimum node cuts of a 2d grid
+    # are the four pairs of nodes that are
+    # neighbors of the four corner nodes.
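+    # For example, removing {(0, 1), (1, 0)} disconnects the corner (0, 0).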
+ G = nx.grid_2d_graph(5, 5) + solution = [{(0, 1), (1, 0)}, {(3, 0), (4, 1)}, {(3, 4), (4, 3)}, {(0, 3), (1, 4)}] + for cut in nx.all_node_cuts(G): + assert cut in solution + + +def test_disconnected_graph(): + G = nx.fast_gnp_random_graph(100, 0.01, seed=42) + cuts = nx.all_node_cuts(G) + pytest.raises(nx.NetworkXError, next, cuts) + + +@pytest.mark.slow +def test_alternative_flow_functions(): + graphs = [nx.grid_2d_graph(4, 4), nx.cycle_graph(5)] + for G in graphs: + node_conn = nx.node_connectivity(G) + for flow_func in flow_funcs: + all_cuts = nx.all_node_cuts(G, flow_func=flow_func) + # Only test a limited number of cut sets to reduce test time. + for cut in itertools.islice(all_cuts, MAX_CUTSETS_TO_TEST): + assert node_conn == len(cut) + assert not nx.is_connected(nx.restricted_view(G, cut, [])) + + +def test_is_separating_set_complete_graph(): + G = nx.complete_graph(5) + assert _is_separating_set(G, {0, 1, 2, 3}) + + +def test_is_separating_set(): + for i in [5, 10, 15]: + G = nx.star_graph(i) + max_degree_node = max(G, key=G.degree) + assert _is_separating_set(G, {max_degree_node}) + + +def test_non_repeated_cuts(): + # The algorithm was repeating the cut {0, 1} for the giant biconnected + # component of the Karate club graph. + K = nx.karate_club_graph() + bcc = max(list(nx.biconnected_components(K)), key=len) + G = K.subgraph(bcc) + solution = [{32, 33}, {2, 33}, {0, 3}, {0, 1}, {29, 33}] + cuts = list(nx.all_node_cuts(G)) + assert len(solution) == len(cuts) + for cut in cuts: + assert cut in solution + + +def test_cycle_graph(): + G = nx.cycle_graph(5) + solution = [{0, 2}, {0, 3}, {1, 3}, {1, 4}, {2, 4}] + cuts = list(nx.all_node_cuts(G)) + assert len(solution) == len(cuts) + for cut in cuts: + assert cut in solution + + +def test_complete_graph(): + G = nx.complete_graph(5) + assert nx.node_connectivity(G) == 4 + assert list(nx.all_node_cuts(G)) == [] + + +def test_all_node_cuts_simple_case(): + G = nx.complete_graph(5) + G.remove_edges_from([(0, 1), (3, 4)]) + expected = [{0, 1, 2}, {2, 3, 4}] + actual = list(nx.all_node_cuts(G)) + assert len(actual) == len(expected) + for cut in actual: + assert cut in expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py new file mode 100644 index 0000000..2b9e2ba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/tests/test_stoer_wagner.py @@ -0,0 +1,102 @@ +from itertools import chain + +import pytest + +import networkx as nx + + +def _check_partition(G, cut_value, partition, weight): + assert isinstance(partition, tuple) + assert len(partition) == 2 + assert isinstance(partition[0], list) + assert isinstance(partition[1], list) + assert len(partition[0]) > 0 + assert len(partition[1]) > 0 + assert sum(map(len, partition)) == len(G) + assert set(chain.from_iterable(partition)) == set(G) + partition = tuple(map(set, partition)) + w = 0 + for u, v, e in G.edges(data=True): + if (u in partition[0]) == (v in partition[1]): + w += e.get(weight, 1) + assert w == cut_value + + +def _test_stoer_wagner(G, answer, weight="weight"): + cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.PairingHeap) + assert cut_value == answer + _check_partition(G, cut_value, partition, weight) + cut_value, partition = nx.stoer_wagner(G, weight, heap=nx.utils.BinaryHeap) + assert cut_value == answer + _check_partition(G, cut_value, partition, weight) + + +def 
test_graph1(): + G = nx.Graph() + G.add_edge("x", "a", weight=3) + G.add_edge("x", "b", weight=1) + G.add_edge("a", "c", weight=3) + G.add_edge("b", "c", weight=5) + G.add_edge("b", "d", weight=4) + G.add_edge("d", "e", weight=2) + G.add_edge("c", "y", weight=2) + G.add_edge("e", "y", weight=3) + _test_stoer_wagner(G, 4) + + +def test_graph2(): + G = nx.Graph() + G.add_edge("x", "a") + G.add_edge("x", "b") + G.add_edge("a", "c") + G.add_edge("b", "c") + G.add_edge("b", "d") + G.add_edge("d", "e") + G.add_edge("c", "y") + G.add_edge("e", "y") + _test_stoer_wagner(G, 2) + + +def test_graph3(): + # Source: + # Stoer, M. and Wagner, F. (1997). "A simple min-cut algorithm". Journal of + # the ACM 44 (4), 585-591. + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 5, weight=3) + G.add_edge(2, 3, weight=3) + G.add_edge(2, 5, weight=2) + G.add_edge(2, 6, weight=2) + G.add_edge(3, 4, weight=4) + G.add_edge(3, 7, weight=2) + G.add_edge(4, 7, weight=2) + G.add_edge(4, 8, weight=2) + G.add_edge(5, 6, weight=3) + G.add_edge(6, 7, weight=1) + G.add_edge(7, 8, weight=3) + _test_stoer_wagner(G, 4) + + +def test_weight_name(): + G = nx.Graph() + G.add_edge(1, 2, weight=1, cost=8) + G.add_edge(1, 3, cost=2) + G.add_edge(2, 3, cost=4) + _test_stoer_wagner(G, 6, weight="cost") + + +def test_exceptions(): + G = nx.Graph() + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) + G.add_node(1) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) + G.add_node(2) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) + G.add_edge(1, 2, weight=-2) + pytest.raises(nx.NetworkXError, nx.stoer_wagner, G) + G = nx.DiGraph() + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) + G = nx.MultiGraph() + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) + G = nx.MultiDiGraph() + pytest.raises(nx.NetworkXNotImplemented, nx.stoer_wagner, G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py new file mode 100644 index 0000000..7bf9994 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/connectivity/utils.py @@ -0,0 +1,88 @@ +""" +Utilities for connectivity package +""" + +import networkx as nx + +__all__ = ["build_auxiliary_node_connectivity", "build_auxiliary_edge_connectivity"] + + +@nx._dispatchable(returns_graph=True) +def build_auxiliary_node_connectivity(G): + r"""Creates a directed graph D from an undirected graph G to compute flow + based node connectivity. + + For an undirected graph G having `n` nodes and `m` edges we derive a + directed graph D with `2n` nodes and `2m+n` arcs by replacing each + original node `v` with two nodes `vA`, `vB` linked by an (internal) + arc in D. Then for each edge (`u`, `v`) in G we add two arcs (`uB`, `vA`) + and (`vB`, `uA`) in D. Finally we set the attribute capacity = 1 for each + arc in D [1]_. + + For a directed graph having `n` nodes and `m` arcs we derive a + directed graph D with `2n` nodes and `m+n` arcs by replacing each + original node `v` with two nodes `vA`, `vB` linked by an (internal) + arc (`vA`, `vB`) in D. Then for each arc (`u`, `v`) in G we add one + arc (`uB`, `vA`) in D. Finally we set the attribute capacity = 1 for + each arc in D. + + A dictionary with a mapping between nodes in the original graph and the + auxiliary digraph is stored as a graph attribute: D.graph['mapping']. + + References + ---------- + .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. 
in Brandes and + Erlebach, 'Network Analysis: Methodological Foundations', Lecture + Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. + https://doi.org/10.1007/978-3-540-31955-9_7 + + """ + directed = G.is_directed() + + mapping = {} + H = nx.DiGraph() + + for i, node in enumerate(G): + mapping[node] = i + H.add_node(f"{i}A", id=node) + H.add_node(f"{i}B", id=node) + H.add_edge(f"{i}A", f"{i}B", capacity=1) + + edges = [] + for source, target in G.edges(): + edges.append((f"{mapping[source]}B", f"{mapping[target]}A")) + if not directed: + edges.append((f"{mapping[target]}B", f"{mapping[source]}A")) + H.add_edges_from(edges, capacity=1) + + # Store mapping as graph attribute + H.graph["mapping"] = mapping + return H + + +@nx._dispatchable(returns_graph=True) +def build_auxiliary_edge_connectivity(G): + """Auxiliary digraph for computing flow based edge connectivity + + If the input graph is undirected, we replace each edge (`u`,`v`) with + two reciprocal arcs (`u`, `v`) and (`v`, `u`) and then we set the attribute + 'capacity' for each arc to 1. If the input graph is directed we simply + add the 'capacity' attribute. Part of algorithm 1 in [1]_ . + + References + ---------- + .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. (this is a + chapter, look for the reference of the book). + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + """ + if G.is_directed(): + H = nx.DiGraph() + H.add_nodes_from(G.nodes()) + H.add_edges_from(G.edges(), capacity=1) + return H + else: + H = nx.DiGraph() + H.add_nodes_from(G.nodes()) + for source, target in G.edges(): + H.add_edges_from([(source, target), (target, source)], capacity=1) + return H diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py new file mode 100644 index 0000000..fec26ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/core.py @@ -0,0 +1,588 @@ +""" +Find the k-cores of a graph. + +The k-core is found by recursively pruning nodes with degrees less than k. + +See the following references for details: + +An O(m) Algorithm for Cores Decomposition of Networks +Vladimir Batagelj and Matjaz Zaversnik, 2003. +https://arxiv.org/abs/cs.DS/0310049 + +Generalized Cores +Vladimir Batagelj and Matjaz Zaversnik, 2002. +https://arxiv.org/pdf/cs/0202039 + +For directed graphs a more general notion is that of D-cores which +looks at (k, l) restrictions on (in, out) degree. The (k, k) D-core +is the k-core. + +D-cores: Measuring Collaboration of Directed Graphs Based on Degeneracy +Christos Giatsidis, Dimitrios M. Thilikos, Michalis Vazirgiannis, ICDM 2011. +http://www.graphdegeneracy.org/dcores_ICDM_2011.pdf + +Multi-scale structure and topological anomaly detection via a new network \ +statistic: The onion decomposition +L. Hébert-Dufresne, J. A. Grochow, and A. Allard +Scientific Reports 6, 31708 (2016) +http://doi.org/10.1038/srep31708 + +""" + +import networkx as nx + +__all__ = [ + "core_number", + "k_core", + "k_shell", + "k_crust", + "k_corona", + "k_truss", + "onion_layers", +] + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable +def core_number(G): + """Returns the core number for each node. + + A k-core is a maximal subgraph that contains nodes of degree k or more. + + The core number of a node is the largest value k of a k-core containing + that node. 
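+
+    Equivalently, a node has core number `k` if it belongs to the k-core
+    but not to the (k+1)-core.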
+ + Parameters + ---------- + G : NetworkX graph + An undirected or directed graph + + Returns + ------- + core_number : dictionary + A dictionary keyed by node to the core number. + + Raises + ------ + NetworkXNotImplemented + If `G` is a multigraph or contains self loops. + + Notes + ----- + For directed graphs the node degree is defined to be the + in-degree + out-degree. + + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> nx.core_number(H) + {0: 1, 1: 2, 2: 2, 3: 2, 4: 1, 5: 2, 6: 0} + >>> G = nx.DiGraph() + >>> G.add_edges_from([(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)]) + >>> nx.core_number(G) + {1: 2, 2: 2, 3: 2, 4: 2} + + References + ---------- + .. [1] An O(m) Algorithm for Cores Decomposition of Networks + Vladimir Batagelj and Matjaz Zaversnik, 2003. + https://arxiv.org/abs/cs.DS/0310049 + """ + if nx.number_of_selfloops(G) > 0: + msg = ( + "Input graph has self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) + raise nx.NetworkXNotImplemented(msg) + degrees = dict(G.degree()) + # Sort nodes by degree. + nodes = sorted(degrees, key=degrees.get) + bin_boundaries = [0] + curr_degree = 0 + for i, v in enumerate(nodes): + if degrees[v] > curr_degree: + bin_boundaries.extend([i] * (degrees[v] - curr_degree)) + curr_degree = degrees[v] + node_pos = {v: pos for pos, v in enumerate(nodes)} + # The initial guess for the core number of a node is its degree. + core = degrees + nbrs = {v: list(nx.all_neighbors(G, v)) for v in G} + for v in nodes: + for u in nbrs[v]: + if core[u] > core[v]: + nbrs[u].remove(v) + pos = node_pos[u] + bin_start = bin_boundaries[core[u]] + node_pos[u] = bin_start + node_pos[nodes[bin_start]] = pos + nodes[bin_start], nodes[pos] = nodes[pos], nodes[bin_start] + bin_boundaries[core[u]] += 1 + core[u] -= 1 + return core + + +def _core_subgraph(G, k_filter, k=None, core=None): + """Returns the subgraph induced by nodes passing filter `k_filter`. + + Parameters + ---------- + G : NetworkX graph + The graph or directed graph to process + k_filter : filter function + This function filters the nodes chosen. It takes three inputs: + A node of G, the filter's cutoff, and the core dict of the graph. + The function should return a Boolean value. + k : int, optional + The order of the core. If not specified use the max core number. + This value is used as the cutoff for the filter. + core : dict, optional + Precomputed core numbers keyed by node for the graph `G`. + If not specified, the core numbers will be computed from `G`. + + """ + if core is None: + core = core_number(G) + if k is None: + k = max(core.values()) + nodes = (v for v in core if k_filter(v, k, core)) + return G.subgraph(nodes).copy() + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def k_core(G, k=None, core_number=None): + """Returns the k-core of G. + + A k-core is a maximal subgraph that contains nodes of degree `k` or more. + + Parameters + ---------- + G : NetworkX graph + A graph or directed graph + k : int, optional + The order of the core. If not specified return the main core. + core_number : dictionary, optional + Precomputed core numbers for the graph G. + + Returns + ------- + G : NetworkX graph + The k-core subgraph + + Raises + ------ + NetworkXNotImplemented + The k-core is not defined for multigraphs or graphs with self loops. + + Notes + ----- + The main core is the core with `k` as the largest core_number. 
+ + For directed graphs the node degree is defined to be the + in-degree + out-degree. + + Graph, node, and edge attributes are copied to the subgraph. + + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.k_core(H).nodes + NodeView((1, 2, 3, 5)) + + See Also + -------- + core_number + + References + ---------- + .. [1] An O(m) Algorithm for Cores Decomposition of Networks + Vladimir Batagelj and Matjaz Zaversnik, 2003. + https://arxiv.org/abs/cs.DS/0310049 + """ + + def k_filter(v, k, c): + return c[v] >= k + + return _core_subgraph(G, k_filter, k, core_number) + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def k_shell(G, k=None, core_number=None): + """Returns the k-shell of G. + + The k-shell is the subgraph induced by nodes with core number k. + That is, nodes in the k-core that are not in the (k+1)-core. + + Parameters + ---------- + G : NetworkX graph + A graph or directed graph. + k : int, optional + The order of the shell. If not specified return the outer shell. + core_number : dictionary, optional + Precomputed core numbers for the graph G. + + + Returns + ------- + G : NetworkX graph + The k-shell subgraph + + Raises + ------ + NetworkXNotImplemented + The k-shell is not implemented for multigraphs or graphs with self loops. + + Notes + ----- + This is similar to k_corona but in that case only neighbors in the + k-core are considered. + + For directed graphs the node degree is defined to be the + in-degree + out-degree. + + Graph, node, and edge attributes are copied to the subgraph. + + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.k_shell(H, k=1).nodes + NodeView((0, 4)) + + See Also + -------- + core_number + k_corona + + + References + ---------- + .. [1] A model of Internet topology using k-shell decomposition + Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt, + and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 + http://www.pnas.org/content/104/27/11150.full + """ + + def k_filter(v, k, c): + return c[v] == k + + return _core_subgraph(G, k_filter, k, core_number) + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def k_crust(G, k=None, core_number=None): + """Returns the k-crust of G. + + The k-crust is the graph G with the edges of the k-core removed + and isolated nodes found after the removal of edges are also removed. + + Parameters + ---------- + G : NetworkX graph + A graph or directed graph. + k : int, optional + The order of the shell. If not specified return the main crust. + core_number : dictionary, optional + Precomputed core numbers for the graph G. + + Returns + ------- + G : NetworkX graph + The k-crust subgraph + + Raises + ------ + NetworkXNotImplemented + The k-crust is not implemented for multigraphs or graphs with self loops. + + Notes + ----- + This definition of k-crust is different than the definition in [1]_. + The k-crust in [1]_ is equivalent to the k+1 crust of this algorithm. + + For directed graphs the node degree is defined to be the + in-degree + out-degree. + + Graph, node, and edge attributes are copied to the subgraph. 
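+
+    Equivalently, the k-crust computed here is the subgraph induced by the
+    nodes whose core number is at most `k`.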
+ + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.k_crust(H, k=1).nodes + NodeView((0, 4, 6)) + + See Also + -------- + core_number + + References + ---------- + .. [1] A model of Internet topology using k-shell decomposition + Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt, + and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 + http://www.pnas.org/content/104/27/11150.full + """ + # Default for k is one less than in _core_subgraph, so just inline. + # Filter is c[v] <= k + if core_number is None: + core_number = nx.core_number(G) + if k is None: + k = max(core_number.values()) - 1 + nodes = (v for v in core_number if core_number[v] <= k) + return G.subgraph(nodes).copy() + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def k_corona(G, k, core_number=None): + """Returns the k-corona of G. + + The k-corona is the subgraph of nodes in the k-core which have + exactly k neighbors in the k-core. + + Parameters + ---------- + G : NetworkX graph + A graph or directed graph + k : int + The order of the corona. + core_number : dictionary, optional + Precomputed core numbers for the graph G. + + Returns + ------- + G : NetworkX graph + The k-corona subgraph + + Raises + ------ + NetworkXNotImplemented + The k-corona is not defined for multigraphs or graphs with self loops. + + Notes + ----- + For directed graphs the node degree is defined to be the + in-degree + out-degree. + + Graph, node, and edge attributes are copied to the subgraph. + + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.k_corona(H, k=2).nodes + NodeView((1, 2, 3, 5)) + + See Also + -------- + core_number + + References + ---------- + .. [1] k -core (bootstrap) percolation on complex networks: + Critical phenomena and nonlocal effects, + A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes, + Phys. Rev. E 73, 056101 (2006) + http://link.aps.org/doi/10.1103/PhysRevE.73.056101 + """ + + def func(v, k, c): + return c[v] == k and k == sum(1 for w in G[v] if c[w] >= k) + + return _core_subgraph(G, func, k, core_number) + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def k_truss(G, k): + """Returns the k-truss of `G`. + + The k-truss is the maximal induced subgraph of `G` which contains at least + three vertices where every edge is incident to at least `k-2` triangles. + + Parameters + ---------- + G : NetworkX graph + An undirected graph + k : int + The order of the truss + + Returns + ------- + H : NetworkX graph + The k-truss subgraph + + Raises + ------ + NetworkXNotImplemented + If `G` is a multigraph or directed graph or if it contains self loops. + + Notes + ----- + A k-clique is a (k-2)-truss and a k-truss is a (k+1)-core. + + Graph, node, and edge attributes are copied to the subgraph. + + K-trusses were originally defined in [2] which states that the k-truss + is the maximal induced subgraph where each edge belongs to at least + `k-2` triangles. A more recent paper, [1], uses a slightly different + definition requiring that each edge belong to at least `k` triangles. + This implementation uses the original definition of `k-2` triangles. 
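+    Triangle counts are evaluated within the current (shrinking) subgraph,
+    so edges are re-examined after neighboring edges are removed, until no
+    further edges can be dropped.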
+ + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.k_truss(H, k=2).nodes + NodeView((0, 1, 2, 3, 4, 5)) + + References + ---------- + .. [1] Bounds and Algorithms for k-truss. Paul Burkhardt, Vance Faber, + David G. Harris, 2018. https://arxiv.org/abs/1806.05523v2 + .. [2] Trusses: Cohesive Subgraphs for Social Network Analysis. Jonathan + Cohen, 2005. + """ + if nx.number_of_selfloops(G) > 0: + msg = ( + "Input graph has self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) + raise nx.NetworkXNotImplemented(msg) + + H = G.copy() + + n_dropped = 1 + while n_dropped > 0: + n_dropped = 0 + to_drop = [] + seen = set() + for u in H: + nbrs_u = set(H[u]) + seen.add(u) + new_nbrs = [v for v in nbrs_u if v not in seen] + for v in new_nbrs: + if len(nbrs_u & set(H[v])) < (k - 2): + to_drop.append((u, v)) + H.remove_edges_from(to_drop) + n_dropped = len(to_drop) + H.remove_nodes_from(list(nx.isolates(H))) + + return H + + +@nx.utils.not_implemented_for("multigraph") +@nx.utils.not_implemented_for("directed") +@nx._dispatchable +def onion_layers(G): + """Returns the layer of each vertex in an onion decomposition of the graph. + + The onion decomposition refines the k-core decomposition by providing + information on the internal organization of each k-shell. It is usually + used alongside the `core numbers`. + + Parameters + ---------- + G : NetworkX graph + An undirected graph without self loops. + + Returns + ------- + od_layers : dictionary + A dictionary keyed by node to the onion layer. The layers are + contiguous integers starting at 1. + + Raises + ------ + NetworkXNotImplemented + If `G` is a multigraph or directed graph or if it contains self loops. + + Examples + -------- + >>> degrees = [0, 1, 2, 2, 2, 2, 3] + >>> H = nx.havel_hakimi_graph(degrees) + >>> H.degree + DegreeView({0: 1, 1: 2, 2: 2, 3: 2, 4: 2, 5: 3, 6: 0}) + >>> nx.onion_layers(H) + {6: 1, 0: 2, 4: 3, 1: 4, 2: 4, 3: 4, 5: 4} + + See Also + -------- + core_number + + References + ---------- + .. [1] Multi-scale structure and topological anomaly detection via a new + network statistic: The onion decomposition + L. Hébert-Dufresne, J. A. Grochow, and A. Allard + Scientific Reports 6, 31708 (2016) + http://doi.org/10.1038/srep31708 + .. [2] Percolation and the effective structure of complex networks + A. Allard and L. Hébert-Dufresne + Physical Review X 9, 011023 (2019) + http://doi.org/10.1103/PhysRevX.9.011023 + """ + if nx.number_of_selfloops(G) > 0: + msg = ( + "Input graph contains self loops which is not permitted; " + "Consider using G.remove_edges_from(nx.selfloop_edges(G))." + ) + raise nx.NetworkXNotImplemented(msg) + # Dictionaries to register the k-core/onion decompositions. + od_layers = {} + # Adjacency list + neighbors = {v: list(nx.all_neighbors(G, v)) for v in G} + # Effective degree of nodes. + degrees = dict(G.degree()) + # Performs the onion decomposition. + current_core = 1 + current_layer = 1 + # Sets vertices of degree 0 to layer 1, if any. + isolated_nodes = list(nx.isolates(G)) + if len(isolated_nodes) > 0: + for v in isolated_nodes: + od_layers[v] = current_layer + degrees.pop(v) + current_layer = 2 + # Finds the layer for the remaining nodes. + while len(degrees) > 0: + # Sets the order for looking at nodes. + nodes = sorted(degrees, key=degrees.get) + # Sets properly the current core. 
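+        # (current_core only ever increases: once every remaining node's
+        # effective degree exceeds it, the peeling has reached a deeper core)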
+        min_degree = degrees[nodes[0]]
+        if min_degree > current_core:
+            current_core = min_degree
+        # Identifies vertices in the current layer.
+        this_layer = []
+        for n in nodes:
+            if degrees[n] > current_core:
+                break
+            this_layer.append(n)
+        # Identifies the core/layer of the vertices in the current layer.
+        for v in this_layer:
+            od_layers[v] = current_layer
+            for n in neighbors[v]:
+                neighbors[n].remove(v)
+                degrees[n] = degrees[n] - 1
+            degrees.pop(v)
+        # Updates the layer count.
+        current_layer = current_layer + 1
+    # Return the dictionary containing the onion layer of each vertex.
+    return od_layers
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py
new file mode 100644
index 0000000..cdf607b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/covering.py
@@ -0,0 +1,142 @@
+"""Functions related to graph covers."""
+
+from functools import partial
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import arbitrary_element, not_implemented_for
+
+__all__ = ["min_edge_cover", "is_edge_cover"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def min_edge_cover(G, matching_algorithm=None):
+    """Returns the minimum cardinality edge cover of the graph as a set of edges.
+
+    A smallest edge cover can be found in polynomial time by finding
+    a maximum matching and extending it greedily so that all nodes
+    are covered. This function follows that process. A maximum matching
+    algorithm can be specified for the first step of the algorithm.
+    The resulting set may contain one 2-tuple `(u, v)` for each edge
+    (the usual case) or both 2-tuples `(u, v)` and `(v, u)` for
+    each edge. The latter is only done when a bipartite matching algorithm
+    is specified as `matching_algorithm`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    matching_algorithm : function
+        A function that returns a maximum cardinality matching for `G`.
+        The function must take one input, the graph `G`, and return
+        either a set of edges (with only one direction for the pair of nodes)
+        or a dictionary mapping each node to its mate. If not specified,
+        :func:`~networkx.algorithms.matching.max_weight_matching` is used.
+        Common bipartite matching functions include
+        :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
+        or
+        :func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.
+
+    Returns
+    -------
+    min_cover : set
+
+        A set of the edges in a minimum edge cover in the form of tuples.
+        It contains only one of the equivalent 2-tuples `(u, v)` and `(v, u)`
+        for each edge. If a bipartite method is used to compute the matching,
+        the returned set contains both the 2-tuples `(u, v)` and `(v, u)`
+        for each edge of a minimum edge cover.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> sorted(nx.min_edge_cover(G))
+    [(2, 1), (3, 0)]
+
+    Notes
+    -----
+    An edge cover of a graph is a set of edges such that every node of
+    the graph is incident to at least one edge of the set.
+    The minimum edge cover is an edge covering of smallest cardinality.
+
+    Due to its implementation, the worst-case running time of this algorithm
+    is bounded by the worst-case running time of the function
+    ``matching_algorithm``.
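+
+    By Gallai's theorem, in a graph with no isolated nodes the size of a
+    minimum edge cover equals the number of nodes minus the size of a
+    maximum matching; the greedy extension above attains this bound by
+    adding exactly one edge for each node left uncovered by the matching.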
+
+    A minimum edge cover of `G` can also be found using
+    :func:`~networkx.algorithms.bipartite.covering.min_edge_covering`, which is
+    simply this function with a default matching algorithm of
+    :func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`.
+    """
+    if len(G) == 0:
+        return set()
+    if nx.number_of_isolates(G) > 0:
+        # ``min_cover`` does not exist as there is an isolated node
+        raise nx.NetworkXException(
+            "Graph has a node with no edge incident on it, so no edge cover exists."
+        )
+    if matching_algorithm is None:
+        matching_algorithm = partial(nx.max_weight_matching, maxcardinality=True)
+    maximum_matching = matching_algorithm(G)
+    # ``min_cover`` is a superset of ``maximum_matching``
+    try:
+        # bipartite matching algs return dict so convert if needed
+        min_cover = set(maximum_matching.items())
+        bipartite_cover = True
+    except AttributeError:
+        min_cover = maximum_matching
+        bipartite_cover = False
+    # iterate for uncovered nodes
+    uncovered_nodes = set(G) - {v for u, v in min_cover} - {u for u, v in min_cover}
+    for v in uncovered_nodes:
+        # Since `v` is uncovered, each edge incident to `v` will join it
+        # with a covered node (otherwise, if there were an edge joining
+        # uncovered nodes `u` and `v`, the maximum matching algorithm
+        # would have found it), so we can choose an arbitrary edge
+        # incident to `v`. (This applies only in a simple graph, not a
+        # multigraph.)
+        u = arbitrary_element(G[v])
+        min_cover.add((u, v))
+        if bipartite_cover:
+            min_cover.add((v, u))
+    return min_cover
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def is_edge_cover(G, cover):
+    """Decides whether a set of edges is a valid edge cover of the graph.
+
+    Given a set of edges, whether it is an edge cover can be decided by
+    checking whether every node of the graph has at least one edge of
+    the set incident on it.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected bipartite graph.
+
+    cover : set
+        Set of edges to be checked.
+
+    Returns
+    -------
+    bool
+        Whether the set of edges is a valid edge cover of the graph.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
+    >>> cover = {(2, 1), (3, 0)}
+    >>> nx.is_edge_cover(G, cover)
+    True
+
+    Notes
+    -----
+    An edge cover of a graph is a set of edges such that every node of
+    the graph is incident to at least one edge of the set.
+    """
+    return set(G) <= set(chain.from_iterable(cover))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py
new file mode 100644
index 0000000..e951431
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cuts.py
@@ -0,0 +1,398 @@
+"""Functions for finding and evaluating cuts in a graph."""
+
+from itertools import chain
+
+import networkx as nx
+
+__all__ = [
+    "boundary_expansion",
+    "conductance",
+    "cut_size",
+    "edge_expansion",
+    "mixing_expansion",
+    "node_expansion",
+    "normalized_cut_size",
+    "volume",
+]
+
+
+# TODO STILL NEED TO UPDATE ALL THE DOCUMENTATION!
+
+
+@nx._dispatchable(edge_attrs="weight")
+def cut_size(G, S, T=None, weight=None):
+    """Returns the size of the cut between two sets of nodes.
+
+    A *cut* is a partition of the nodes of a graph into two sets. The
+    *cut size* is the sum of the weights of the edges "between" the two
+    sets of nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    S : collection
+        A collection of nodes in `G`.
+
+    T : collection
+        A collection of nodes in `G`. If not specified, this is taken to
+        be the set complement of `S`.
+
+    weight : object
+        Edge attribute key to use as weight. If not specified, edges
+        have weight one.
+
+    Returns
+    -------
+    number
+        Total weight of all edges from nodes in set `S` to nodes in
+        set `T` (and, in the case of directed graphs, all edges from
+        nodes in `T` to nodes in `S`).
+
+    Examples
+    --------
+    In the graph with two cliques joined by a single edge, the natural
+    bipartition of the graph into two blocks, one for each clique,
+    yields a cut of weight one::
+
+        >>> G = nx.barbell_graph(3, 0)
+        >>> S = {0, 1, 2}
+        >>> T = {3, 4, 5}
+        >>> nx.cut_size(G, S, T)
+        1
+
+    Each parallel edge in a multigraph is counted when determining the
+    cut size::
+
+        >>> G = nx.MultiGraph(["ab", "ab"])
+        >>> S = {"a"}
+        >>> T = {"b"}
+        >>> nx.cut_size(G, S, T)
+        2
+
+    Notes
+    -----
+    In a multigraph, the cut size is the total weight of edges including
+    multiplicity.
+
+    """
+    edges = nx.edge_boundary(G, S, T, data=weight, default=1)
+    if G.is_directed():
+        edges = chain(edges, nx.edge_boundary(G, T, S, data=weight, default=1))
+    return sum(weight for u, v, weight in edges)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def volume(G, S, weight=None):
+    """Returns the volume of a set of nodes.
+
+    The *volume* of a set *S* is the sum of the (out-)degrees of nodes
+    in *S* (taking into account parallel edges in multigraphs). [1]
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    S : collection
+        A collection of nodes in `G`.
+
+    weight : object
+        Edge attribute key to use as weight. If not specified, edges
+        have weight one.
+
+    Returns
+    -------
+    number
+        The volume of the set of nodes represented by `S` in the graph
+        `G`.
+
+    See also
+    --------
+    conductance
+    cut_size
+    edge_expansion
+    edge_boundary
+    normalized_cut_size
+
+    References
+    ----------
+    .. [1] David Gleich.
+           *Hierarchical Directed Spectral Graph Partitioning*.
+
+
+    """
+    degree = G.out_degree if G.is_directed() else G.degree
+    return sum(d for v, d in degree(S, weight=weight))
+
+
+@nx._dispatchable(edge_attrs="weight")
+def normalized_cut_size(G, S, T=None, weight=None):
+    """Returns the normalized size of the cut between two sets of nodes.
+
+    The *normalized cut size* is the cut size times the sum of the
+    reciprocals of the volumes of the two sets. [1]
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    S : collection
+        A collection of nodes in `G`.
+
+    T : collection
+        A collection of nodes in `G`.
+
+    weight : object
+        Edge attribute key to use as weight. If not specified, edges
+        have weight one.
+
+    Returns
+    -------
+    number
+        The normalized cut size between the two sets `S` and `T`.
+
+    Notes
+    -----
+    In a multigraph, the cut size is the total weight of edges including
+    multiplicity.
+
+    See also
+    --------
+    conductance
+    cut_size
+    edge_expansion
+    volume
+
+    References
+    ----------
+    .. [1] David Gleich.
+           *Hierarchical Directed Spectral Graph Partitioning*.
+
+
+    """
+    if T is None:
+        T = set(G) - set(S)
+    num_cut_edges = cut_size(G, S, T=T, weight=weight)
+    volume_S = volume(G, S, weight=weight)
+    volume_T = volume(G, T, weight=weight)
+    return num_cut_edges * ((1 / volume_S) + (1 / volume_T))
+
+
+@nx._dispatchable(edge_attrs="weight")
+def conductance(G, S, T=None, weight=None):
+    """Returns the conductance of two sets of nodes.
+
+    The *conductance* is the quotient of the cut size and the smaller of
+    the volumes of the two sets.
[1] + + Parameters + ---------- + G : NetworkX graph + + S : collection + A collection of nodes in `G`. + + T : collection + A collection of nodes in `G`. + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + number + The conductance between the two sets `S` and `T`. + + See also + -------- + cut_size + edge_expansion + normalized_cut_size + volume + + References + ---------- + .. [1] David Gleich. + *Hierarchical Directed Spectral Graph Partitioning*. + + + """ + if T is None: + T = set(G) - set(S) + num_cut_edges = cut_size(G, S, T, weight=weight) + volume_S = volume(G, S, weight=weight) + volume_T = volume(G, T, weight=weight) + return num_cut_edges / min(volume_S, volume_T) + + +@nx._dispatchable(edge_attrs="weight") +def edge_expansion(G, S, T=None, weight=None): + """Returns the edge expansion between two node sets. + + The *edge expansion* is the quotient of the cut size and the smaller + of the cardinalities of the two sets. [1] + + Parameters + ---------- + G : NetworkX graph + + S : collection + A collection of nodes in `G`. + + T : collection + A collection of nodes in `G`. + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + number + The edge expansion between the two sets `S` and `T`. + + See also + -------- + boundary_expansion + mixing_expansion + node_expansion + + References + ---------- + .. [1] Fan Chung. + *Spectral Graph Theory*. + (CBMS Regional Conference Series in Mathematics, No. 92), + American Mathematical Society, 1997, ISBN 0-8218-0315-8 + + + """ + if T is None: + T = set(G) - set(S) + num_cut_edges = cut_size(G, S, T=T, weight=weight) + return num_cut_edges / min(len(S), len(T)) + + +@nx._dispatchable(edge_attrs="weight") +def mixing_expansion(G, S, T=None, weight=None): + """Returns the mixing expansion between two node sets. + + The *mixing expansion* is the quotient of the cut size and twice the + number of edges in the graph. [1] + + Parameters + ---------- + G : NetworkX graph + + S : collection + A collection of nodes in `G`. + + T : collection + A collection of nodes in `G`. + + weight : object + Edge attribute key to use as weight. If not specified, edges + have weight one. + + Returns + ------- + number + The mixing expansion between the two sets `S` and `T`. + + See also + -------- + boundary_expansion + edge_expansion + node_expansion + + References + ---------- + .. [1] Vadhan, Salil P. + "Pseudorandomness." + *Foundations and Trends + in Theoretical Computer Science* 7.1–3 (2011): 1–336. + + + """ + num_cut_edges = cut_size(G, S, T=T, weight=weight) + num_total_edges = G.number_of_edges() + return num_cut_edges / (2 * num_total_edges) + + +# TODO What is the generalization to two arguments, S and T? Does the +# denominator become `min(len(S), len(T))`? +@nx._dispatchable +def node_expansion(G, S): + """Returns the node expansion of the set `S`. + + The *node expansion* is the quotient of the size of the node + boundary of *S* and the cardinality of *S*. [1] + + Parameters + ---------- + G : NetworkX graph + + S : collection + A collection of nodes in `G`. + + Returns + ------- + number + The node expansion of the set `S`. + + See also + -------- + boundary_expansion + edge_expansion + mixing_expansion + + References + ---------- + .. [1] Vadhan, Salil P. + "Pseudorandomness." + *Foundations and Trends + in Theoretical Computer Science* 7.1–3 (2011): 1–336. 
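+
+    Examples
+    --------
+    The set {0, 1} in a 6-cycle has neighborhood {0, 1, 2, 5}, giving an
+    expansion of 4 / 2:
+
+    >>> G = nx.cycle_graph(6)
+    >>> nx.node_expansion(G, {0, 1})
+    2.0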
+ + + """ + neighborhood = set(chain.from_iterable(G.neighbors(v) for v in S)) + return len(neighborhood) / len(S) + + +# TODO What is the generalization to two arguments, S and T? Does the +# denominator become `min(len(S), len(T))`? +@nx._dispatchable +def boundary_expansion(G, S): + """Returns the boundary expansion of the set `S`. + + The *boundary expansion* is the quotient of the size + of the node boundary and the cardinality of *S*. [1] + + Parameters + ---------- + G : NetworkX graph + + S : collection + A collection of nodes in `G`. + + Returns + ------- + number + The boundary expansion of the set `S`. + + See also + -------- + edge_expansion + mixing_expansion + node_expansion + + References + ---------- + .. [1] Vadhan, Salil P. + "Pseudorandomness." + *Foundations and Trends in Theoretical Computer Science* + 7.1–3 (2011): 1–336. + + + """ + return len(nx.node_boundary(G, S)) / len(S) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py new file mode 100644 index 0000000..3890a20 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/cycles.py @@ -0,0 +1,1234 @@ +""" +======================== +Cycle finding algorithms +======================== +""" + +from collections import defaultdict +from itertools import combinations, product +from math import inf + +import networkx as nx +from networkx.utils import not_implemented_for, pairwise + +__all__ = [ + "cycle_basis", + "simple_cycles", + "recursive_simple_cycles", + "find_cycle", + "minimum_cycle_basis", + "chordless_cycles", + "girth", +] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def cycle_basis(G, root=None): + """Returns a list of cycles which form a basis for cycles of G. + + A basis for cycles of a network is a minimal collection of + cycles such that any cycle in the network can be written + as a sum of cycles in the basis. Here summation of cycles + is defined as "exclusive or" of the edges. Cycle bases are + useful, e.g. when deriving equations for electric circuits + using Kirchhoff's Laws. + + Parameters + ---------- + G : NetworkX Graph + root : node, optional + Specify starting node for basis. + + Returns + ------- + A list of cycle lists. Each cycle list is a list of nodes + which forms a cycle (loop) in G. + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_cycle(G, [0, 1, 2, 3]) + >>> nx.add_cycle(G, [0, 3, 4, 5]) + >>> nx.cycle_basis(G, 0) + [[3, 4, 5, 0], [1, 2, 3, 0]] + + Notes + ----- + This is adapted from algorithm CACM 491 [1]_. + + References + ---------- + .. [1] Paton, K. An algorithm for finding a fundamental set of + cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518. 
+ + See Also + -------- + simple_cycles + minimum_cycle_basis + """ + gnodes = dict.fromkeys(G) # set-like object that maintains node order + cycles = [] + while gnodes: # loop over connected components + if root is None: + root = gnodes.popitem()[0] + stack = [root] + pred = {root: root} + used = {root: set()} + while stack: # walk the spanning tree finding cycles + z = stack.pop() # use last-in so cycles easier to find + zused = used[z] + for nbr in G[z]: + if nbr not in used: # new node + pred[nbr] = z + stack.append(nbr) + used[nbr] = {z} + elif nbr == z: # self loops + cycles.append([z]) + elif nbr not in zused: # found a cycle + pn = used[nbr] + cycle = [nbr, z] + p = pred[z] + while p not in pn: + cycle.append(p) + p = pred[p] + cycle.append(p) + cycles.append(cycle) + used[nbr].add(z) + for node in pred: + gnodes.pop(node, None) + root = None + return cycles + + +@nx._dispatchable +def simple_cycles(G, length_bound=None): + """Find simple cycles (elementary circuits) of a graph. + + A "simple cycle", or "elementary circuit", is a closed path where + no node appears twice. In a directed graph, two simple cycles are distinct + if they are not cyclic permutations of each other. In an undirected graph, + two simple cycles are distinct if they are not cyclic permutations of each + other nor of the other's reversal. + + Optionally, the cycles are bounded in length. In the unbounded case, we use + a nonrecursive, iterator/generator version of Johnson's algorithm [1]_. In + the bounded case, we use a version of the algorithm of Gupta and + Suzumura [2]_. There may be better algorithms for some cases [3]_ [4]_ [5]_. + + The algorithms of Johnson, and Gupta and Suzumura, are enhanced by some + well-known preprocessing techniques. When `G` is directed, we restrict our + attention to strongly connected components of `G`, generate all simple cycles + containing a certain node, remove that node, and further decompose the + remainder into strongly connected components. When `G` is undirected, we + restrict our attention to biconnected components, generate all simple cycles + containing a particular edge, remove that edge, and further decompose the + remainder into biconnected components. + + Note that multigraphs are supported by this function -- and in undirected + multigraphs, a pair of parallel edges is considered a cycle of length 2. + Likewise, self-loops are considered to be cycles of length 1. We define + cycles as sequences of nodes; so the presence of loops and parallel edges + does not change the number of simple cycles in a graph. + + Parameters + ---------- + G : NetworkX Graph + A networkx graph. Undirected, directed, and multigraphs are all supported. + + length_bound : int or None, optional (default=None) + If `length_bound` is an int, generate all simple cycles of `G` with length at + most `length_bound`. Otherwise, generate all simple cycles of `G`. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + + Examples + -------- + >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]) + >>> sorted(nx.simple_cycles(G)) + [[0], [0, 1, 2], [0, 2], [1, 2], [2]] + + To filter the cycles so that they don't include certain nodes or edges, + copy your graph and eliminate those nodes or edges before calling. 
+    For example, to exclude self-loops from the above example:
+
+    >>> H = G.copy()
+    >>> H.remove_edges_from(nx.selfloop_edges(G))
+    >>> sorted(nx.simple_cycles(H))
+    [[0, 1, 2], [0, 2], [1, 2]]
+
+    Notes
+    -----
+    When `length_bound` is None, the time complexity is $O((n+e)(c+1))$ for $n$
+    nodes, $e$ edges and $c$ simple circuits. Otherwise, when ``length_bound > 1``,
+    the time complexity is $O((c+n)(k-1)d^k)$ where $d$ is the average degree of
+    the nodes of `G` and $k$ = `length_bound`.
+
+    Raises
+    ------
+    ValueError
+        when ``length_bound < 0``.
+
+    References
+    ----------
+    .. [1] Finding all the elementary circuits of a directed graph.
+       D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975.
+       https://doi.org/10.1137/0204007
+    .. [2] Finding All Bounded-Length Simple Cycles in a Directed Graph
+       A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094
+    .. [3] Enumerating the cycles of a digraph: a new preprocessing strategy.
+       G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982.
+    .. [4] A search strategy for the elementary cycles of a directed graph.
+       J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS,
+       v. 16, no. 2, 192-204, 1976.
+    .. [5] Optimal Listing of Cycles and st-Paths in Undirected Graphs
+       R. Ferreira and R. Grossi and A. Marino and N. Pisanti and R. Rizzi and
+       G. Sacomoto https://arxiv.org/abs/1205.2766
+
+    See Also
+    --------
+    cycle_basis
+    chordless_cycles
+    """
+
+    if length_bound is not None:
+        if length_bound == 0:
+            return
+        elif length_bound < 0:
+            raise ValueError("length bound must be non-negative")
+
+    directed = G.is_directed()
+    yield from ([v] for v, Gv in G.adj.items() if v in Gv)
+
+    if length_bound is not None and length_bound == 1:
+        return
+
+    if G.is_multigraph() and not directed:
+        visited = set()
+        for u, Gu in G.adj.items():
+            multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited)
+            yield from ([u, v] for v, m in multiplicity if m > 1)
+            visited.add(u)
+
+    # explicitly filter out loops; implicitly filter out parallel edges
+    if directed:
+        G = nx.DiGraph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
+    else:
+        G = nx.Graph((u, v) for u, Gu in G.adj.items() for v in Gu if v != u)
+
+    # this case is not strictly necessary but improves performance
+    if length_bound is not None and length_bound == 2:
+        if directed:
+            visited = set()
+            for u, Gu in G.adj.items():
+                yield from (
+                    [v, u] for v in visited.intersection(Gu) if G.has_edge(v, u)
+                )
+                visited.add(u)
+        return
+
+    if directed:
+        yield from _directed_cycle_search(G, length_bound)
+    else:
+        yield from _undirected_cycle_search(G, length_bound)
+
+
+def _directed_cycle_search(G, length_bound):
+    """A dispatch function for `simple_cycles` for directed graphs.
+
+    We generate all cycles of G through binary partition.
+
+    1. Pick a node v in G which belongs to at least one cycle
+        a. Generate all cycles of G which contain the node v.
+        b. Recursively generate all cycles of G \\ v.
+
+    This is accomplished through the following:
+
+    1. Compute the strongly connected components SCC of G.
+    2. Select and remove a strongly connected component C from SCC.
+       Select a node v of G[C].
+    3. For each simple cycle P containing v in G[C], yield P.
+    4. Add the strongly connected components of G[C] \\ v to SCC.
+
+    If the parameter length_bound is not None, then step 3 will be limited to
+    simple cycles of length at most length_bound.
+ + Parameters + ---------- + G : NetworkX DiGraph + A directed graph + + length_bound : int or None + If length_bound is an int, generate all simple cycles of G with length at most length_bound. + Otherwise, generate all simple cycles of G. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + """ + + scc = nx.strongly_connected_components + components = [c for c in scc(G) if len(c) >= 2] + while components: + c = components.pop() + Gc = G.subgraph(c) + v = next(iter(c)) + if length_bound is None: + yield from _johnson_cycle_search(Gc, [v]) + else: + yield from _bounded_cycle_search(Gc, [v], length_bound) + # delete v after searching G, to make sure we can find v + G.remove_node(v) + components.extend(c for c in scc(Gc) if len(c) >= 2) + + +def _undirected_cycle_search(G, length_bound): + """A dispatch function for `simple_cycles` for undirected graphs. + + We generate all cycles of G through binary partition. + + 1. Pick an edge (u, v) in G which belongs to at least one cycle + a. Generate all cycles of G which contain the edge (u, v) + b. Recursively generate all cycles of G \\ (u, v) + + This is accomplished through the following: + + 1. Compute the biconnected components BCC of G. + 2. Select and remove a biconnected component C from BCC. Select a + non-tree edge (u, v) of a depth-first search of G[C]. + 3. For each (v -> u) path P remaining in G[C] \\ (u, v), yield P. + 4. Add the biconnected components of G[C] \\ (u, v) to BCC. + + If the parameter length_bound is not None, then step 3 will be limited to simple paths + of length at most length_bound. + + Parameters + ---------- + G : NetworkX Graph + An undirected graph + + length_bound : int or None + If length_bound is an int, generate all simple cycles of G with length at most length_bound. + Otherwise, generate all simple cycles of G. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + """ + + bcc = nx.biconnected_components + components = [c for c in bcc(G) if len(c) >= 3] + while components: + c = components.pop() + Gc = G.subgraph(c) + uv = list(next(iter(Gc.edges))) + G.remove_edge(*uv) + # delete (u, v) before searching G, to avoid fake 3-cycles [u, v, u] + if length_bound is None: + yield from _johnson_cycle_search(Gc, uv) + else: + yield from _bounded_cycle_search(Gc, uv, length_bound) + components.extend(c for c in bcc(Gc) if len(c) >= 3) + + +class _NeighborhoodCache(dict): + """Very lightweight graph wrapper which caches neighborhoods as list. + + This dict subclass uses the __missing__ functionality to query graphs for + their neighborhoods, and store the result as a list. This is used to avoid + the performance penalty incurred by subgraph views. + """ + + def __init__(self, G): + self.G = G + + def __missing__(self, v): + Gv = self[v] = list(self.G[v]) + return Gv + + +def _johnson_cycle_search(G, path): + """The main loop of the cycle-enumeration algorithm of Johnson. + + Parameters + ---------- + G : NetworkX Graph or DiGraph + A graph + + path : list + A cycle prefix. All cycles generated will begin with this prefix. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + + References + ---------- + .. [1] Finding all the elementary circuits of a directed graph. + D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975. 
+ https://doi.org/10.1137/0204007 + + """ + + G = _NeighborhoodCache(G) + blocked = set(path) + B = defaultdict(set) # graph portions that yield no elementary circuit + start = path[0] + stack = [iter(G[path[-1]])] + closed = [False] + while stack: + nbrs = stack[-1] + for w in nbrs: + if w == start: + yield path[:] + closed[-1] = True + elif w not in blocked: + path.append(w) + closed.append(False) + stack.append(iter(G[w])) + blocked.add(w) + break + else: # no more nbrs + stack.pop() + v = path.pop() + if closed.pop(): + if closed: + closed[-1] = True + unblock_stack = {v} + while unblock_stack: + u = unblock_stack.pop() + if u in blocked: + blocked.remove(u) + unblock_stack.update(B[u]) + B[u].clear() + else: + for w in G[v]: + B[w].add(v) + + +def _bounded_cycle_search(G, path, length_bound): + """The main loop of the cycle-enumeration algorithm of Gupta and Suzumura. + + Parameters + ---------- + G : NetworkX Graph or DiGraph + A graph + + path : list + A cycle prefix. All cycles generated will begin with this prefix. + + length_bound: int + A length bound. All cycles generated will have length at most length_bound. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + + References + ---------- + .. [1] Finding All Bounded-Length Simple Cycles in a Directed Graph + A. Gupta and T. Suzumura https://arxiv.org/abs/2105.10094 + + """ + G = _NeighborhoodCache(G) + lock = dict.fromkeys(path, 0) + B = defaultdict(set) + start = path[0] + stack = [iter(G[path[-1]])] + blen = [length_bound] + while stack: + nbrs = stack[-1] + for w in nbrs: + if w == start: + yield path[:] + blen[-1] = 1 + elif len(path) < lock.get(w, length_bound): + path.append(w) + blen.append(length_bound) + lock[w] = len(path) + stack.append(iter(G[w])) + break + else: + stack.pop() + v = path.pop() + bl = blen.pop() + if blen: + blen[-1] = min(blen[-1], bl) + if bl < length_bound: + relax_stack = [(bl, v)] + while relax_stack: + bl, u = relax_stack.pop() + if lock.get(u, length_bound) < length_bound - bl + 1: + lock[u] = length_bound - bl + 1 + relax_stack.extend((bl + 1, w) for w in B[u].difference(path)) + else: + for w in G[v]: + B[w].add(v) + + +@nx._dispatchable +def chordless_cycles(G, length_bound=None): + """Find simple chordless cycles of a graph. + + A `simple cycle` is a closed path where no node appears twice. In a simple + cycle, a `chord` is an additional edge between two nodes in the cycle. A + `chordless cycle` is a simple cycle without chords. Said differently, a + chordless cycle is a cycle C in a graph G where the number of edges in the + induced graph G[C] is equal to the length of `C`. + + Note that some care must be taken in the case that G is not a simple graph + nor a simple digraph. Some authors limit the definition of chordless cycles + to have a prescribed minimum length; we do not. + + 1. We interpret self-loops to be chordless cycles, except in multigraphs + with multiple loops in parallel. Likewise, in a chordless cycle of + length greater than 1, there can be no nodes with self-loops. + + 2. We interpret directed two-cycles to be chordless cycles, except in + multi-digraphs when any edge in a two-cycle has a parallel copy. + + 3. We interpret parallel pairs of undirected edges as two-cycles, except + when a third (or more) parallel edge exists between the two nodes. + + 4. Generalizing the above, edges with parallel clones may not occur in + chordless cycles. 
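+
+    For illustration of rules 3 and 4, a small sketch (cycles are sorted for
+    a stable doctest):
+
+    >>> G = nx.MultiGraph([(0, 1), (0, 1)])
+    >>> sorted(map(sorted, nx.chordless_cycles(G)))
+    [[0, 1]]
+    >>> G.add_edge(0, 1)  # a third parallel copy suppresses the two-cycle
+    2
+    >>> sorted(map(sorted, nx.chordless_cycles(G)))
+    []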
+ + In a directed graph, two chordless cycles are distinct if they are not + cyclic permutations of each other. In an undirected graph, two chordless + cycles are distinct if they are not cyclic permutations of each other nor of + the other's reversal. + + Optionally, the cycles are bounded in length. + + We use an algorithm strongly inspired by that of Dias et al [1]_. It has + been modified in the following ways: + + 1. Recursion is avoided, per Python's limitations + + 2. The labeling function is not necessary, because the starting paths + are chosen (and deleted from the host graph) to prevent multiple + occurrences of the same path + + 3. The search is optionally bounded at a specified length + + 4. Support for directed graphs is provided by extending cycles along + forward edges, and blocking nodes along forward and reverse edges + + 5. Support for multigraphs is provided by omitting digons from the set + of forward edges + + Parameters + ---------- + G : NetworkX DiGraph + A directed graph + + length_bound : int or None, optional (default=None) + If length_bound is an int, generate all simple cycles of G with length at + most length_bound. Otherwise, generate all simple cycles of G. + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + + Examples + -------- + >>> sorted(list(nx.chordless_cycles(nx.complete_graph(4)))) + [[1, 0, 2], [1, 0, 3], [2, 0, 3], [2, 1, 3]] + + Notes + ----- + When length_bound is None, and the graph is simple, the time complexity is + $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ chordless cycles. + + Raises + ------ + ValueError + when length_bound < 0. + + References + ---------- + .. [1] Efficient enumeration of chordless cycles + E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi + https://arxiv.org/abs/1309.1051 + + See Also + -------- + simple_cycles + """ + + if length_bound is not None: + if length_bound == 0: + return + elif length_bound < 0: + raise ValueError("length bound must be non-negative") + + directed = G.is_directed() + multigraph = G.is_multigraph() + + if multigraph: + yield from ([v] for v, Gv in G.adj.items() if len(Gv.get(v, ())) == 1) + else: + yield from ([v] for v, Gv in G.adj.items() if v in Gv) + + if length_bound is not None and length_bound == 1: + return + + # Nodes with loops cannot belong to longer cycles. Let's delete them here. + # also, we implicitly reduce the multiplicity of edges down to 1 in the case + # of multiedges. + loops = set(nx.nodes_with_selfloops(G)) + edges = ((u, v) for u in G if u not in loops for v in G._adj[u] if v not in loops) + if directed: + F = nx.DiGraph(edges) + B = F.to_undirected(as_view=False) + else: + F = nx.Graph(edges) + B = None + + # If we're given a multigraph, we have a few cases to consider with parallel + # edges. + # + # 1. If we have 2 or more edges in parallel between the nodes (u, v), we + # must not construct longer cycles along (u, v). + # 2. If G is not directed, then a pair of parallel edges between (u, v) is a + # chordless cycle unless there exists a third (or more) parallel edge. + # 3. If G is directed, then parallel edges do not form cycles, but do + # preclude back-edges from forming cycles (handled in the next section), + # Thus, if an edge (u, v) is duplicated and the reverse (v, u) is also + # present, then we remove both from F. + # + # In directed graphs, we need to consider both directions that edges can + # take, so iterate over all edges (u, v) and possibly (v, u). 
In undirected + # graphs, we need to be a little careful to only consider every edge once, + # so we use a "visited" set to emulate node-order comparisons. + + if multigraph: + if not directed: + B = F.copy() + visited = set() + for u, Gu in G.adj.items(): + if u in loops: + continue + if directed: + multiplicity = ((v, len(Guv)) for v, Guv in Gu.items()) + for v, m in multiplicity: + if m > 1: + F.remove_edges_from(((u, v), (v, u))) + else: + multiplicity = ((v, len(Guv)) for v, Guv in Gu.items() if v in visited) + for v, m in multiplicity: + if m == 2: + yield [u, v] + if m > 1: + F.remove_edge(u, v) + visited.add(u) + + # If we're given a directed graphs, we need to think about digons. If we + # have two edges (u, v) and (v, u), then that's a two-cycle. If either edge + # was duplicated above, then we removed both from F. So, any digons we find + # here are chordless. After finding digons, we remove their edges from F + # to avoid traversing them in the search for chordless cycles. + if directed: + for u, Fu in F.adj.items(): + digons = [[u, v] for v in Fu if F.has_edge(v, u)] + yield from digons + F.remove_edges_from(digons) + F.remove_edges_from(e[::-1] for e in digons) + + if length_bound is not None and length_bound == 2: + return + + # Now, we prepare to search for cycles. We have removed all cycles of + # lengths 1 and 2, so F is a simple graph or simple digraph. We repeatedly + # separate digraphs into their strongly connected components, and undirected + # graphs into their biconnected components. For each component, we pick a + # node v, search for chordless cycles based at each "stem" (u, v, w), and + # then remove v from that component before separating the graph again. + if directed: + separate = nx.strongly_connected_components + + # Directed stems look like (u -> v -> w), so we use the product of + # predecessors of v with successors of v. + def stems(C, v): + for u, w in product(C.pred[v], C.succ[v]): + if not G.has_edge(u, w): # omit stems with acyclic chords + yield [u, v, w], F.has_edge(w, u) + + else: + separate = nx.biconnected_components + + # Undirected stems look like (u ~ v ~ w), but we must not also search + # (w ~ v ~ u), so we use combinations of v's neighbors of length 2. + def stems(C, v): + yield from (([u, v, w], F.has_edge(w, u)) for u, w in combinations(C[v], 2)) + + components = [c for c in separate(F) if len(c) > 2] + while components: + c = components.pop() + v = next(iter(c)) + Fc = F.subgraph(c) + Fcc = Bcc = None + for S, is_triangle in stems(Fc, v): + if is_triangle: + yield S + else: + if Fcc is None: + Fcc = _NeighborhoodCache(Fc) + Bcc = Fcc if B is None else _NeighborhoodCache(B.subgraph(c)) + yield from _chordless_cycle_search(Fcc, Bcc, S, length_bound) + + components.extend(c for c in separate(F.subgraph(c - {v})) if len(c) > 2) + + +def _chordless_cycle_search(F, B, path, length_bound): + """The main loop for chordless cycle enumeration. + + This algorithm is strongly inspired by that of Dias et al [1]_. It has been + modified in the following ways: + + 1. Recursion is avoided, per Python's limitations + + 2. The labeling function is not necessary, because the starting paths + are chosen (and deleted from the host graph) to prevent multiple + occurrences of the same path + + 3. The search is optionally bounded at a specified length + + 4. Support for directed graphs is provided by extending cycles along + forward edges, and blocking nodes along forward and reverse edges + + 5. 
Support for multigraphs is provided by omitting digons from the set + of forward edges + + Parameters + ---------- + F : _NeighborhoodCache + A graph of forward edges to follow in constructing cycles + + B : _NeighborhoodCache + A graph of blocking edges to prevent the production of chordless cycles + + path : list + A cycle prefix. All cycles generated will begin with this prefix. + + length_bound : int + A length bound. All cycles generated will have length at most length_bound. + + + Yields + ------ + list of nodes + Each cycle is represented by a list of nodes along the cycle. + + References + ---------- + .. [1] Efficient enumeration of chordless cycles + E. Dias and D. Castonguay and H. Longo and W.A.R. Jradi + https://arxiv.org/abs/1309.1051 + + """ + blocked = defaultdict(int) + target = path[0] + blocked[path[1]] = 1 + for w in path[1:]: + for v in B[w]: + blocked[v] += 1 + + stack = [iter(F[path[2]])] + while stack: + nbrs = stack[-1] + for w in nbrs: + if blocked[w] == 1 and (length_bound is None or len(path) < length_bound): + Fw = F[w] + if target in Fw: + yield path + [w] + else: + Bw = B[w] + if target in Bw: + continue + for v in Bw: + blocked[v] += 1 + path.append(w) + stack.append(iter(Fw)) + break + else: + stack.pop() + for v in B[path.pop()]: + blocked[v] -= 1 + + +@not_implemented_for("undirected") +@nx._dispatchable(mutates_input=True) +def recursive_simple_cycles(G): + """Find simple cycles (elementary circuits) of a directed graph. + + A `simple cycle`, or `elementary circuit`, is a closed path where + no node appears twice. Two elementary circuits are distinct if they + are not cyclic permutations of each other. + + This version uses a recursive algorithm to build a list of cycles. + You should probably use the iterator version called simple_cycles(). + Warning: This recursive version uses lots of RAM! + It appears in NetworkX for pedagogical value. + + Parameters + ---------- + G : NetworkX DiGraph + A directed graph + + Returns + ------- + A list of cycles, where each cycle is represented by a list of nodes + along the cycle. + + Example: + + >>> edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)] + >>> G = nx.DiGraph(edges) + >>> nx.recursive_simple_cycles(G) + [[0], [2], [0, 1, 2], [0, 2], [1, 2]] + + Notes + ----- + The implementation follows pp. 79-80 in [1]_. + + The time complexity is $O((n+e)(c+1))$ for $n$ nodes, $e$ edges and $c$ + elementary circuits. + + References + ---------- + .. [1] Finding all the elementary circuits of a directed graph. + D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975. + https://doi.org/10.1137/0204007 + + See Also + -------- + simple_cycles, cycle_basis + """ + + # Jon Olav Vik, 2010-08-09 + def _unblock(thisnode): + """Recursively unblock and remove nodes from B[thisnode].""" + if blocked[thisnode]: + blocked[thisnode] = False + while B[thisnode]: + _unblock(B[thisnode].pop()) + + def circuit(thisnode, startnode, component): + closed = False # set to True if elementary path is closed + path.append(thisnode) + blocked[thisnode] = True + for nextnode in component[thisnode]: # direct successors of thisnode + if nextnode == startnode: + result.append(path[:]) + closed = True + elif not blocked[nextnode]: + if circuit(nextnode, startnode, component): + closed = True + if closed: + _unblock(thisnode) + else: + for nextnode in component[thisnode]: + if thisnode not in B[nextnode]: # TODO: use set for speedup? 
+                        B[nextnode].append(thisnode)
+        path.pop()  # remove thisnode from path
+        return closed
+
+    path = []  # stack of nodes in current path
+    blocked = defaultdict(bool)  # vertex: blocked from search?
+    B = defaultdict(list)  # graph portions that yield no elementary circuit
+    result = []  # list to accumulate the circuits found
+
+    # Johnson's algorithm excludes self-loop edges such as (v, v).
+    # To be backward compatible, we record those cycles in advance
+    # and then remove them from G.
+    for v in G:
+        if G.has_edge(v, v):
+            result.append([v])
+            G.remove_edge(v, v)
+
+    # Johnson's algorithm requires some ordering of the nodes.
+    # They might not be sortable so we assign an arbitrary ordering.
+    ordering = dict(zip(G, range(len(G))))
+    for s in ordering:
+        # Build the subgraph induced by s and following nodes in the ordering
+        subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s])
+        # Find the strongly connected component in the subgraph
+        # that contains the least node according to the ordering
+        strongcomp = nx.strongly_connected_components(subgraph)
+        mincomp = min(strongcomp, key=lambda ns: min(ordering[n] for n in ns))
+        component = G.subgraph(mincomp)
+        if len(component) > 1:
+            # smallest node in the component according to the ordering
+            startnode = min(component, key=ordering.__getitem__)
+            for node in component:
+                blocked[node] = False
+                B[node][:] = []
+            dummy = circuit(startnode, startnode, component)
+    return result
+
+
+@nx._dispatchable
+def find_cycle(G, source=None, orientation=None):
+    """Returns a cycle found via depth-first traversal.
+
+    The cycle is a list of edges indicating the cyclic path.
+    Orientation of directed edges is controlled by `orientation`.
+
+    Parameters
+    ----------
+    G : graph
+        A directed/undirected graph/multigraph.
+
+    source : node or list of nodes
+        The node from which the traversal begins. If None, then a source
+        is chosen arbitrarily and repeatedly until all edges from each node in
+        the graph are searched.
+
+    orientation : None | 'original' | 'reverse' | 'ignore' (default: None)
+        For directed graphs and directed multigraphs, edge traversals need not
+        respect the original orientation of the edges.
+        When set to 'reverse' every edge is traversed in the reverse direction.
+        When set to 'ignore', every edge is treated as undirected.
+        When set to 'original', every edge is treated as directed.
+        In all three cases, the yielded edge tuples add a last entry to
+        indicate the direction in which that edge was traversed.
+        If orientation is None, the yielded edge has no direction indicated.
+        The direction is respected, but not reported.
+
+    Returns
+    -------
+    edges : directed edges
+        A list of directed edges indicating the path taken for the loop.
+        If no cycle is found, then an exception is raised.
+        For graphs, an edge is of the form `(u, v)` where `u` and `v`
+        are the tail and head of the edge as determined by the traversal.
+        For multigraphs, an edge is of the form `(u, v, key)`, where `key` is
+        the key of the edge. When the graph is directed, then `u` and `v`
+        are always in the order of the actual directed edge.
+        If orientation is not None then the edge tuple is extended to include
+        the direction of traversal ('forward' or 'reverse') on that edge.
+
+    Raises
+    ------
+    NetworkXNoCycle
+        If no cycle was found.
+
+    Examples
+    --------
+    In this example, we construct a DAG and find, in the first call, that there
+    are no directed cycles, and so an exception is raised.
In the second call, + we ignore edge orientations and find that there is an undirected cycle. + Note that the second call finds a directed cycle while effectively + traversing an undirected graph, and so, we found an "undirected cycle". + This means that this DAG structure does not form a directed tree (which + is also known as a polytree). + + >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2)]) + >>> nx.find_cycle(G, orientation="original") + Traceback (most recent call last): + ... + networkx.exception.NetworkXNoCycle: No cycle found. + >>> list(nx.find_cycle(G, orientation="ignore")) + [(0, 1, 'forward'), (1, 2, 'forward'), (0, 2, 'reverse')] + + See Also + -------- + simple_cycles + """ + if not G.is_directed() or orientation in (None, "original"): + + def tailhead(edge): + return edge[:2] + + elif orientation == "reverse": + + def tailhead(edge): + return edge[1], edge[0] + + elif orientation == "ignore": + + def tailhead(edge): + if edge[-1] == "reverse": + return edge[1], edge[0] + return edge[:2] + + explored = set() + cycle = [] + final_node = None + for start_node in G.nbunch_iter(source): + if start_node in explored: + # No loop is possible. + continue + + edges = [] + # All nodes seen in this iteration of edge_dfs + seen = {start_node} + # Nodes in active path. + active_nodes = {start_node} + previous_head = None + + for edge in nx.edge_dfs(G, start_node, orientation): + # Determine if this edge is a continuation of the active path. + tail, head = tailhead(edge) + if head in explored: + # Then we've already explored it. No loop is possible. + continue + if previous_head is not None and tail != previous_head: + # This edge results from backtracking. + # Pop until we get a node whose head equals the current tail. + # So for example, we might have: + # (0, 1), (1, 2), (2, 3), (1, 4) + # which must become: + # (0, 1), (1, 4) + while True: + try: + popped_edge = edges.pop() + except IndexError: + edges = [] + active_nodes = {tail} + break + else: + popped_head = tailhead(popped_edge)[1] + active_nodes.remove(popped_head) + + if edges: + last_head = tailhead(edges[-1])[1] + if tail == last_head: + break + edges.append(edge) + + if head in active_nodes: + # We have a loop! + cycle.extend(edges) + final_node = head + break + else: + seen.add(head) + active_nodes.add(head) + previous_head = head + + if cycle: + break + else: + explored.update(seen) + + else: + assert len(cycle) == 0 + raise nx.exception.NetworkXNoCycle("No cycle found.") + + # We now have a list of edges which ends on a cycle. + # So we need to remove from the beginning edges that are not relevant. + + for i, edge in enumerate(cycle): + tail, head = tailhead(edge) + if tail == final_node: + break + + return cycle[i:] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def minimum_cycle_basis(G, weight=None): + """Returns a minimum weight cycle basis for G + + Minimum weight means a cycle basis for which the total weight + (length for unweighted graphs) of all the cycles is minimum. + + Parameters + ---------- + G : NetworkX Graph + weight: string + name of the edge attribute to use for edge weights + + Returns + ------- + A list of cycle lists. Each cycle list is a list of nodes + which forms a cycle (loop) in G. 
Note that the nodes are not
+    necessarily returned in the order in which they appear in the cycle.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> nx.add_cycle(G, [0, 1, 2, 3])
+    >>> nx.add_cycle(G, [0, 3, 4, 5])
+    >>> nx.minimum_cycle_basis(G)
+    [[5, 4, 3, 0], [3, 2, 1, 0]]
+
+    References
+    ----------
+    .. [1] Kavitha, Telikepalli, et al. "An O(m^2n) Algorithm for
+       Minimum Cycle Basis of Graphs."
+       http://link.springer.com/article/10.1007/s00453-007-9064-z
+    .. [2] de Pina, J. 1995. Applications of shortest path methods.
+       Ph.D. thesis, University of Amsterdam, Netherlands
+
+    See Also
+    --------
+    simple_cycles, cycle_basis
+    """
+    # We first split the graph into connected subgraphs
+    return sum(
+        (_min_cycle_basis(G.subgraph(c), weight) for c in nx.connected_components(G)),
+        [],
+    )
+
+
+def _min_cycle_basis(G, weight):
+    cb = []
+    # We extract the edges not in a spanning tree. We do not really need a
+    # *minimum* spanning tree. That is why we call the next function with
+    # weight=None. Depending on implementation, it may be faster as well.
+    tree_edges = list(nx.minimum_spanning_edges(G, weight=None, data=False))
+    chords = G.edges - tree_edges - {(v, u) for u, v in tree_edges}
+
+    # We maintain a set of vectors orthogonal to the cycles found so far
+    set_orth = [{edge} for edge in chords]
+    while set_orth:
+        base = set_orth.pop()
+        # kth cycle is "parallel" to kth vector in set_orth
+        cycle_edges = _min_cycle(G, base, weight)
+        cb.append([v for u, v in cycle_edges])
+
+        # now update set_orth so that the (k+1)-th, (k+2)-th, ... elements are
+        # orthogonal to the newly found cycle, as per [p. 336, 1]
+        set_orth = [
+            (
+                {e for e in orth if e not in base if e[::-1] not in base}
+                | {e for e in base if e not in orth if e[::-1] not in orth}
+            )
+            if sum((e in orth or e[::-1] in orth) for e in cycle_edges) % 2
+            else orth
+            for orth in set_orth
+        ]
+    return cb
+
+
+def _min_cycle(G, orth, weight):
+    """
+    Computes the minimum weight cycle in G,
+    orthogonal to the vector orth as per [p. 338, 1].
+    Use (u, 1) to indicate the lifted copy of u (denoted u' in the paper).
+    """
+    Gi = nx.Graph()
+
+    # Add 2 copies of each edge in G to Gi.
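+    # (Why the lifting works, briefly: any path in Gi from n to its copy
+    # (n, 1) must cross between the two layers an odd number of times, so it
+    # projects to a closed walk in G using an odd number of edges of `orth`,
+    # which is exactly the orthogonality condition of the algorithm.)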
+    # If edge is in orth, add cross edge; otherwise in-plane edge
+    for u, v, wt in G.edges(data=weight, default=1):
+        if (u, v) in orth or (v, u) in orth:
+            Gi.add_edges_from([(u, (v, 1)), ((u, 1), v)], Gi_weight=wt)
+        else:
+            Gi.add_edges_from([(u, v), ((u, 1), (v, 1))], Gi_weight=wt)
+
+    # find the shortest length in Gi between n and (n, 1) for each n
+    # Note: Use "Gi_weight" for name of weight attribute
+    spl = nx.shortest_path_length
+    lift = {n: spl(Gi, source=n, target=(n, 1), weight="Gi_weight") for n in G}
+
+    # Now compute that short path in Gi, which translates to a cycle in G
+    start = min(lift, key=lift.get)
+    end = (start, 1)
+    min_path_i = nx.shortest_path(Gi, source=start, target=end, weight="Gi_weight")
+
+    # Now we obtain the actual path, re-map nodes in Gi to those in G
+    min_path = [n if n in G else n[0] for n in min_path_i]
+
+    # Now remove the edges that occur twice
+    # two passes: flag which edges get kept, then build the list
+    edgelist = list(pairwise(min_path))
+    edgeset = set()
+    for e in edgelist:
+        if e in edgeset:
+            edgeset.remove(e)
+        elif e[::-1] in edgeset:
+            edgeset.remove(e[::-1])
+        else:
+            edgeset.add(e)
+
+    min_edgelist = []
+    for e in edgelist:
+        if e in edgeset:
+            min_edgelist.append(e)
+            edgeset.remove(e)
+        elif e[::-1] in edgeset:
+            min_edgelist.append(e[::-1])
+            edgeset.remove(e[::-1])
+
+    return min_edgelist
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def girth(G):
+    """Returns the girth of the graph.
+
+    The girth of a graph is the length of its shortest cycle, or infinity if
+    the graph is acyclic. The algorithm follows the description given on the
+    Wikipedia page [1]_, and runs in time O(mn) on a graph with m edges and n
+    nodes.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+
+    Returns
+    -------
+    int or math.inf
+
+    Examples
+    --------
+    All examples below (except P_5) can easily be checked using Wikipedia,
+    which has a page for each of these famous graphs.
+
+    >>> nx.girth(nx.chvatal_graph())
+    4
+    >>> nx.girth(nx.tutte_graph())
+    4
+    >>> nx.girth(nx.petersen_graph())
+    5
+    >>> nx.girth(nx.heawood_graph())
+    6
+    >>> nx.girth(nx.pappus_graph())
+    6
+    >>> nx.girth(nx.path_graph(5))
+    inf
+
+    References
+    ----------
+    .. [1] `Wikipedia: Girth <https://en.wikipedia.org/wiki/Girth_(graph_theory)>`_
+
+    """
+    girth = depth_limit = inf
+    tree_edge = nx.algorithms.traversal.breadth_first_search.TREE_EDGE
+    level_edge = nx.algorithms.traversal.breadth_first_search.LEVEL_EDGE
+    for n in G:
+        # run a BFS from source n, keeping track of distances; since we want
+        # the shortest cycle, no need to explore beyond the current minimum length
+        depth = {n: 0}
+        for u, v, label in nx.bfs_labeled_edges(G, n):
+            du = depth[u]
+            if du > depth_limit:
+                break
+            if label is tree_edge:
+                depth[v] = du + 1
+            else:
+                # if (u, v) is a level edge, the length is du + du + 1 (odd)
+                # otherwise, it's a forward edge; length is du + (du + 1) + 1 (even)
+                delta = label is level_edge
+                length = du + du + 2 - delta
+                if length < girth:
+                    girth = length
+                    depth_limit = du - delta
+
+    return girth
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py
new file mode 100644
index 0000000..3d85a2c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/d_separation.py
@@ -0,0 +1,677 @@
+"""
+Algorithm for testing d-separation in DAGs.
+
+*d-separation* is a test for conditional independence in probability
+distributions that can be factorized using DAGs.
It is a purely
+graphical test that uses the underlying graph and makes no reference
+to the actual distribution parameters. See [1]_ for a formal
+definition.
+
+The implementation is based on the conceptually simple linear time
+algorithm presented in [2]_. Refer to [3]_, [4]_ for a couple of
+alternative algorithms.
+
+The functional interface in NetworkX consists of three functions:
+
+- `find_minimal_d_separator` returns a minimal d-separator set ``z``.
+  That is, removing any node or nodes from it makes it no longer a d-separator.
+- `is_d_separator` checks if a given set is a d-separator.
+- `is_minimal_d_separator` checks if a given set is a minimal d-separator.
+
+D-separators
+------------
+
+Here, we provide a brief overview of d-separation and related concepts that
+are relevant for understanding it:
+
+The ideas of d-separation and d-connection relate to paths being open or blocked.
+
+- A "path" is a sequence of nodes connected in order by edges. Unlike for most
+  graph theory analysis, the direction of the edges is ignored. Thus the path
+  can be thought of as a traditional path on the undirected version of the graph.
+- A "candidate d-separator" ``z`` is a set of nodes being considered as
+  possibly blocking all paths between two prescribed sets ``x`` and ``y`` of nodes.
+  We refer to each node in the candidate d-separator as "known".
+- A "collider" node on a path is a node that is a successor of its two neighbor
+  nodes on the path. That is, ``c`` is a collider if the edge directions
+  along the path look like ``... u -> c <- v ...``.
+- If a collider node or any of its descendants are "known", the collider
+  is called an "open collider". Otherwise it is a "blocking collider".
+- Any path can be "blocked" in two ways. If the path contains a "known" node
+  that is not a collider, the path is blocked. Also, if the path contains a
+  collider that is not a "known" node, the path is blocked.
+- A path is "open" if it is not blocked. That is, it is open if every node is
+  either an open collider or not a "known". Said another way, every
+  "known" in the path is a collider and every collider is open (has a
+  "known" as an inclusive descendant). The concept of "open path" is meant to
+  demonstrate a probabilistic conditional dependence between two nodes given
+  prescribed knowledge ("known" nodes).
+- Two sets ``x`` and ``y`` of nodes are "d-separated" by a set of nodes ``z``
+  if all paths between nodes in ``x`` and nodes in ``y`` are blocked. That is,
+  if there are no open paths from any node in ``x`` to any node in ``y``.
+  Such a set ``z`` is a "d-separator" of ``x`` and ``y``.
+- A "minimal d-separator" is a d-separator ``z`` for which no node or subset
+  of nodes can be removed with it still being a d-separator.
+
+The d-separator blocks some paths between ``x`` and ``y`` but opens others.
+Nodes in the d-separator block paths if the nodes are not colliders.
+But if a collider or its descendant nodes are in the d-separation set, the
+colliders are open, allowing a path through that collider.
+
+Illustration of D-separation with examples
+------------------------------------------
+
+A pair of nodes, ``u`` and ``v``, are d-connected if there is a path
+from ``u`` to ``v`` that is not blocked. That means there is an open
+path from ``u`` to ``v``.
+
+For example, if the d-separating set is the empty set, then the following paths are
+open between ``u`` and ``v``:
+
+- u <- n -> v
+- u -> w -> ...
-> n -> v + +If on the other hand, ``n`` is in the d-separating set, then ``n`` blocks +those paths between ``u`` and ``v``. + +Colliders block a path if they and their descendants are not included +in the d-separating set. An example of a path that is blocked when the +d-separating set is empty is: + +- u -> w -> ... -> n <- v + +The node ``n`` is a collider in this path and is not in the d-separating set. +So ``n`` blocks this path. However, if ``n`` or a descendant of ``n`` is +included in the d-separating set, then the path through the collider +at ``n`` (... -> n <- ...) is "open". + +D-separation is concerned with blocking all paths between nodes from ``x`` to ``y``. +A d-separating set between ``x`` and ``y`` is one where all paths are blocked. + +D-separation and its applications in probability +------------------------------------------------ + +D-separation is commonly used in probabilistic causal-graph models. D-separation +connects the idea of probabilistic "dependence" with separation in a graph. If +one assumes the causal Markov condition [5]_, (every node is conditionally +independent of its non-descendants, given its parents) then d-separation implies +conditional independence in probability distributions. +Symmetrically, d-connection implies dependence. + +The intuition is as follows. The edges on a causal graph indicate which nodes +influence the outcome of other nodes directly. An edge from u to v +implies that the outcome of event ``u`` influences the probabilities for +the outcome of event ``v``. Certainly knowing ``u`` changes predictions for ``v``. +But also knowing ``v`` changes predictions for ``u``. The outcomes are dependent. +Furthermore, an edge from ``v`` to ``w`` would mean that ``w`` and ``v`` are dependent +and thus that ``u`` could indirectly influence ``w``. + +Without any knowledge about the system (candidate d-separating set is empty) +a causal graph ``u -> v -> w`` allows all three nodes to be dependent. But +if we know the outcome of ``v``, the conditional probabilities of outcomes for +``u`` and ``w`` are independent of each other. That is, once we know the outcome +for ``v``, the probabilities for ``w`` do not depend on the outcome for ``u``. +This is the idea behind ``v`` blocking the path if it is "known" (in the candidate +d-separating set). + +The same argument works whether the direction of the edges are both +left-going and when both arrows head out from the middle. Having a "known" +node on a path blocks the collider-free path because those relationships +make the conditional probabilities independent. + +The direction of the causal edges does impact dependence precisely in the +case of a collider e.g. ``u -> v <- w``. In that situation, both ``u`` and ``w`` +influence ``v``. But they do not directly influence each other. So without any +knowledge of any outcomes, ``u`` and ``w`` are independent. That is the idea behind +colliders blocking the path. But, if ``v`` is known, the conditional probabilities +of ``u`` and ``w`` can be dependent. This is the heart of Berkson's Paradox [6]_. +For example, suppose ``u`` and ``w`` are boolean events (they either happen or do not) +and ``v`` represents the outcome "at least one of ``u`` and ``w`` occur". Then knowing +``v`` is true makes the conditional probabilities of ``u`` and ``w`` dependent. +Essentially, knowing that at least one of them is true raises the probability of +each. 
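+
+As a concrete sketch of this collider effect using this module's own
+`is_d_separator`: for ``u -> v <- w``, conditioning on the collider ``v``
+d-connects ``u`` and ``w``.
+
+>>> G = nx.DiGraph([("u", "v"), ("w", "v")])
+>>> nx.is_d_separator(G, "u", "w", set())
+True
+>>> nx.is_d_separator(G, "u", "w", {"v"})
+False
+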
But further knowledge that ``w`` is true (or false) changes the conditional
+probability of ``u`` to either the original value or 1. So the conditional
+probability of ``u`` depends on the outcome of ``w`` even though there is no
+causal relationship between them. When a collider is known, dependence can
+occur across paths through that collider. This is the reason open colliders
+do not block paths.
+
+Furthermore, even if ``v`` is not "known", if one of its descendants is "known"
+we can use that information to know more about ``v``, which again makes
+``u`` and ``w`` potentially dependent. Suppose the chance of ``n`` occurring
+is much higher when ``v`` occurs ("at least one of ``u`` and ``w`` occur").
+Then if we know ``n`` occurred, it is more likely that ``v`` occurred, and that
+makes the chance of ``u`` and ``w`` dependent. This is the idea behind why
+a collider does not block a path if any descendant of the collider is "known".
+
+When two sets of nodes ``x`` and ``y`` are d-separated by a set ``z``,
+it means that given the outcomes of the nodes in ``z``, the probabilities
+of outcomes of the nodes in ``x`` are independent of the outcomes of the
+nodes in ``y`` and vice versa.
+
+Examples
+--------
+A Hidden Markov Model with 5 observed states and 5 hidden states,
+where the hidden states form a causal chain (a path), results in
+the following causal network. We check that
+early states along the path are separated from late states in
+the path by the d-separator of the middle hidden state.
+Thus if we condition on the middle hidden state, the early
+state probabilities are independent of the late state outcomes.
+
+>>> G = nx.DiGraph()
+>>> G.add_edges_from(
+...     [
+...         ("H1", "H2"),
+...         ("H2", "H3"),
+...         ("H3", "H4"),
+...         ("H4", "H5"),
+...         ("H1", "O1"),
+...         ("H2", "O2"),
+...         ("H3", "O3"),
+...         ("H4", "O4"),
+...         ("H5", "O5"),
+...     ]
+... )
+>>> x, y, z = ({"H1", "O1"}, {"H5", "O5"}, {"H3"})
+>>> nx.is_d_separator(G, x, y, z)
+True
+>>> nx.is_minimal_d_separator(G, x, y, z)
+True
+>>> nx.is_minimal_d_separator(G, x, y, z | {"O3"})
+False
+>>> z = nx.find_minimal_d_separator(G, x | y, {"O2", "O3", "O4"})
+>>> z == {"H2", "H4"}
+True
+
+If no minimal d-separator exists, `None` is returned:
+
+>>> other_z = nx.find_minimal_d_separator(G, x | y, {"H2", "H3"})
+>>> other_z is None
+True
+
+
+References
+----------
+
+.. [1] Pearl, J. (2009). Causality. Cambridge: Cambridge University Press.
+
+.. [2] Darwiche, A. (2009). Modeling and reasoning with Bayesian networks.
+   Cambridge: Cambridge University Press.
+
+.. [3] Shachter, Ross D. "Bayes-ball: The rational pastime (for
+   determining irrelevance and requisite information in belief networks
+   and influence diagrams)." In Proceedings of the Fourteenth Conference
+   on Uncertainty in Artificial Intelligence (UAI), (pp. 480–487). 1998.
+
+.. [4] Koller, D., & Friedman, N. (2009).
+   Probabilistic graphical models: principles and techniques. The MIT Press.
+
+.. [5] https://en.wikipedia.org/wiki/Causal_Markov_condition
+
+.. [6] https://en.wikipedia.org/wiki/Berkson%27s_paradox
+
+"""
+
+from collections import deque
+from itertools import chain
+
+import networkx as nx
+from networkx.utils import UnionFind, not_implemented_for
+
+__all__ = [
+    "is_d_separator",
+    "is_minimal_d_separator",
+    "find_minimal_d_separator",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def is_d_separator(G, x, y, z):
+    """Return whether node sets `x` and `y` are d-separated by `z`.
+ + Parameters + ---------- + G : nx.DiGraph + A NetworkX DAG. + + x : node or set of nodes + First node or set of nodes in `G`. + + y : node or set of nodes + Second node or set of nodes in `G`. + + z : node or set of nodes + Potential separator (set of conditioning nodes in `G`). Can be empty set. + + Returns + ------- + b : bool + A boolean that is true if `x` is d-separated from `y` given `z` in `G`. + + Raises + ------ + NetworkXError + The *d-separation* test is commonly used on disjoint sets of + nodes in acyclic directed graphs. Accordingly, the algorithm + raises a :exc:`NetworkXError` if the node sets are not + disjoint or if the input graph is not a DAG. + + NodeNotFound + If any of the input nodes are not found in the graph, + a :exc:`NodeNotFound` exception is raised + + Notes + ----- + A d-separating set in a DAG is a set of nodes that + blocks all paths between the two sets. Nodes in `z` + block a path if they are part of the path and are not a collider, + or a descendant of a collider. Also colliders that are not in `z` + block a path. A collider structure along a path + is ``... -> c <- ...`` where ``c`` is the collider node. + + https://en.wikipedia.org/wiki/Bayesian_network#d-separation + """ + try: + x = {x} if x in G else x + y = {y} if y in G else y + z = {z} if z in G else z + + intersection = x & y or x & z or y & z + if intersection: + raise nx.NetworkXError( + f"The sets are not disjoint, with intersection {intersection}" + ) + + set_v = x | y | z + if set_v - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound("One of x, y, or z is not a node or a set of nodes in G") + + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("graph should be directed acyclic") + + # contains -> and <-> edges from starting node T + forward_deque = deque([]) + forward_visited = set() + + # contains <- and - edges from starting node T + backward_deque = deque(x) + backward_visited = set() + + ancestors_or_z = set().union(*[nx.ancestors(G, node) for node in x]) | z | x + + while forward_deque or backward_deque: + if backward_deque: + node = backward_deque.popleft() + backward_visited.add(node) + if node in y: + return False + if node in z: + continue + + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + if forward_deque: + node = forward_deque.popleft() + forward_visited.add(node) + if node in y: + return False + + # Consider if -> node <- is opened due to ancestor of node in z + if node in ancestors_or_z: + # add <- edges to backward deque + backward_deque.extend(G.pred[node].keys() - backward_visited) + if node not in z: + # add -> edges to forward deque + forward_deque.extend(G.succ[node].keys() - forward_visited) + + return True + + +@not_implemented_for("undirected") +@nx._dispatchable +def find_minimal_d_separator(G, x, y, *, included=None, restricted=None): + """Returns a minimal d-separating set between `x` and `y` if possible + + A d-separating set in a DAG is a set of nodes that blocks all + paths between the two sets of nodes, `x` and `y`. This function + constructs a d-separating set that is "minimal", meaning no nodes can + be removed without it losing the d-separating property for `x` and `y`. + If no d-separating sets exist for `x` and `y`, this returns `None`. 
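+
+    For illustration (a small sketch; on this chain the minimal separator
+    happens to be unique):
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
+    >>> nx.find_minimal_d_separator(G, 0, 3)
+    {1}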
+ + In a DAG there may be more than one minimal d-separator between two + sets of nodes. Minimal d-separators are not always unique. This function + returns one minimal d-separator, or `None` if no d-separator exists. + + Uses the algorithm presented in [1]_. The complexity of the algorithm + is :math:`O(m)`, where :math:`m` stands for the number of edges in + the subgraph of G consisting of only the ancestors of `x` and `y`. + For full details, see [1]_. + + Parameters + ---------- + G : graph + A networkx DAG. + x : set | node + A node or set of nodes in the graph. + y : set | node + A node or set of nodes in the graph. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is None, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is None meaning all nodes in ``G``. + + Returns + ------- + z : set | None + The minimal d-separating set, if at least one d-separating set exists, + otherwise None. + + Raises + ------ + NetworkXError + Raises a :exc:`NetworkXError` if the input graph is not a DAG + or if node sets `x`, `y`, and `included` are not disjoint. + + NodeNotFound + If any of the input nodes are not found in the graph, + a :exc:`NodeNotFound` exception is raised. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + """ + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("graph should be directed acyclic") + + try: + x = {x} if x in G else x + y = {y} if y in G else y + + if included is None: + included = set() + elif included in G: + included = {included} + + if restricted is None: + restricted = set(G) + elif restricted in G: + restricted = {restricted} + + set_y = x | y | included | restricted + if set_y - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound( + "One of x, y, included or restricted is not a node or set of nodes in G" + ) + + if not included <= restricted: + raise nx.NetworkXError( + f"Included nodes {included} must be in restricted nodes {restricted}" + ) + + intersection = x & y or x & included or y & included + if intersection: + raise nx.NetworkXError( + f"The sets x, y, included are not disjoint. Overlap: {intersection}" + ) + + nodeset = x | y | included + ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, node) for node in nodeset]) + + z_init = restricted & (ancestors_x_y_included - (x | y)) + + x_closure = _reachable(G, x, ancestors_x_y_included, z_init) + if x_closure & y: + return None + + z_updated = z_init & (x_closure | included) + y_closure = _reachable(G, y, ancestors_x_y_included, z_updated) + return z_updated & (y_closure | included) + + +@not_implemented_for("undirected") +@nx._dispatchable +def is_minimal_d_separator(G, x, y, z, *, included=None, restricted=None): + """Determine if `z` is a minimal d-separator for `x` and `y`. + + A d-separator, `z`, in a DAG is a set of nodes that blocks + all paths from nodes in set `x` to nodes in set `y`. + A minimal d-separator is a d-separator `z` such that removing + any subset of nodes makes it no longer a d-separator. + + Note: This function checks whether `z` is a d-separator AND is + minimal. 
One can use the function `is_d_separator` to only check if + `z` is a d-separator. See examples below. + + Parameters + ---------- + G : nx.DiGraph + A NetworkX DAG. + x : node | set + A node or set of nodes in the graph. + y : node | set + A node or set of nodes in the graph. + z : node | set + The node or set of nodes to check if it is a minimal d-separating set. + The function :func:`is_d_separator` is called inside this function + to verify that `z` is in fact a d-separator. + included : set | node | None + A node or set of nodes which must be included in the found separating set, + default is ``None``, which means the empty set. + restricted : set | node | None + Restricted node or set of nodes to consider. Only these nodes can be in + the found separating set, default is ``None`` meaning all nodes in ``G``. + + Returns + ------- + bool + Whether or not the set `z` is a minimal d-separator subject to + `restricted` nodes and `included` node constraints. + + Examples + -------- + >>> G = nx.path_graph([0, 1, 2, 3], create_using=nx.DiGraph) + >>> G.add_node(4) + >>> nx.is_minimal_d_separator(G, 0, 2, {1}) + True + >>> # since {1} is the minimal d-separator, {1, 3, 4} is not minimal + >>> nx.is_minimal_d_separator(G, 0, 2, {1, 3, 4}) + False + >>> # alternatively, if we only want to check that {1, 3, 4} is a d-separator + >>> nx.is_d_separator(G, 0, 2, {1, 3, 4}) + True + + Raises + ------ + NetworkXError + Raises a :exc:`NetworkXError` if the input graph is not a DAG. + + NodeNotFound + If any of the input nodes are not found in the graph, + a :exc:`NodeNotFound` exception is raised. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + + Notes + ----- + This function works on verifying that a set is minimal and + d-separating between two nodes. Uses criterion (a), (b), (c) on + page 4 of [1]_. a) closure(`x`) and `y` are disjoint. b) `z` contains + all nodes from `included` and is contained in the `restricted` + nodes and in the union of ancestors of `x`, `y`, and `included`. + c) the nodes in `z` not in `included` are contained in both + closure(x) and closure(y). The closure of a set is the set of nodes + connected to the set by a directed path in G. + + The complexity is :math:`O(m)`, where :math:`m` stands for the + number of edges in the subgraph of G consisting of only the + ancestors of `x` and `y`. + + For full details, see [1]_. 
+ """ + if not nx.is_directed_acyclic_graph(G): + raise nx.NetworkXError("graph should be directed acyclic") + + try: + x = {x} if x in G else x + y = {y} if y in G else y + z = {z} if z in G else z + + if included is None: + included = set() + elif included in G: + included = {included} + + if restricted is None: + restricted = set(G) + elif restricted in G: + restricted = {restricted} + + set_y = x | y | included | restricted + if set_y - G.nodes: + raise nx.NodeNotFound(f"The node(s) {set_y - G.nodes} are not found in G") + except TypeError: + raise nx.NodeNotFound( + "One of x, y, z, included or restricted is not a node or set of nodes in G" + ) + + if not included <= z: + raise nx.NetworkXError( + f"Included nodes {included} must be in proposed separating set z {x}" + ) + if not z <= restricted: + raise nx.NetworkXError( + f"Separating set {z} must be contained in restricted set {restricted}" + ) + + intersection = x.intersection(y) or x.intersection(z) or y.intersection(z) + if intersection: + raise nx.NetworkXError( + f"The sets are not disjoint, with intersection {intersection}" + ) + + nodeset = x | y | included + ancestors_x_y_included = nodeset.union(*[nx.ancestors(G, n) for n in nodeset]) + + # criterion (a) -- check that z is actually a separator + x_closure = _reachable(G, x, ancestors_x_y_included, z) + if x_closure & y: + return False + + # criterion (b) -- basic constraint; included and restricted already checked above + if not (z <= ancestors_x_y_included): + return False + + # criterion (c) -- check that z is minimal + y_closure = _reachable(G, y, ancestors_x_y_included, z) + if not ((z - included) <= (x_closure & y_closure)): + return False + return True + + +@not_implemented_for("undirected") +def _reachable(G, x, a, z): + """Modified Bayes-Ball algorithm for finding d-connected nodes. + + Find all nodes in `a` that are d-connected to those in `x` by + those in `z`. This is an implementation of the function + `REACHABLE` in [1]_ (which is itself a modification of the + Bayes-Ball algorithm [2]_) when restricted to DAGs. + + Parameters + ---------- + G : nx.DiGraph + A NetworkX DAG. + x : node | set + A node in the DAG, or a set of nodes. + a : node | set + A (set of) node(s) in the DAG containing the ancestors of `x`. + z : node | set + The node or set of nodes conditioned on when checking d-connectedness. + + Returns + ------- + w : set + The closure of `x` in `a` with respect to d-connectedness + given `z`. + + References + ---------- + .. [1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + + .. [2] Shachter, Ross D. "Bayes-ball: The rational pastime + (for determining irrelevance and requisite information in + belief networks and influence diagrams)." In Proceedings of the + Fourteenth Conference on Uncertainty in Artificial Intelligence + (UAI), (pp. 480–487). 1998. + """ + + def _pass(e, v, f, n): + """Whether a ball entering node `v` along edge `e` passes to `n` along `f`. + + Boolean function defined on page 6 of [1]_. + + Parameters + ---------- + e : bool + Directed edge by which the ball got to node `v`; `True` iff directed into `v`. + v : node + Node where the ball is. + f : bool + Directed edge connecting nodes `v` and `n`; `True` iff directed `n`. + n : node + Checking whether the ball passes to this node. + + Returns + ------- + b : bool + Whether the ball passes or not. + + References + ---------- + .. 
[1] van der Zander, Benito, and Maciej Liśkiewicz. "Finding + minimal d-separators in linear time and applications." In + Uncertainty in Artificial Intelligence, pp. 637-647. PMLR, 2020. + """ + is_element_of_A = n in a + # almost_definite_status = True # always true for DAGs; not so for RCGs + collider_if_in_Z = v not in z or (e and not f) + return is_element_of_A and collider_if_in_Z # and almost_definite_status + + queue = deque([]) + for node in x: + if bool(G.pred[node]): + queue.append((True, node)) + if bool(G.succ[node]): + queue.append((False, node)) + processed = queue.copy() + + while any(queue): + e, v = queue.popleft() + preds = ((False, n) for n in G.pred[v]) + succs = ((True, n) for n in G.succ[v]) + f_n_pairs = chain(preds, succs) + for f, n in f_n_pairs: + if (f, n) not in processed and _pass(e, v, f, n): + queue.append((f, n)) + processed.append((f, n)) + + return {w for (_, w) in processed} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py new file mode 100644 index 0000000..130b4dc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dag.py @@ -0,0 +1,1468 @@ +"""Algorithms for directed acyclic graphs (DAGs). + +Note that most of these functions are only guaranteed to work for DAGs. +In general, these functions do not check for acyclic-ness, so it is up +to the user to check for that. +""" + +import heapq +from collections import deque +from functools import partial +from itertools import chain, combinations, product, starmap +from math import gcd + +import networkx as nx +from networkx.utils import arbitrary_element, not_implemented_for, pairwise + +__all__ = [ + "descendants", + "ancestors", + "topological_sort", + "lexicographical_topological_sort", + "all_topological_sorts", + "topological_generations", + "is_directed_acyclic_graph", + "is_aperiodic", + "transitive_closure", + "transitive_closure_dag", + "transitive_reduction", + "antichains", + "dag_longest_path", + "dag_longest_path_length", + "dag_to_branching", + "compute_v_structures", +] + +chaini = chain.from_iterable + + +@nx._dispatchable +def descendants(G, source): + """Returns all nodes reachable from `source` in `G`. + + Parameters + ---------- + G : NetworkX Graph + source : node in `G` + + Returns + ------- + set() + The descendants of `source` in `G` + + Raises + ------ + NetworkXError + If node `source` is not in `G`. + + Examples + -------- + >>> DG = nx.path_graph(5, create_using=nx.DiGraph) + >>> sorted(nx.descendants(DG, 2)) + [3, 4] + + The `source` node is not a descendant of itself, but can be included manually: + + >>> sorted(nx.descendants(DG, 2) | {2}) + [2, 3, 4] + + See also + -------- + ancestors + """ + return {child for parent, child in nx.bfs_edges(G, source)} + + +@nx._dispatchable +def ancestors(G, source): + """Returns all nodes having a path to `source` in `G`. + + Parameters + ---------- + G : NetworkX Graph + source : node in `G` + + Returns + ------- + set() + The ancestors of `source` in `G` + + Raises + ------ + NetworkXError + If node `source` is not in `G`. 
+
+    Examples
+    --------
+    >>> DG = nx.path_graph(5, create_using=nx.DiGraph)
+    >>> sorted(nx.ancestors(DG, 2))
+    [0, 1]
+
+    The `source` node is not an ancestor of itself, but can be included manually:
+
+    >>> sorted(nx.ancestors(DG, 2) | {2})
+    [0, 1, 2]
+
+    See also
+    --------
+    descendants
+    """
+    return {child for parent, child in nx.bfs_edges(G, source, reverse=True)}
+
+
+@nx._dispatchable
+def has_cycle(G):
+    """Decides whether the directed graph has a cycle."""
+    try:
+        # Feed the entire iterator into a zero-length deque.
+        deque(topological_sort(G), maxlen=0)
+    except nx.NetworkXUnfeasible:
+        return True
+    else:
+        return False
+
+
+@nx._dispatchable
+def is_directed_acyclic_graph(G):
+    """Returns True if the graph `G` is a directed acyclic graph (DAG) or
+    False if not.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    bool
+        True if `G` is a DAG, False otherwise
+
+    Examples
+    --------
+    Undirected graph::
+
+        >>> G = nx.Graph([(1, 2), (2, 3)])
+        >>> nx.is_directed_acyclic_graph(G)
+        False
+
+    Directed graph with cycle::
+
+        >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+        >>> nx.is_directed_acyclic_graph(G)
+        False
+
+    Directed acyclic graph::
+
+        >>> G = nx.DiGraph([(1, 2), (2, 3)])
+        >>> nx.is_directed_acyclic_graph(G)
+        True
+
+    See also
+    --------
+    topological_sort
+    """
+    return G.is_directed() and not has_cycle(G)
+
+
+@nx._dispatchable
+def topological_generations(G):
+    """Stratifies a DAG into generations.
+
+    A topological generation is a node collection in which ancestors of a node in each
+    generation are guaranteed to be in a previous generation, and any descendants of
+    a node are guaranteed to be in a following generation. Nodes are guaranteed to
+    be in the earliest possible generation that they can belong to.
+
+    Parameters
+    ----------
+    G : NetworkX digraph
+        A directed acyclic graph (DAG)
+
+    Yields
+    ------
+    sets of nodes
+        Yields sets of nodes representing each generation.
+
+    Raises
+    ------
+    NetworkXError
+        Generations are defined for directed graphs only. If the graph
+        `G` is undirected, a :exc:`NetworkXError` is raised.
+
+    NetworkXUnfeasible
+        If `G` is not a directed acyclic graph (DAG) no topological generations
+        exist and a :exc:`NetworkXUnfeasible` exception is raised. This can also
+        be raised if `G` is changed while the returned iterator is being processed.
+
+    RuntimeError
+        If `G` is changed while the returned iterator is being processed.
+
+    Examples
+    --------
+    >>> DG = nx.DiGraph([(2, 1), (3, 1)])
+    >>> [sorted(generation) for generation in nx.topological_generations(DG)]
+    [[2, 3], [1]]
+
+    Notes
+    -----
+    The generation in which a node resides can also be determined by taking the
+    max-path-distance from the node to the farthest leaf node. That value can
+    be obtained with this function using `enumerate(topological_generations(G))`.
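+
+    For example, a small sketch of that idea, reusing the graph from the
+    Examples section above:
+
+    >>> DG = nx.DiGraph([(2, 1), (3, 1)])
+    >>> {n: i for i, gen in enumerate(nx.topological_generations(DG)) for n in gen}
+    {2: 0, 3: 0, 1: 1}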
+ + See also + -------- + topological_sort + """ + if not G.is_directed(): + raise nx.NetworkXError("Topological sort not defined on undirected graphs.") + + multigraph = G.is_multigraph() + indegree_map = {v: d for v, d in G.in_degree() if d > 0} + zero_indegree = [v for v, d in G.in_degree() if d == 0] + + while zero_indegree: + this_generation = zero_indegree + zero_indegree = [] + for node in this_generation: + if node not in G: + raise RuntimeError("Graph changed during iteration") + for child in G.neighbors(node): + try: + indegree_map[child] -= len(G[node][child]) if multigraph else 1 + except KeyError as err: + raise RuntimeError("Graph changed during iteration") from err + if indegree_map[child] == 0: + zero_indegree.append(child) + del indegree_map[child] + yield this_generation + + if indegree_map: + raise nx.NetworkXUnfeasible( + "Graph contains a cycle or graph changed during iteration" + ) + + +@nx._dispatchable +def topological_sort(G): + """Returns a generator of nodes in topologically sorted order. + + A topological sort is a nonunique permutation of the nodes of a + directed graph such that an edge from u to v implies that u + appears before v in the topological sort order. This ordering is + valid only if the graph has no directed cycles. + + Parameters + ---------- + G : NetworkX digraph + A directed acyclic graph (DAG) + + Yields + ------ + nodes + Yields the nodes in topological sorted order. + + Raises + ------ + NetworkXError + Topological sort is defined for directed graphs only. If the graph `G` + is undirected, a :exc:`NetworkXError` is raised. + + NetworkXUnfeasible + If `G` is not a directed acyclic graph (DAG) no topological sort exists + and a :exc:`NetworkXUnfeasible` exception is raised. This can also be + raised if `G` is changed while the returned iterator is being processed + + RuntimeError + If `G` is changed while the returned iterator is being processed. + + Examples + -------- + To get the reverse order of the topological sort: + + >>> DG = nx.DiGraph([(1, 2), (2, 3)]) + >>> list(reversed(list(nx.topological_sort(DG)))) + [3, 2, 1] + + If your DiGraph naturally has the edges representing tasks/inputs + and nodes representing people/processes that initiate tasks, then + topological_sort is not quite what you need. You will have to change + the tasks to nodes with dependence reflected by edges. The result is + a kind of topological sort of the edges. This can be done + with :func:`networkx.line_graph` as follows: + + >>> list(nx.topological_sort(nx.line_graph(DG))) + [(1, 2), (2, 3)] + + Notes + ----- + This algorithm is based on a description and proof in + "Introduction to Algorithms: A Creative Approach" [1]_ . + + See also + -------- + is_directed_acyclic_graph, lexicographical_topological_sort + + References + ---------- + .. [1] Manber, U. (1989). + *Introduction to Algorithms - A Creative Approach.* Addison-Wesley. + """ + for generation in nx.topological_generations(G): + yield from generation + + +@nx._dispatchable +def lexicographical_topological_sort(G, key=None): + """Generate the nodes in the unique lexicographical topological sort order. + + Generates a unique ordering of nodes by first sorting topologically (for which there are often + multiple valid orderings) and then additionally by sorting lexicographically. + + A topological sort arranges the nodes of a directed graph so that the + upstream node of each directed edge precedes the downstream node. + It is always possible to find a solution for directed graphs that have no cycles. 
+    There may be more than one valid solution.
+
+    Lexicographical sorting is just sorting alphabetically. It is used here to
+    break ties in the topological sort and to determine a single, unique ordering.
+    This can be useful in comparing sort results.
+
+    The lexicographical order can be customized by providing a function to the
+    `key=` parameter. The definition of the key function is the same as used in
+    Python's built-in `list.sort()`. The function takes a single argument and
+    returns a key to use for sorting purposes.
+
+    Lexicographical sorting can fail if the node names are un-sortable. See the
+    example below. The solution is to provide a function to the `key=` argument
+    that returns sortable keys.
+
+    Parameters
+    ----------
+    G : NetworkX digraph
+        A directed acyclic graph (DAG)
+
+    key : function, optional
+        A function of one argument that converts a node name to a comparison key.
+        It defines and resolves ambiguities in the sort order. Defaults to the
+        identity function.
+
+    Yields
+    ------
+    nodes
+        Yields the nodes of G in lexicographical topological sort order.
+
+    Raises
+    ------
+    NetworkXError
+        Topological sort is defined for directed graphs only. If the graph `G`
+        is undirected, a :exc:`NetworkXError` is raised.
+
+    NetworkXUnfeasible
+        If `G` is not a directed acyclic graph (DAG) no topological sort exists
+        and a :exc:`NetworkXUnfeasible` exception is raised. This can also be
+        raised if `G` is changed while the returned iterator is being processed.
+
+    RuntimeError
+        If `G` is changed while the returned iterator is being processed.
+
+    TypeError
+        Results from un-sortable node names.
+        Consider using `key=` parameter to resolve ambiguities in the sort order.
+
+    Examples
+    --------
+    >>> DG = nx.DiGraph([(2, 1), (2, 5), (1, 3), (1, 4), (5, 4)])
+    >>> list(nx.lexicographical_topological_sort(DG))
+    [2, 1, 3, 5, 4]
+    >>> list(nx.lexicographical_topological_sort(DG, key=lambda x: -x))
+    [2, 5, 1, 4, 3]
+
+    The sort will fail for any graph with both integer and string nodes, since
+    comparison of integers to strings is not defined in Python. Is 3 greater or
+    less than 'red'?
+
+    >>> DG = nx.DiGraph([(1, "red"), (3, "red"), (1, "green"), (2, "blue")])
+    >>> list(nx.lexicographical_topological_sort(DG))
+    Traceback (most recent call last):
+    ...
+    TypeError: '<' not supported between instances of 'str' and 'int'
+    ...
+
+    Incomparable nodes can be resolved using a `key` function. This example function
+    allows comparison of integers and strings by returning a tuple where the first
+    element is True for `str`, False otherwise. The second element is the node name.
+    This groups the strings and integers separately so they can be compared only
+    among themselves.
+
+    >>> key = lambda node: (isinstance(node, str), node)
+    >>> list(nx.lexicographical_topological_sort(DG, key=key))
+    [1, 2, 3, 'blue', 'green', 'red']
+
+    Notes
+    -----
+    This algorithm is based on a description and proof in
+    "Introduction to Algorithms: A Creative Approach" [1]_ .
+
+    See also
+    --------
+    topological_sort
+
+    References
+    ----------
+    .. [1] Manber, U. (1989).
+       *Introduction to Algorithms - A Creative Approach.* Addison-Wesley.
+    """
+    if not G.is_directed():
+        msg = "Topological sort not defined on undirected graphs."
+        raise nx.NetworkXError(msg)
+
+    if key is None:
+
+        def key(node):
+            return node
+
+    nodeid_map = {n: i for i, n in enumerate(G)}
+
+    def create_tuple(node):
+        return key(node), nodeid_map[node], node
+
+    indegree_map = {v: d for v, d in G.in_degree() if d > 0}
+    # These nodes have zero indegree and are ready to be returned.
+    zero_indegree = [create_tuple(v) for v, d in G.in_degree() if d == 0]
+    heapq.heapify(zero_indegree)
+
+    while zero_indegree:
+        _, _, node = heapq.heappop(zero_indegree)
+
+        if node not in G:
+            raise RuntimeError("Graph changed during iteration")
+        for _, child in G.edges(node):
+            try:
+                indegree_map[child] -= 1
+            except KeyError as err:
+                raise RuntimeError("Graph changed during iteration") from err
+            if indegree_map[child] == 0:
+                try:
+                    heapq.heappush(zero_indegree, create_tuple(child))
+                except TypeError as err:
+                    raise TypeError(
+                        f"{err}\nConsider using `key=` parameter to resolve ambiguities in the sort order."
+                    ) from err
+                del indegree_map[child]
+
+        yield node
+
+    if indegree_map:
+        msg = "Graph contains a cycle or graph changed during iteration"
+        raise nx.NetworkXUnfeasible(msg)
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def all_topological_sorts(G):
+    """Returns a generator of *all* topological sorts of the directed graph G.
+
+    A topological sort is a nonunique permutation of the nodes such that an
+    edge from u to v implies that u appears before v in the topological sort
+    order.
+
+    Parameters
+    ----------
+    G : NetworkX DiGraph
+        A directed graph
+
+    Yields
+    ------
+    topological_sort_order : list
+        a list of nodes in `G`, representing one of the topological sort orders
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is not directed
+    NetworkXUnfeasible
+        If `G` is not acyclic
+
+    Examples
+    --------
+    To enumerate all topological sorts of a directed graph:
+
+    >>> DG = nx.DiGraph([(1, 2), (2, 3), (2, 4)])
+    >>> list(nx.all_topological_sorts(DG))
+    [[1, 2, 4, 3], [1, 2, 3, 4]]
+
+    Notes
+    -----
+    Implements an iterative version of the algorithm given in [1]_.
+
+    References
+    ----------
+    .. [1] Knuth, Donald E., Szwarcfiter, Jayme L. (1974).
+       "A Structured Program to Generate All Topological Sorting Arrangements"
+       Information Processing Letters, Volume 2, Issue 6, 1974, Pages 153-157,
+       ISSN 0020-0190,
+       https://doi.org/10.1016/0020-0190(74)90001-5.
+       Elsevier (North-Holland), Amsterdam
+    """
+    if not G.is_directed():
+        raise nx.NetworkXError("Topological sort not defined on undirected graphs.")
+
+    # the names of count and D are chosen to match the global variables in [1]
+    # count[v]: number of edges terminating at a vertex v (its in-degree) that
+    # have not yet been "erased"
+    count = dict(G.in_degree())
+    # vertices with indegree 0
+    D = deque([v for v, d in G.in_degree() if d == 0])
+    # stack of first value chosen at a position k in the topological sort
+    bases = []
+    current_sort = []
+
+    # do-while construct
+    while True:
+        assert all(count[v] == 0 for v in D)
+
+        if len(current_sort) == len(G):
+            yield list(current_sort)
+
+            # clean-up stack
+            while len(current_sort) > 0:
+                assert len(bases) == len(current_sort)
+                q = current_sort.pop()
+
+                # "restores" all edges (q, x)
+                # NOTE: it is important to iterate over edges instead
+                # of successors, so count is updated correctly in multigraphs
+                for _, j in G.out_edges(q):
+                    count[j] += 1
+                    assert count[j] >= 0
+                # remove entries from D
+                while len(D) > 0 and count[D[-1]] > 0:
+                    D.pop()
+
+                # corresponds to a circular shift of the values in D
+                # if the first value chosen (the base) is in the first
+                # position of D again, we are done and need to consider the
+                # previous condition
+                D.appendleft(q)
+                if D[-1] == bases[-1]:
+                    # all possible values have been chosen at current position
+                    # remove corresponding marker
+                    bases.pop()
+                else:
+                    # there are still elements that have not been fixed
+                    # at the current position in the topological sort
+                    # stop removing elements, escape inner loop
+                    break
+
+        else:
+            if len(D) == 0:
+                raise nx.NetworkXUnfeasible("Graph contains a cycle.")
+
+            # choose next node
+            q = D.pop()
+            # "erase" all edges (q, x)
+            # NOTE: it is important to iterate over edges instead
+            # of successors, so count is updated correctly in multigraphs
+            for _, j in G.out_edges(q):
+                count[j] -= 1
+                assert count[j] >= 0
+                if count[j] == 0:
+                    D.append(j)
+            current_sort.append(q)
+
+            # base for current position might _not_ be fixed yet
+            if len(bases) < len(current_sort):
+                bases.append(q)
+
+        if len(bases) == 0:
+            break
+
+
+@nx._dispatchable
+def is_aperiodic(G):
+    """Returns True if `G` is aperiodic.
+
+    A strongly connected directed graph is aperiodic if there is no integer ``k > 1``
+    that divides the length of every cycle in the graph.
+
+    This function requires the graph `G` to be strongly connected and will raise
+    an error if it's not. For graphs that are not strongly connected, you should
+    first identify their strongly connected components
+    (using :func:`~networkx.algorithms.components.strongly_connected_components`)
+    or attracting components
+    (using :func:`~networkx.algorithms.components.attracting_components`),
+    and then apply this function to those individual components.
+
+    Parameters
+    ----------
+    G : NetworkX DiGraph
+        A directed graph
+
+    Returns
+    -------
+    bool
+        True if the graph is aperiodic, False otherwise
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not directed
+    NetworkXError
+        If `G` is not strongly connected
+    NetworkXPointlessConcept
+        If `G` has no nodes
+
+    Examples
+    --------
+    A graph consisting of one cycle, the length of which is 2. Therefore ``k = 2``
+    divides the length of every cycle in the graph and thus the graph
+    is *not aperiodic*::
+
+        >>> DG = nx.DiGraph([(1, 2), (2, 1)])
+        >>> nx.is_aperiodic(DG)
+        False
+
+    A graph consisting of two cycles: one of length 2 and the other of length 3.
+ The cycle lengths are coprime, so there is no single value of k where ``k > 1`` + that divides each cycle length and therefore the graph is *aperiodic*:: + + >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1), (1, 4), (4, 1)]) + >>> nx.is_aperiodic(DG) + True + + A graph created from cycles of the same length can still be aperiodic since + the cycles can overlap and form new cycles of different lengths. For example, + the following graph contains a cycle ``[4, 2, 3, 1]`` of length 4, which is coprime + with the explicitly added cycles of length 3, so the graph is aperiodic:: + + >>> DG = nx.DiGraph() + >>> nx.add_cycle(DG, [1, 2, 3]) + >>> nx.add_cycle(DG, [2, 1, 4]) + >>> nx.is_aperiodic(DG) + True + + A single-node graph's aperiodicity depends on whether it has a self-loop: + it is aperiodic if a self-loop exists, and periodic otherwise:: + + >>> G = nx.DiGraph() + >>> G.add_node(1) + >>> nx.is_aperiodic(G) + False + >>> G.add_edge(1, 1) + >>> nx.is_aperiodic(G) + True + + A Markov chain can be modeled as a directed graph, with nodes representing + states and edges representing transitions with non-zero probability. + Aperiodicity is typically considered for irreducible Markov chains, + which are those that are *strongly connected* as graphs. + + The following Markov chain is irreducible and aperiodic, and thus + ergodic. It is guaranteed to have a unique stationary distribution:: + + >>> G = nx.DiGraph() + >>> nx.add_cycle(G, [1, 2, 3, 4]) + >>> G.add_edge(1, 3) + >>> nx.is_aperiodic(G) + True + + Reducible Markov chains can sometimes have a unique stationary distribution. + This occurs if the chain has exactly one closed communicating class and + that class itself is aperiodic (see [1]_). You can use + :func:`~networkx.algorithms.components.attracting_components` + to find these closed communicating classes:: + + >>> G = nx.DiGraph([(1, 3), (2, 3)]) + >>> nx.add_cycle(G, [3, 4, 5, 6]) + >>> nx.add_cycle(G, [3, 5, 6]) + >>> communicating_classes = list(nx.strongly_connected_components(G)) + >>> len(communicating_classes) + 3 + >>> closed_communicating_classes = list(nx.attracting_components(G)) + >>> len(closed_communicating_classes) + 1 + >>> nx.is_aperiodic(G.subgraph(closed_communicating_classes[0])) + True + + Notes + ----- + This uses the method outlined in [1]_, which runs in $O(m)$ time + given $m$ edges in `G`. + + References + ---------- + .. [1] Jarvis, J. P.; Shier, D. R. (1996), + "Graph-theoretic analysis of finite Markov chains," + in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling: + A Multidisciplinary Approach, CRC Press. 
+ """ + if not G.is_directed(): + raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") + if not nx.is_strongly_connected(G): + raise nx.NetworkXError("Graph is not strongly connected.") + s = arbitrary_element(G) + levels = {s: 0} + this_level = [s] + g = 0 + lev = 1 + while this_level: + next_level = [] + for u in this_level: + for v in G[u]: + if v in levels: # Non-Tree Edge + g = gcd(g, levels[u] - levels[v] + 1) + else: # Tree Edge + next_level.append(v) + levels[v] = lev + this_level = next_level + lev += 1 + return g == 1 + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def transitive_closure(G, reflexive=False): + """Returns transitive closure of a graph + + The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that + for all v, w in V there is an edge (v, w) in E+ if and only if there + is a path from v to w in G. + + Handling of paths from v to v has some flexibility within this definition. + A reflexive transitive closure creates a self-loop for the path + from v to v of length 0. The usual transitive closure creates a + self-loop only if a cycle exists (a path from v to v with length > 0). + We also allow an option for no self-loops. + + Parameters + ---------- + G : NetworkX Graph + A directed/undirected graph/multigraph. + reflexive : Bool or None, optional (default: False) + Determines when cycles create self-loops in the Transitive Closure. + If True, trivial cycles (length 0) create self-loops. The result + is a reflexive transitive closure of G. + If False (the default) non-trivial cycles create self-loops. + If None, self-loops are not created. + + Returns + ------- + NetworkX graph + The transitive closure of `G` + + Raises + ------ + NetworkXError + If `reflexive` not in `{None, True, False}` + + Examples + -------- + The treatment of trivial (i.e. length 0) cycles is controlled by the + `reflexive` parameter. + + Trivial (i.e. length 0) cycles do not create self-loops when + ``reflexive=False`` (the default):: + + >>> DG = nx.DiGraph([(1, 2), (2, 3)]) + >>> TC = nx.transitive_closure(DG, reflexive=False) + >>> TC.edges() + OutEdgeView([(1, 2), (1, 3), (2, 3)]) + + However, nontrivial (i.e. length greater than 0) cycles create self-loops + when ``reflexive=False`` (the default):: + + >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + >>> TC = nx.transitive_closure(DG, reflexive=False) + >>> TC.edges() + OutEdgeView([(1, 2), (1, 3), (1, 1), (2, 3), (2, 1), (2, 2), (3, 1), (3, 2), (3, 3)]) + + Trivial cycles (length 0) create self-loops when ``reflexive=True``:: + + >>> DG = nx.DiGraph([(1, 2), (2, 3)]) + >>> TC = nx.transitive_closure(DG, reflexive=True) + >>> TC.edges() + OutEdgeView([(1, 2), (1, 1), (1, 3), (2, 3), (2, 2), (3, 3)]) + + And the third option is not to create self-loops at all when ``reflexive=None``:: + + >>> DG = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + >>> TC = nx.transitive_closure(DG, reflexive=None) + >>> TC.edges() + OutEdgeView([(1, 2), (1, 3), (2, 3), (2, 1), (3, 1), (3, 2)]) + + References + ---------- + .. 
[1] https://www.ics.uci.edu/~eppstein/PADS/PartialOrder.py + """ + TC = G.copy() + + if reflexive not in {None, True, False}: + raise nx.NetworkXError("Incorrect value for the parameter `reflexive`") + + for v in G: + if reflexive is None: + TC.add_edges_from((v, u) for u in nx.descendants(G, v) if u not in TC[v]) + elif reflexive is True: + TC.add_edges_from( + (v, u) for u in nx.descendants(G, v) | {v} if u not in TC[v] + ) + elif reflexive is False: + TC.add_edges_from((v, e[1]) for e in nx.edge_bfs(G, v) if e[1] not in TC[v]) + + return TC + + +@not_implemented_for("undirected") +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def transitive_closure_dag(G, topo_order=None): + """Returns the transitive closure of a directed acyclic graph. + + This function is faster than the function `transitive_closure`, but fails + if the graph has a cycle. + + The transitive closure of G = (V,E) is a graph G+ = (V,E+) such that + for all v, w in V there is an edge (v, w) in E+ if and only if there + is a non-null path from v to w in G. + + Parameters + ---------- + G : NetworkX DiGraph + A directed acyclic graph (DAG) + + topo_order: list or tuple, optional + A topological order for G (if None, the function will compute one) + + Returns + ------- + NetworkX DiGraph + The transitive closure of `G` + + Raises + ------ + NetworkXNotImplemented + If `G` is not directed + NetworkXUnfeasible + If `G` has a cycle + + Examples + -------- + >>> DG = nx.DiGraph([(1, 2), (2, 3)]) + >>> TC = nx.transitive_closure_dag(DG) + >>> TC.edges() + OutEdgeView([(1, 2), (1, 3), (2, 3)]) + + Notes + ----- + This algorithm is probably simple enough to be well-known but I didn't find + a mention in the literature. + """ + if topo_order is None: + topo_order = list(topological_sort(G)) + + TC = G.copy() + + # idea: traverse vertices following a reverse topological order, connecting + # each vertex to its descendants at distance 2 as we go + for v in reversed(topo_order): + TC.add_edges_from((v, u) for u in nx.descendants_at_distance(TC, v, 2)) + + return TC + + +@not_implemented_for("undirected") +@nx._dispatchable(returns_graph=True) +def transitive_reduction(G): + """Returns transitive reduction of a directed graph + + The transitive reduction of G = (V,E) is a graph G- = (V,E-) such that + for all v,w in V there is an edge (v,w) in E- if and only if (v,w) is + in E and there is no path from v to w in G with length greater than 1. + + Parameters + ---------- + G : NetworkX DiGraph + A directed acyclic graph (DAG) + + Returns + ------- + NetworkX DiGraph + The transitive reduction of `G` + + Raises + ------ + NetworkXError + If `G` is not a directed acyclic graph (DAG) transitive reduction is + not uniquely defined and a :exc:`NetworkXError` exception is raised. + + Examples + -------- + To perform transitive reduction on a DiGraph: + + >>> DG = nx.DiGraph([(1, 2), (2, 3), (1, 3)]) + >>> TR = nx.transitive_reduction(DG) + >>> list(TR.edges) + [(1, 2), (2, 3)] + + To avoid unnecessary data copies, this implementation does not return a + DiGraph with node/edge data. 
+ To perform transitive reduction on a DiGraph and transfer node/edge data: + + >>> DG = nx.DiGraph() + >>> DG.add_edges_from([(1, 2), (2, 3), (1, 3)], color="red") + >>> TR = nx.transitive_reduction(DG) + >>> TR.add_nodes_from(DG.nodes(data=True)) + >>> TR.add_edges_from((u, v, DG.edges[u, v]) for u, v in TR.edges) + >>> list(TR.edges(data=True)) + [(1, 2, {'color': 'red'}), (2, 3, {'color': 'red'})] + + References + ---------- + https://en.wikipedia.org/wiki/Transitive_reduction + + """ + if not is_directed_acyclic_graph(G): + msg = "Directed Acyclic Graph required for transitive_reduction" + raise nx.NetworkXError(msg) + TR = nx.DiGraph() + TR.add_nodes_from(G.nodes()) + descendants = {} + # count before removing set stored in descendants + check_count = dict(G.in_degree) + for u in G: + u_nbrs = set(G[u]) + for v in G[u]: + if v in u_nbrs: + if v not in descendants: + descendants[v] = {y for x, y in nx.dfs_edges(G, v)} + u_nbrs -= descendants[v] + check_count[v] -= 1 + if check_count[v] == 0: + del descendants[v] + TR.add_edges_from((u, v) for v in u_nbrs) + return TR + + +@not_implemented_for("undirected") +@nx._dispatchable +def antichains(G, topo_order=None): + """Generates antichains from a directed acyclic graph (DAG). + + An antichain is a subset of a partially ordered set such that any + two elements in the subset are incomparable. + + Parameters + ---------- + G : NetworkX DiGraph + A directed acyclic graph (DAG) + + topo_order: list or tuple, optional + A topological order for G (if None, the function will compute one) + + Yields + ------ + antichain : list + a list of nodes in `G` representing an antichain + + Raises + ------ + NetworkXNotImplemented + If `G` is not directed + + NetworkXUnfeasible + If `G` contains a cycle + + Examples + -------- + >>> DG = nx.DiGraph([(1, 2), (1, 3)]) + >>> list(nx.antichains(DG)) + [[], [3], [2], [2, 3], [1]] + + Notes + ----- + This function was originally developed by Peter Jipsen and Franco Saliola + for the SAGE project. It's included in NetworkX with permission from the + authors. Original SAGE code at: + + https://github.com/sagemath/sage/blob/master/src/sage/combinat/posets/hasse_diagram.py + + References + ---------- + .. [1] Free Lattices, by R. Freese, J. Jezek and J. B. Nation, + AMS, Vol 42, 1995, p. 226. + """ + if topo_order is None: + topo_order = list(nx.topological_sort(G)) + + TC = nx.transitive_closure_dag(G, topo_order) + antichains_stacks = [([], list(reversed(topo_order)))] + + while antichains_stacks: + (antichain, stack) = antichains_stacks.pop() + # Invariant: + # - the elements of antichain are independent + # - the elements of stack are independent from those of antichain + yield antichain + while stack: + x = stack.pop() + new_antichain = antichain + [x] + new_stack = [t for t in stack if not ((t in TC[x]) or (x in TC[t]))] + antichains_stacks.append((new_antichain, new_stack)) + + +@not_implemented_for("undirected") +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) +def dag_longest_path(G, weight="weight", default_weight=1, topo_order=None): + """Returns the longest path in a directed acyclic graph (DAG). + + If `G` has edges with `weight` attribute the edge data are used as + weight values. 
+ + Parameters + ---------- + G : NetworkX DiGraph + A directed acyclic graph (DAG) + + weight : str, optional + Edge data key to use for weight + + default_weight : int, optional + The weight of edges that do not have a weight attribute + + topo_order: list or tuple, optional + A topological order for `G` (if None, the function will compute one) + + Returns + ------- + list + Longest path + + Raises + ------ + NetworkXNotImplemented + If `G` is not directed + + Examples + -------- + >>> DG = nx.DiGraph( + ... [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})] + ... ) + >>> list(nx.all_simple_paths(DG, 0, 2)) + [[0, 1, 2], [0, 2]] + >>> nx.dag_longest_path(DG) + [0, 1, 2] + >>> nx.dag_longest_path(DG, weight="cost") + [0, 2] + + In the case where multiple valid topological orderings exist, `topo_order` + can be used to specify a specific ordering: + + >>> DG = nx.DiGraph([(0, 1), (0, 2)]) + >>> sorted(nx.all_topological_sorts(DG)) # Valid topological orderings + [[0, 1, 2], [0, 2, 1]] + >>> nx.dag_longest_path(DG, topo_order=[0, 1, 2]) + [0, 1] + >>> nx.dag_longest_path(DG, topo_order=[0, 2, 1]) + [0, 2] + + See also + -------- + dag_longest_path_length + + """ + if not G: + return [] + + if topo_order is None: + topo_order = nx.topological_sort(G) + + dist = {} # stores {v : (length, u)} + for v in topo_order: + us = [ + ( + dist[u][0] + + ( + max(data.values(), key=lambda x: x.get(weight, default_weight)) + if G.is_multigraph() + else data + ).get(weight, default_weight), + u, + ) + for u, data in G.pred[v].items() + ] + + # Use the best predecessor if there is one and its distance is + # non-negative, otherwise terminate. + maxu = max(us, key=lambda x: x[0]) if us else (0, v) + dist[v] = maxu if maxu[0] >= 0 else (0, v) + + u = None + v = max(dist, key=lambda x: dist[x][0]) + path = [] + while u != v: + path.append(v) + u = v + v = dist[v][1] + + path.reverse() + return path + + +@not_implemented_for("undirected") +@nx._dispatchable(edge_attrs={"weight": "default_weight"}) +def dag_longest_path_length(G, weight="weight", default_weight=1): + """Returns the longest path length in a DAG + + Parameters + ---------- + G : NetworkX DiGraph + A directed acyclic graph (DAG) + + weight : string, optional + Edge data key to use for weight + + default_weight : int, optional + The weight of edges that do not have a weight attribute + + Returns + ------- + int + Longest path length + + Raises + ------ + NetworkXNotImplemented + If `G` is not directed + + Examples + -------- + >>> DG = nx.DiGraph( + ... [(0, 1, {"cost": 1}), (1, 2, {"cost": 1}), (0, 2, {"cost": 42})] + ... ) + >>> list(nx.all_simple_paths(DG, 0, 2)) + [[0, 1, 2], [0, 2]] + >>> nx.dag_longest_path_length(DG) + 2 + >>> nx.dag_longest_path_length(DG, weight="cost") + 42 + + See also + -------- + dag_longest_path + """ + path = nx.dag_longest_path(G, weight, default_weight) + path_length = 0 + if G.is_multigraph(): + for u, v in pairwise(path): + i = max(G[u][v], key=lambda x: G[u][v][x].get(weight, default_weight)) + path_length += G[u][v][i].get(weight, default_weight) + else: + for u, v in pairwise(path): + path_length += G[u][v].get(weight, default_weight) + + return path_length + + +@nx._dispatchable +def root_to_leaf_paths(G): + """Yields root-to-leaf paths in a directed acyclic graph. + + `G` must be a directed acyclic graph. If not, the behavior of this + function is undefined. A "root" in this graph is a node of in-degree + zero and a "leaf" a node of out-degree zero. 
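+
+    For example, a minimal sketch on a diamond-shaped DAG (the helper is
+    imported explicitly from :mod:`networkx.algorithms.dag` here, on the
+    assumption that it may not be re-exported at the top level):
+
+    >>> from networkx.algorithms.dag import root_to_leaf_paths
+    >>> G = nx.DiGraph([("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")])
+    >>> sorted(root_to_leaf_paths(G))
+    [['a', 'b', 'd'], ['a', 'c', 'd']]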
+
+    When invoked, this function iterates over each path from any root to
+    any leaf. A path is a list of nodes.
+
+    """
+    roots = (v for v, d in G.in_degree() if d == 0)
+    leaves = (v for v, d in G.out_degree() if d == 0)
+    all_paths = partial(nx.all_simple_paths, G)
+    # Chain the simple paths for every (root, leaf) pair into a single
+    # iterator; this is equivalent to yielding each path in turn.
+    return chaini(starmap(all_paths, product(roots, leaves)))
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("undirected")
+@nx._dispatchable(returns_graph=True)
+def dag_to_branching(G):
+    """Returns a branching representing all (overlapping) paths from
+    root nodes to leaf nodes in the given directed acyclic graph.
+
+    As described in :mod:`networkx.algorithms.tree.recognition`, a
+    *branching* is a directed forest in which each node has at most one
+    parent. In other words, a branching is a disjoint union of
+    *arborescences*. For this function, each node of in-degree zero in
+    `G` becomes a root of one of the arborescences, and there will be
+    one leaf node for each distinct path from that root to a leaf node
+    in `G`.
+
+    Each node `v` in `G` with *k* parents becomes *k* distinct nodes in
+    the returned branching, one for each parent, and the sub-DAG rooted
+    at `v` is duplicated for each copy. The algorithm then recurses on
+    the children of each copy of `v`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed acyclic graph.
+
+    Returns
+    -------
+    DiGraph
+        The branching in which there is a bijection between root-to-leaf
+        paths in `G` (in which multiple paths may share the same leaf)
+        and root-to-leaf paths in the branching (in which there is a
+        unique path from a root to a leaf).
+
+        Each node has an attribute 'source' whose value is the original
+        node to which this node corresponds. No other graph, node, or
+        edge attributes are copied into this new graph.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is not directed, or if `G` is a multigraph.
+
+    HasACycle
+        If `G` is not acyclic.
+
+    Examples
+    --------
+    To examine which nodes in the returned branching were produced by
+    which original node in the directed acyclic graph, we can collect
+    the mapping from source node to new nodes into a dictionary. For
+    example, consider the directed diamond graph::
+
+        >>> from collections import defaultdict
+        >>>
+        >>> G = nx.DiGraph(nx.utils.pairwise("abd"))
+        >>> G.add_edges_from(nx.utils.pairwise("acd"))
+        >>> B = nx.dag_to_branching(G)
+        >>>
+        >>> sources = defaultdict(set)
+        >>> for v, source in B.nodes(data="source"):
+        ...     sources[source].add(v)
+        >>> len(sources["a"])
+        1
+        >>> len(sources["d"])
+        2
+
+    To copy node attributes from the original graph to the new graph,
+    you can use a dictionary like the one constructed in the above
+    example::
+
+        >>> for source, nodes in sources.items():
+        ...     for v in nodes:
+        ...         B.nodes[v].update(G.nodes[source])
+
+    Notes
+    -----
+    This function is not idempotent in the sense that the node labels in
+    the returned branching may be uniquely generated each time the
+    function is invoked. In fact, the node labels may not be integers;
+    in order to relabel the nodes to be more readable, you can use the
+    :func:`networkx.convert_node_labels_to_integers` function, as sketched
+    after these notes.
+
+    The current implementation of this function uses
+    :func:`networkx.prefix_tree`, so it is subject to the limitations of
+    that function.
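+
+    As a small sketch of that relabeling (``B`` is the branching built in the
+    Examples; the node count is preserved, only the labels change):
+
+    >>> B_relabeled = nx.convert_node_labels_to_integers(B)
+    >>> sorted(B_relabeled.nodes) == list(range(len(B)))
+    True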
+ + """ + if has_cycle(G): + msg = "dag_to_branching is only defined for acyclic graphs" + raise nx.HasACycle(msg) + paths = root_to_leaf_paths(G) + B = nx.prefix_tree(paths) + # Remove the synthetic `root`(0) and `NIL`(-1) nodes from the tree + B.remove_node(0) + B.remove_node(-1) + return B + + +@not_implemented_for("undirected") +@nx._dispatchable +def compute_v_structures(G): + """Yields 3-node tuples that represent the v-structures in `G`. + + .. deprecated:: 3.4 + + `compute_v_structures` actually yields colliders. It will be removed in + version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders` instead. + + Colliders are triples in the directed acyclic graph (DAG) where two parent nodes + point to the same child node. V-structures are colliders where the two parent + nodes are not adjacent. In a causal graph setting, the parents do not directly + depend on each other, but conditioning on the child node provides an association. + + Parameters + ---------- + G : graph + A networkx `~networkx.DiGraph`. + + Yields + ------ + A 3-tuple representation of a v-structure + Each v-structure is a 3-tuple with the parent, collider, and other parent. + + Raises + ------ + NetworkXNotImplemented + If `G` is an undirected graph. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)]) + >>> nx.is_directed_acyclic_graph(G) + True + >>> list(nx.compute_v_structures(G)) + [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)] + + See Also + -------- + v_structures + colliders + + Notes + ----- + This function was written to be used on DAGs, however it works on cyclic graphs + too. Since colliders are referred to in the cyclic causal graph literature + [2]_ we allow cyclic graphs in this function. It is suggested that you test if + your input graph is acyclic as in the example if you want that property. + + References + ---------- + .. [1] `Pearl's PRIMER `_ + Ch-2 page 50: v-structures def. + .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013) + "Discovering cyclic causal models with latent variables: + a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth + Conference on Uncertainty in Artificial Intelligence, pg 301–310, + `doi:10.5555/3023638.3023669 `_ + """ + import warnings + + warnings.warn( + ( + "\n\n`compute_v_structures` actually yields colliders. It will be\n" + "removed in version 3.6. Use `nx.dag.v_structures` or `nx.dag.colliders`\n" + "instead.\n" + ), + category=DeprecationWarning, + stacklevel=5, + ) + + return colliders(G) + + +@not_implemented_for("undirected") +@nx._dispatchable +def v_structures(G): + """Yields 3-node tuples that represent the v-structures in `G`. + + Colliders are triples in the directed acyclic graph (DAG) where two parent nodes + point to the same child node. V-structures are colliders where the two parent + nodes are not adjacent. In a causal graph setting, the parents do not directly + depend on each other, but conditioning on the child node provides an association. + + Parameters + ---------- + G : graph + A networkx `~networkx.DiGraph`. + + Yields + ------ + A 3-tuple representation of a v-structure + Each v-structure is a 3-tuple with the parent, collider, and other parent. + + Raises + ------ + NetworkXNotImplemented + If `G` is an undirected graph. 
+ + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)]) + >>> nx.is_directed_acyclic_graph(G) + True + >>> list(nx.dag.v_structures(G)) + [(0, 4, 2), (0, 5, 1), (4, 5, 1)] + + See Also + -------- + colliders + + Notes + ----- + This function was written to be used on DAGs, however it works on cyclic graphs + too. Since colliders are referred to in the cyclic causal graph literature + [2]_ we allow cyclic graphs in this function. It is suggested that you test if + your input graph is acyclic as in the example if you want that property. + + References + ---------- + .. [1] `Pearl's PRIMER `_ + Ch-2 page 50: v-structures def. + .. [2] A Hyttinen, P.O. Hoyer, F. Eberhardt, M J ̈arvisalo, (2013) + "Discovering cyclic causal models with latent variables: + a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth + Conference on Uncertainty in Artificial Intelligence, pg 301–310, + `doi:10.5555/3023638.3023669 `_ + """ + for p1, c, p2 in colliders(G): + if not (G.has_edge(p1, p2) or G.has_edge(p2, p1)): + yield (p1, c, p2) + + +@not_implemented_for("undirected") +@nx._dispatchable +def colliders(G): + """Yields 3-node tuples that represent the colliders in `G`. + + In a Directed Acyclic Graph (DAG), if you have three nodes A, B, and C, and + there are edges from A to C and from B to C, then C is a collider [1]_ . In + a causal graph setting, this means that both events A and B are "causing" C, + and conditioning on C provide an association between A and B even if + no direct causal relationship exists between A and B. + + Parameters + ---------- + G : graph + A networkx `~networkx.DiGraph`. + + Yields + ------ + A 3-tuple representation of a collider + Each collider is a 3-tuple with the parent, collider, and other parent. + + Raises + ------ + NetworkXNotImplemented + If `G` is an undirected graph. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (0, 4), (3, 1), (2, 4), (0, 5), (4, 5), (1, 5)]) + >>> nx.is_directed_acyclic_graph(G) + True + >>> list(nx.dag.colliders(G)) + [(0, 4, 2), (0, 5, 4), (0, 5, 1), (4, 5, 1)] + + See Also + -------- + v_structures + + Notes + ----- + This function was written to be used on DAGs, however it works on cyclic graphs + too. Since colliders are referred to in the cyclic causal graph literature + [2]_ we allow cyclic graphs in this function. It is suggested that you test if + your input graph is acyclic as in the example if you want that property. + + References + ---------- + .. [1] `Wikipedia: Collider in causal graphs `_ + .. [2] A Hyttinen, P.O. Hoyer, F. 
Eberhardt, M J ̈arvisalo, (2013) + "Discovering cyclic causal models with latent variables: + a general SAT-based procedure", UAI'13: Proceedings of the Twenty-Ninth + Conference on Uncertainty in Artificial Intelligence, pg 301–310, + `doi:10.5555/3023638.3023669 `_ + """ + for node in G.nodes: + for p1, p2 in combinations(G.predecessors(node), 2): + yield (p1, node, p2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py new file mode 100644 index 0000000..8ce3fb0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_measures.py @@ -0,0 +1,1159 @@ +"""Graph diameter, radius, eccentricity and other properties.""" + +import math + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "eccentricity", + "diameter", + "harmonic_diameter", + "radius", + "periphery", + "center", + "barycenter", + "resistance_distance", + "kemeny_constant", + "effective_graph_resistance", +] + + +@not_implemented_for("directed") +def _tree_center(G): + """Returns the center of an undirected tree graph. + + The center of a tree consists of nodes that minimize the maximum eccentricity. + That is, these nodes minimize the maximum distance to all other nodes. + This implementation currently only works for unweighted edges. + + If the input graph is not a tree, results are not guaranteed to be correct and while + some non-trees will raise a ``NetworkXError`` not all non-trees will be discovered. + Thus, this function should not be used if caller is unsure whether the input graph + is a tree. Use ``networkx.is_tree(G)`` to check. + + Parameters + ---------- + G : NetworkX graph + A tree graph (undirected, acyclic graph). + + Returns + ------- + center : list + A list of nodes in the center of the tree. This could be one or two nodes. + + Raises + ------ + NetworkXError + If algorithm detects input graph is not a tree. There is no guarantee + this error will always raise if a non-tree is passed. + + Notes + ----- + This algorithm iteratively removes leaves (nodes with degree 1) from the tree until + there are only 1 or 2 nodes left. The remaining nodes form the center of the tree. + + This algorithm's time complexity is O(N) where N is the number of nodes in the tree. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 4), (2, 5)]) + >>> _tree_center(G) + [1, 2] + + >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 5)]) + >>> _tree_center(G) + [3] + """ + center_candidates_degree = dict(G.degree) + leaves = {node for node, degree in center_candidates_degree.items() if degree == 1} + + # It's better to fail than an infinite loop, so check leaves to ensure progress + while len(center_candidates_degree) > 2 and leaves: + new_leaves = set() + for leaf in leaves: + del center_candidates_degree[leaf] + for neighbor in G.neighbors(leaf): + if neighbor not in center_candidates_degree: + continue + center_candidates_degree[neighbor] -= 1 + if center_candidates_degree[neighbor] == 1: + new_leaves.add(neighbor) + leaves = new_leaves + + if not leaves and len(center_candidates_degree) >= 2: + # We detected graph is not a tree. This check does not cover all cases. + # For example, it does not cover the case where we have two islands (A-B) and (B-C) + # where we might eliminate (B-C) leaves and return [A, B] as centers. 
+ raise nx.NetworkXError("Input graph is not a tree") + + return list(center_candidates_degree) + + +def _extrema_bounding(G, compute="diameter", weight=None): + """Compute requested extreme distance metric of undirected graph G + + Computation is based on smart lower and upper bounds, and in practice + linear in the number of nodes, rather than quadratic (except for some + border cases such as complete graphs or circle shaped graphs). + + Parameters + ---------- + G : NetworkX graph + An undirected graph + + compute : string denoting the requesting metric + "diameter" for the maximal eccentricity value, + "radius" for the minimal eccentricity value, + "periphery" for the set of nodes with eccentricity equal to the diameter, + "center" for the set of nodes with eccentricity equal to the radius, + "eccentricities" for the maximum distance from each node to all other nodes in G + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + value : value of the requested metric + int for "diameter" and "radius" or + list of nodes for "center" and "periphery" or + dictionary of eccentricity values keyed by node for "eccentricities" + + Raises + ------ + NetworkXError + If the graph consists of multiple components + ValueError + If `compute` is not one of "diameter", "radius", "periphery", "center", or "eccentricities". + + Notes + ----- + This algorithm was proposed in [1]_ and discussed further in [2]_ and [3]_. + + References + ---------- + .. [1] F. W. Takes, W. A. Kosters, + "Determining the diameter of small world networks." + Proceedings of the 20th ACM international conference on Information and + knowledge management, 2011 + https://dl.acm.org/doi/abs/10.1145/2063576.2063748 + .. [2] F. W. Takes, W. A. Kosters, + "Computing the Eccentricity Distribution of Large Graphs." + Algorithms, 2013 + https://www.mdpi.com/1999-4893/6/1/100 + .. [3] M. Borassi, P. Crescenzi, M. Habib, W. A. Kosters, A. Marino, F. W. Takes, + "Fast diameter and radius BFS-based computation in (weakly connected) + real-world graphs: With an application to the six degrees of separation + games." 
+ Theoretical Computer Science, 2015 + https://www.sciencedirect.com/science/article/pii/S0304397515001644 + """ + # init variables + degrees = dict(G.degree()) # start with the highest degree node + minlowernode = max(degrees, key=degrees.get) + N = len(degrees) # number of nodes + # alternate between smallest lower and largest upper bound + high = False + # status variables + ecc_lower = dict.fromkeys(G, 0) + ecc_upper = dict.fromkeys(G, math.inf) + candidates = set(G) + + # (re)set bound extremes + minlower = math.inf + maxlower = 0 + minupper = math.inf + maxupper = 0 + + # repeat the following until there are no more candidates + while candidates: + if high: + current = maxuppernode # select node with largest upper bound + else: + current = minlowernode # select node with smallest lower bound + high = not high + + # get distances from/to current node and derive eccentricity + dist = nx.shortest_path_length(G, source=current, weight=weight) + + if len(dist) != N: + msg = "Cannot compute metric because graph is not connected." + raise nx.NetworkXError(msg) + current_ecc = max(dist.values()) + + # print status update + # print ("ecc of " + str(current) + " (" + str(ecc_lower[current]) + "/" + # + str(ecc_upper[current]) + ", deg: " + str(dist[current]) + ") is " + # + str(current_ecc)) + # print(ecc_upper) + + # (re)set bound extremes + maxuppernode = None + minlowernode = None + + # update node bounds + for i in candidates: + # update eccentricity bounds + d = dist[i] + ecc_lower[i] = low = max(ecc_lower[i], max(d, (current_ecc - d))) + ecc_upper[i] = upp = min(ecc_upper[i], current_ecc + d) + + # update min/max values of lower and upper bounds + minlower = min(ecc_lower[i], minlower) + maxlower = max(ecc_lower[i], maxlower) + minupper = min(ecc_upper[i], minupper) + maxupper = max(ecc_upper[i], maxupper) + + # update candidate set + if compute == "diameter": + ruled_out = { + i + for i in candidates + if ecc_upper[i] <= maxlower and 2 * ecc_lower[i] >= maxupper + } + elif compute == "radius": + ruled_out = { + i + for i in candidates + if ecc_lower[i] >= minupper and ecc_upper[i] + 1 <= 2 * minlower + } + elif compute == "periphery": + ruled_out = { + i + for i in candidates + if ecc_upper[i] < maxlower + and (maxlower == maxupper or ecc_lower[i] > maxupper) + } + elif compute == "center": + ruled_out = { + i + for i in candidates + if ecc_lower[i] > minupper + and (minlower == minupper or ecc_upper[i] + 1 < 2 * minlower) + } + elif compute == "eccentricities": + ruled_out = set() + else: + msg = "compute must be one of 'diameter', 'radius', 'periphery', 'center', 'eccentricities'" + raise ValueError(msg) + + ruled_out.update(i for i in candidates if ecc_lower[i] == ecc_upper[i]) + candidates -= ruled_out + + # for i in ruled_out: + # print("removing %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (i,ecc_upper[i],maxlower,ecc_lower[i],maxupper)) + # print("node %g: ecc_u: %g maxl: %g ecc_l: %g maxu: %g"% + # (4,ecc_upper[4],maxlower,ecc_lower[4],maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower)) + # print("NODE 4: %g"%(2 * ecc_lower[4] >= maxupper)) + # print("NODE 4: %g"%(ecc_upper[4] <= maxlower + # and 2 * ecc_lower[4] >= maxupper)) + + # updating maxuppernode and minlowernode for selection in next round + for i in candidates: + if ( + minlowernode is None + or ( + ecc_lower[i] == ecc_lower[minlowernode] + and degrees[i] > degrees[minlowernode] + ) + or (ecc_lower[i] < ecc_lower[minlowernode]) + ): + minlowernode = i + + if ( + maxuppernode is None + or ( + ecc_upper[i] 
== ecc_upper[maxuppernode] + and degrees[i] > degrees[maxuppernode] + ) + or (ecc_upper[i] > ecc_upper[maxuppernode]) + ): + maxuppernode = i + + # print status update + # print (" min=" + str(minlower) + "/" + str(minupper) + + # " max=" + str(maxlower) + "/" + str(maxupper) + + # " candidates: " + str(len(candidates))) + # print("cand:",candidates) + # print("ecc_l",ecc_lower) + # print("ecc_u",ecc_upper) + # wait = input("press Enter to continue") + + # return the correct value of the requested metric + if compute == "diameter": + return maxlower + if compute == "radius": + return minupper + if compute == "periphery": + p = [v for v in G if ecc_lower[v] == maxlower] + return p + if compute == "center": + c = [v for v in G if ecc_upper[v] == minupper] + return c + if compute == "eccentricities": + return ecc_lower + return None + + +@nx._dispatchable(edge_attrs="weight") +def eccentricity(G, v=None, sp=None, weight=None): + """Returns the eccentricity of nodes in G. + + The eccentricity of a node v is the maximum distance from v to + all other nodes in G. + + Parameters + ---------- + G : NetworkX graph + A graph + + v : node, optional + Return value of specified node + + sp : dict of dicts, optional + All pairs shortest path lengths as a dictionary of dictionaries + + weight : string, function, or None (default=None) + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + ecc : dictionary + A dictionary of eccentricity values keyed by node. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> dict(nx.eccentricity(G)) + {1: 2, 2: 3, 3: 2, 4: 2, 5: 3} + + >>> dict( + ... nx.eccentricity(G, v=[1, 5]) + ... ) # This returns the eccentricity of node 1 & 5 + {1: 2, 5: 3} + + """ + # if v is None: # none, use entire graph + # nodes=G.nodes() + # elif v in G: # is v a single node + # nodes=[v] + # else: # assume v is a container of nodes + # nodes=v + order = G.order() + e = {} + for n in G.nbunch_iter(v): + if sp is None: + length = nx.shortest_path_length(G, source=n, weight=weight) + + L = len(length) + else: + try: + length = sp[n] + L = len(length) + except TypeError as err: + raise nx.NetworkXError('Format of "sp" is invalid.') from err + if L != order: + if G.is_directed(): + msg = ( + "Found infinite path length because the digraph is not" + " strongly connected" + ) + else: + msg = "Found infinite path length because the graph is not connected" + raise nx.NetworkXError(msg) + + e[n] = max(length.values()) + + if v in G: + return e[v] # return single value + return e + + +@nx._dispatchable(edge_attrs="weight") +def diameter(G, e=None, usebounds=False, weight=None): + """Returns the diameter of the graph G. + + The diameter is the maximum eccentricity. 
+ + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + usebounds : bool, optional + If `True`, use extrema bounding (see Notes) when computing the diameter + for undirected graphs. Extrema bounding may accelerate the + distance calculation for some graphs. `usebounds` is ignored if `G` is + directed or if `e` is not `None`. Default is `False`. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + d : integer + Diameter of graph + + Notes + ----- + When ``usebounds=True``, the computation makes use of smart lower + and upper bounds and is often linear in the number of nodes, rather than + quadratic (except for some border cases such as complete graphs or circle + shaped-graphs). + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.diameter(G) + 3 + + See Also + -------- + eccentricity + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="diameter", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + return max(e.values()) + + +@nx._dispatchable(edge_attrs="weight") +def harmonic_diameter(G, sp=None, *, weight=None): + """Returns the harmonic diameter of the graph G. + + The harmonic diameter of a graph is the harmonic mean of the distances + between all pairs of distinct vertices. Graphs that are not strongly + connected have infinite diameter and mean distance, making such + measures not useful. Restricting the diameter or mean distance to + finite distances yields paradoxical values (e.g., a perfect match + would have diameter one). The harmonic mean handles gracefully + infinite distances (e.g., a perfect match has harmonic diameter equal + to the number of vertices minus one), making it possible to assign a + meaningful value to all graphs. + + Note that in [1] the harmonic diameter is called "connectivity length": + however, "harmonic diameter" is a more standard name from the + theory of metric spaces. The name "harmonic mean distance" is perhaps + a more descriptive name, but is not used in the literature, so we use the + name "harmonic diameter" here. + + Parameters + ---------- + G : NetworkX graph + A graph + + sp : dict of dicts, optional + All-pairs shortest path lengths as a dictionary of dictionaries + + weight : string, function, or None (default=None) + If None, every edge has weight/distance 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If a function, the weight of an edge is the value returned by the function. 
+ The function must accept exactly three positional arguments: + the two endpoints of an edge and the dictionary of edge attributes for + that edge. The function must return a number. + + Returns + ------- + hd : float + Harmonic diameter of graph + + References + ---------- + .. [1] Massimo Marchiori and Vito Latora, "Harmony in the small-world". + *Physica A: Statistical Mechanics and Its Applications* + 285(3-4), pages 539-546, 2000. + + """ + order = G.order() + + sum_invd = 0 + for n in G: + if sp is None: + length = nx.single_source_dijkstra_path_length(G, n, weight=weight) + else: + try: + length = sp[n] + L = len(length) + except TypeError as err: + raise nx.NetworkXError('Format of "sp" is invalid.') from err + + for d in length.values(): + # Note that this will skip the zero distance from n to itself, + # as it should be, but also zero-weight paths in weighted graphs. + if d != 0: + sum_invd += 1 / d + + if sum_invd != 0: + return order * (order - 1) / sum_invd + if order > 1: + return math.inf + return math.nan + + +@nx._dispatchable(edge_attrs="weight") +def periphery(G, e=None, usebounds=False, weight=None): + """Returns the periphery of the graph G. + + The periphery is the set of nodes with eccentricity equal to the diameter. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + usebounds : bool, optional + If `True`, use extrema bounding (see Notes) when computing the periphery + for undirected graphs. Extrema bounding may accelerate the + distance calculation for some graphs. `usebounds` is ignored if `G` is + directed or if `e` is not `None`. Default is `False`. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + p : list + List of nodes in periphery + + Notes + ----- + When ``usebounds=True``, the computation makes use of smart lower + and upper bounds and is often linear in the number of nodes, rather than + quadratic (except for some border cases such as complete graphs or circle + shaped-graphs). + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.periphery(G) + [2, 5] + + See Also + -------- + barycenter + center + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="periphery", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + diameter = max(e.values()) + p = [v for v in e if e[v] == diameter] + return p + + +@nx._dispatchable(edge_attrs="weight") +def radius(G, e=None, usebounds=False, weight=None): + """Returns the radius of the graph G. + + The radius is the minimum eccentricity. 
+ + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + usebounds : bool, optional + If `True`, use extrema bounding (see Notes) when computing the radius + for undirected graphs. Extrema bounding may accelerate the + distance calculation for some graphs. `usebounds` is ignored if `G` is + directed or if `e` is not `None`. Default is `False`. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. + + Returns + ------- + r : integer + Radius of graph + + Notes + ----- + When ``usebounds=True``, the computation makes use of smart lower + and upper bounds and is often linear in the number of nodes, rather than + quadratic (except for some border cases such as complete graphs or circle + shaped-graphs). + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> nx.radius(G) + 2 + + """ + if usebounds is True and e is None and not G.is_directed(): + return _extrema_bounding(G, compute="radius", weight=weight) + if e is None: + e = eccentricity(G, weight=weight) + return min(e.values()) + + +@nx._dispatchable(edge_attrs="weight") +def center(G, e=None, usebounds=False, weight=None): + """Returns the center of the graph G. + + The center is the set of nodes with eccentricity equal to radius. + + Parameters + ---------- + G : NetworkX graph + A graph + + e : eccentricity dictionary, optional + A precomputed dictionary of eccentricities. + + usebounds : bool, optional + If `True`, use extrema bounding (see Notes) when computing the center + for undirected graphs. Extrema bounding may accelerate the + distance calculation for some graphs. `usebounds` is ignored if `G` is + directed or if `e` is not `None`. Default is `False`. + + weight : string, function, or None + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + If this is None, every edge has weight/distance/cost 1. + + Weights stored as floating point values can lead to small round-off + errors in distances. Use integer weights to avoid this. + + Weights should be positive, since they are distances. 
+
+    Returns
+    -------
+    c : list
+        List of nodes in center
+
+    Notes
+    -----
+    When ``usebounds=True``, the computation makes use of smart lower
+    and upper bounds and is often linear in the number of nodes, rather than
+    quadratic (except for some border cases such as complete graphs or
+    circle-shaped graphs).
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
+    >>> list(nx.center(G))
+    [1, 3, 4]
+
+    See Also
+    --------
+    barycenter
+    periphery
+    """
+    if usebounds is True and e is None and not G.is_directed():
+        return _extrema_bounding(G, compute="center", weight=weight)
+    if e is None and weight is None and not G.is_directed() and nx.is_tree(G):
+        return _tree_center(G)
+    if e is None:
+        e = eccentricity(G, weight=weight)
+    radius = min(e.values())
+    p = [v for v in e if e[v] == radius]
+    return p
+
+
+@nx._dispatchable(edge_attrs="weight", mutates_input={"attr": 2})
+def barycenter(G, weight=None, attr=None, sp=None):
+    r"""Calculate barycenter of a connected graph, optionally with edge weights.
+
+    The :dfn:`barycenter` of a
+    :func:`connected <networkx.algorithms.components.is_connected>` graph
+    :math:`G` is the subgraph induced by the set of its nodes :math:`v`
+    minimizing the objective function
+
+    .. math::
+
+        \sum_{u \in V(G)} d_G(u, v),
+
+    where :math:`d_G` is the (possibly weighted) :func:`path length
+    <networkx.algorithms.shortest_paths.generic.shortest_path_length>`.
+    The barycenter is also called the :dfn:`median`. See [West01]_, p. 78.
+
+    Parameters
+    ----------
+    G : :class:`networkx.Graph`
+        The connected graph :math:`G`.
+    weight : :class:`str`, optional
+        Passed through to
+        :func:`~networkx.algorithms.shortest_paths.generic.shortest_path_length`.
+    attr : :class:`str`, optional
+        If given, write the value of the objective function to each node's
+        `attr` attribute. Otherwise do not store the value.
+    sp : dict of dicts, optional
+        All-pairs shortest path lengths as a dictionary of dictionaries
+
+    Returns
+    -------
+    list
+        Nodes of `G` that induce the barycenter of `G`.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If `G` is disconnected. `G` may appear disconnected to
+        :func:`barycenter` if `sp` is given but is missing shortest path
+        lengths for any pairs.
+    ValueError
+        If `sp` and `weight` are both given.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
+    >>> nx.barycenter(G)
+    [1, 3, 4]
+
+    See Also
+    --------
+    center
+    periphery
+    """
+    if sp is None:
+        sp = nx.shortest_path_length(G, weight=weight)
+    else:
+        sp = sp.items()
+        if weight is not None:
+            raise ValueError("Cannot use both sp and weight arguments together")
+    smallest, barycenter_vertices, n = float("inf"), [], len(G)
+    for v, dists in sp:
+        if len(dists) < n:
+            raise nx.NetworkXNoPath(
+                f"Input graph {G} is disconnected, so every induced subgraph "
+                "has infinite barycentricity."
+            )
+        barycentricity = sum(dists.values())
+        if attr is not None:
+            G.nodes[v][attr] = barycentricity
+        if barycentricity < smallest:
+            smallest = barycentricity
+            barycenter_vertices = [v]
+        elif barycentricity == smallest:
+            barycenter_vertices.append(v)
+    if attr is not None:
+        nx._clear_cache(G)
+    return barycenter_vertices
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def resistance_distance(G, nodeA=None, nodeB=None, weight=None, invert_weight=True):
+    """Returns the resistance distance between pairs of nodes in graph G.
+
+    The resistance distance between two nodes of a graph is akin to treating
+    the graph as a grid of resistors with a resistance equal to the provided
+    weight [1]_, [2]_.
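+
+    For intuition: in a triangle of unit resistors, two adjacent nodes are
+    joined by a direct unit resistor in parallel with a two-resistor path,
+    giving 1 * 2 / (1 + 2) = 2/3:
+
+    >>> round(nx.resistance_distance(nx.cycle_graph(3), 0, 1), 4)
+    0.6667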
+
+    If weight is not provided, then a weight of 1 is used for all edges.
+
+    If two nodes are the same, the resistance distance is zero.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph
+
+    nodeA : node or None, optional (default=None)
+        A node within graph G.
+        If None, compute resistance distance using all nodes as source nodes.
+
+    nodeB : node or None, optional (default=None)
+        A node within graph G.
+        If None, compute resistance distance using all nodes as target nodes.
+
+    weight : string or None, optional (default=None)
+        The edge data key used to compute the resistance distance.
+        If None, then each edge has weight 1.
+
+    invert_weight : boolean (default=True)
+        Proper calculation of resistance distance requires building the
+        Laplacian matrix with the reciprocal of the weight. Not required
+        if the weight is already inverted. Weight cannot be zero.
+
+    Returns
+    -------
+    rd : dict or float
+        If `nodeA` and `nodeB` are given, resistance distance between `nodeA`
+        and `nodeB`. If `nodeA` or `nodeB` is unspecified (the default), a
+        dictionary keyed by node whose values are the resistance distances.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a directed graph.
+
+    NetworkXError
+        If `G` is not connected, or contains no nodes,
+        or `nodeA` is not in `G` or `nodeB` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)])
+    >>> round(nx.resistance_distance(G, 1, 3), 10)
+    0.625
+
+    Notes
+    -----
+    The implementation is based on Theorem A in [2]_. Self-loops are ignored.
+    Multi-edges are contracted into a single edge whose weight is the
+    harmonic sum of the original weights.
+
+    References
+    ----------
+    .. [1] Wikipedia
+       "Resistance distance."
+       https://en.wikipedia.org/wiki/Resistance_distance
+    .. [2] D. J. Klein and M. Randic.
+       Resistance distance.
+       J. of Math. Chem. 12:81-95, 1993.
+ """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + if not nx.is_connected(G): + raise nx.NetworkXError("Graph G must be strongly connected.") + if nodeA is not None and nodeA not in G: + raise nx.NetworkXError("Node A is not in graph G.") + if nodeB is not None and nodeB not in G: + raise nx.NetworkXError("Node B is not in graph G.") + + G = G.copy() + node_list = list(G) + + # Invert weights + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Compute resistance distance using the Pseudo-inverse of the Laplacian + # Self-loops are ignored + L = nx.laplacian_matrix(G, weight=weight).todense() + Linv = np.linalg.pinv(L, hermitian=True) + + # Return relevant distances + if nodeA is not None and nodeB is not None: + i = node_list.index(nodeA) + j = node_list.index(nodeB) + return Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + + elif nodeA is not None: + i = node_list.index(nodeA) + d = {} + for n in G: + j = node_list.index(n) + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + return d + + elif nodeB is not None: + j = node_list.index(nodeB) + d = {} + for n in G: + i = node_list.index(n) + d[n] = Linv.item(i, i) + Linv.item(j, j) - Linv.item(i, j) - Linv.item(j, i) + return d + + else: + d = {} + for n in G: + i = node_list.index(n) + d[n] = {} + for n2 in G: + j = node_list.index(n2) + d[n][n2] = ( + Linv.item(i, i) + + Linv.item(j, j) + - Linv.item(i, j) + - Linv.item(j, i) + ) + return d + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def effective_graph_resistance(G, weight=None, invert_weight=True): + """Returns the Effective graph resistance of G. + + Also known as the Kirchhoff index. + + The effective graph resistance is defined as the sum + of the resistance distance of every node pair in G [1]_. + + If weight is not provided, then a weight of 1 is used for all edges. + + The effective graph resistance of a disconnected graph is infinite. + + Parameters + ---------- + G : NetworkX graph + A graph + + weight : string or None, optional (default=None) + The edge data key used to compute the effective graph resistance. + If None, then each edge has weight 1. + + invert_weight : boolean (default=True) + Proper calculation of resistance distance requires building the + Laplacian matrix with the reciprocal of the weight. Not required + if the weight is already inverted. Weight cannot be zero. + + Returns + ------- + RG : float + The effective graph resistance of `G`. + + Raises + ------ + NetworkXNotImplemented + If `G` is a directed graph. + + NetworkXError + If `G` does not contain any nodes. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (1, 4), (3, 4), (3, 5), (4, 5)]) + >>> round(nx.effective_graph_resistance(G), 10) + 10.25 + + Notes + ----- + The implementation is based on Theorem 2.2 in [2]_. Self-loops are ignored. + Multi-edges are contracted in one edge with weight equal to the harmonic sum of the weights. + + References + ---------- + .. [1] Wolfram + "Kirchhoff Index." + https://mathworld.wolfram.com/KirchhoffIndex.html + .. [2] W. Ellens, F. M. Spieksma, P. Van Mieghem, A. Jamakovic, R. E. Kooij. + Effective graph resistance. + Lin. Alg. Appl. 435:2491-2506, 2011. 
+ """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + + # Disconnected graphs have infinite Effective graph resistance + if not nx.is_connected(G): + return float("inf") + + # Invert weights + G = G.copy() + if invert_weight and weight is not None: + if G.is_multigraph(): + for u, v, k, d in G.edges(keys=True, data=True): + d[weight] = 1 / d[weight] + else: + for u, v, d in G.edges(data=True): + d[weight] = 1 / d[weight] + + # Get Laplacian eigenvalues + mu = np.sort(nx.laplacian_spectrum(G, weight=weight)) + + # Compute Effective graph resistance based on spectrum of the Laplacian + # Self-loops are ignored + return float(np.sum(1 / mu[1:]) * G.number_of_nodes()) + + +@nx.utils.not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def kemeny_constant(G, *, weight=None): + """Returns the Kemeny constant of the given graph. + + The *Kemeny constant* (or Kemeny's constant) of a graph `G` + can be computed by regarding the graph as a Markov chain. + The Kemeny constant is then the expected number of time steps + to transition from a starting state i to a random destination state + sampled from the Markov chain's stationary distribution. + The Kemeny constant is independent of the chosen initial state [1]_. + + The Kemeny constant measures the time needed for spreading + across a graph. Low values indicate a closely connected graph + whereas high values indicate a spread-out graph. + + If weight is not provided, then a weight of 1 is used for all edges. + + Since `G` represents a Markov chain, the weights must be positive. + + Parameters + ---------- + G : NetworkX graph + + weight : string or None, optional (default=None) + The edge data key used to compute the Kemeny constant. + If None, then each edge has weight 1. + + Returns + ------- + float + The Kemeny constant of the graph `G`. + + Raises + ------ + NetworkXNotImplemented + If the graph `G` is directed. + + NetworkXError + If the graph `G` is not connected, or contains no nodes, + or has edges with negative weights. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.kemeny_constant(G), 10) + 3.2 + + Notes + ----- + The implementation is based on equation (3.3) in [2]_. + Self-loops are allowed and indicate a Markov chain where + the state can remain the same. Multi-edges are contracted + in one edge with weight equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kemeny's constant." + https://en.wikipedia.org/wiki/Kemeny%27s_constant + .. [2] Lovász L. + Random walks on graphs: A survey. + Paul Erdös is Eighty, vol. 2, Bolyai Society, + Mathematical Studies, Keszthely, Hungary (1993), pp. 
1-46 + """ + import numpy as np + import scipy as sp + + if len(G) == 0: + raise nx.NetworkXError("Graph G must contain at least one node.") + if not nx.is_connected(G): + raise nx.NetworkXError("Graph G must be connected.") + if nx.is_negatively_weighted(G, weight=weight): + raise nx.NetworkXError("The weights of graph G must be nonnegative.") + + # Compute matrix H = D^-1/2 A D^-1/2 + A = nx.adjacency_matrix(G, weight=weight) + n, m = A.shape + diags = A.sum(axis=1) + with np.errstate(divide="ignore"): + diags_sqrt = 1.0 / np.sqrt(diags) + diags_sqrt[np.isinf(diags_sqrt)] = 0 + DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, m, n, format="csr")) + H = DH @ (A @ DH) + + # Compute eigenvalues of H + eig = np.sort(sp.linalg.eigvalsh(H.todense())) + + # Compute the Kemeny constant + return float(np.sum(1 / (1 - eig[:-1]))) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py new file mode 100644 index 0000000..27b4d02 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/distance_regular.py @@ -0,0 +1,238 @@ +""" +======================= +Distance-regular graphs +======================= +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +from .distance_measures import diameter + +__all__ = [ + "is_distance_regular", + "is_strongly_regular", + "intersection_array", + "global_parameters", +] + + +@nx._dispatchable +def is_distance_regular(G): + """Returns True if the graph is distance regular, False otherwise. + + A connected graph G is distance-regular if for any nodes x,y + and any integers i,j=0,1,...,d (where d is the graph + diameter), the number of vertices at distance i from x and + distance j from y depends only on i,j and the graph distance + between x and y, independently of the choice of x and y. + + Parameters + ---------- + G: Networkx graph (undirected) + + Returns + ------- + bool + True if the graph is Distance Regular, False otherwise + + Examples + -------- + >>> G = nx.hypercube_graph(6) + >>> nx.is_distance_regular(G) + True + + See Also + -------- + intersection_array, global_parameters + + Notes + ----- + For undirected and simple graphs only + + References + ---------- + .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A. + Distance-Regular Graphs. New York: Springer-Verlag, 1989. + .. [2] Weisstein, Eric W. "Distance-Regular Graph." + http://mathworld.wolfram.com/Distance-RegularGraph.html + + """ + try: + intersection_array(G) + return True + except nx.NetworkXError: + return False + + +def global_parameters(b, c): + """Returns global parameters for a given intersection array. + + Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d + such that for any 2 vertices x,y in G at a distance i=d(x,y), there + are exactly c_i neighbors of y at a distance of i-1 from x and b_i + neighbors of y at a distance of i+1 from x. + + Thus, a distance regular graph has the global parameters, + [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the + intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] + where a_i+b_i+c_i=k , k= degree of every vertex. + + Parameters + ---------- + b : list + + c : list + + Returns + ------- + iterable + An iterable over three tuples. 
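+
+    Each generated triple ``(c_i, a_i, b_i)`` sums to the vertex degree
+    ``k = b[0]``, which gives a quick sanity check. The arrays below are
+    made up purely to illustrate this invariant:
+
+    >>> b, c = [3, 2, 1], [1, 1, 3]
+    >>> all(sum(row) == b[0] for row in nx.global_parameters(b, c))
+    True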
+ + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> b, c = nx.intersection_array(G) + >>> list(nx.global_parameters(b, c)) + [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)] + + References + ---------- + .. [1] Weisstein, Eric W. "Global Parameters." + From MathWorld--A Wolfram Web Resource. + http://mathworld.wolfram.com/GlobalParameters.html + + See Also + -------- + intersection_array + """ + return ((y, b[0] - x - y, x) for x, y in zip(b + [0], [0] + c)) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def intersection_array(G): + """Returns the intersection array of a distance-regular graph. + + Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d + such that for any 2 vertices x,y in G at a distance i=d(x,y), there + are exactly c_i neighbors of y at a distance of i-1 from x and b_i + neighbors of y at a distance of i+1 from x. + + A distance regular graph's intersection array is given by, + [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] + + Parameters + ---------- + G: Networkx graph (undirected) + + Returns + ------- + b,c: tuple of lists + + Examples + -------- + >>> G = nx.icosahedral_graph() + >>> nx.intersection_array(G) + ([5, 2, 1], [1, 2, 5]) + + References + ---------- + .. [1] Weisstein, Eric W. "Intersection Array." + From MathWorld--A Wolfram Web Resource. + http://mathworld.wolfram.com/IntersectionArray.html + + See Also + -------- + global_parameters + """ + # test for regular graph (all degrees must be equal) + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph has no nodes.") + degree = iter(G.degree()) + (_, k) = next(degree) + for _, knext in degree: + if knext != k: + raise nx.NetworkXError("Graph is not distance regular.") + k = knext + path_length = dict(nx.all_pairs_shortest_path_length(G)) + diameter = max(max(path_length[n].values()) for n in path_length) + bint = {} # 'b' intersection array + cint = {} # 'c' intersection array + for u in G: + for v in G: + try: + i = path_length[u][v] + except KeyError as err: # graph must be connected + raise nx.NetworkXError("Graph is not distance regular.") from err + # number of neighbors of v at a distance of i-1 from u + c = len([n for n in G[v] if path_length[n][u] == i - 1]) + # number of neighbors of v at a distance of i+1 from u + b = len([n for n in G[v] if path_length[n][u] == i + 1]) + # b,c are independent of u and v + if cint.get(i, c) != c or bint.get(i, b) != b: + raise nx.NetworkXError("Graph is not distance regular") + bint[i] = b + cint[i] = c + return ( + [bint.get(j, 0) for j in range(diameter)], + [cint.get(j + 1, 0) for j in range(diameter)], + ) + + +# TODO There is a definition for directed strongly regular graphs. +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_strongly_regular(G): + """Returns True if and only if the given graph is strongly + regular. + + An undirected graph is *strongly regular* if + + * it is regular, + * each pair of adjacent vertices has the same number of neighbors in + common, + * each pair of nonadjacent vertices has the same number of neighbors + in common. + + Each strongly regular graph is a distance-regular graph. + Conversely, if a distance-regular graph has diameter two, then it is + a strongly regular graph. For more information on distance-regular + graphs, see :func:`is_distance_regular`. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + Returns + ------- + bool + Whether `G` is strongly regular. 
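+
+    The Petersen graph, for instance, is a classic strongly regular graph;
+    a quick check of the predicate:
+
+    >>> nx.is_strongly_regular(nx.petersen_graph())
+    True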
+ + Examples + -------- + + The cycle graph on five vertices is strongly regular. It is + two-regular, each pair of adjacent vertices has no shared neighbors, + and each pair of nonadjacent vertices has one shared neighbor:: + + >>> G = nx.cycle_graph(5) + >>> nx.is_strongly_regular(G) + True + + """ + # Here is an alternate implementation based directly on the + # definition of strongly regular graphs: + # + # return (all_equal(G.degree().values()) + # and all_equal(len(common_neighbors(G, u, v)) + # for u, v in G.edges()) + # and all_equal(len(common_neighbors(G, u, v)) + # for u, v in non_edges(G))) + # + # We instead use the fact that a distance-regular graph of diameter + # two is strongly regular. + return is_distance_regular(G) and diameter(G) == 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py new file mode 100644 index 0000000..30cb811 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominance.py @@ -0,0 +1,135 @@ +""" +Dominance algorithms. +""" + +from functools import reduce + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["immediate_dominators", "dominance_frontiers"] + + +@not_implemented_for("undirected") +@nx._dispatchable +def immediate_dominators(G, start): + """Returns the immediate dominators of all nodes of a directed graph. + + Parameters + ---------- + G : a DiGraph or MultiDiGraph + The graph where dominance is to be computed. + + start : node + The start node of dominance computation. + + Returns + ------- + idom : dict keyed by nodes + A dict containing the immediate dominators of each node reachable from + `start`. + + Raises + ------ + NetworkXNotImplemented + If `G` is undirected. + + NetworkXError + If `start` is not in `G`. + + Notes + ----- + Except for `start`, the immediate dominators are the parents of their + corresponding nodes in the dominator tree. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)]) + >>> sorted(nx.immediate_dominators(G, 1).items()) + [(1, 1), (2, 1), (3, 1), (4, 3), (5, 1)] + + References + ---------- + .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken. + "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + if start not in G: + raise nx.NetworkXError("start is not in G") + + idom = {start: start} + + order = list(nx.dfs_postorder_nodes(G, start)) + dfn = {u: i for i, u in enumerate(order)} + order.pop() + order.reverse() + + def intersect(u, v): + while u != v: + while dfn[u] < dfn[v]: + u = idom[u] + while dfn[u] > dfn[v]: + v = idom[v] + return u + + changed = True + while changed: + changed = False + for u in order: + new_idom = reduce(intersect, (v for v in G.pred[u] if v in idom)) + if u not in idom or idom[u] != new_idom: + idom[u] = new_idom + changed = True + + return idom + + +@nx._dispatchable +def dominance_frontiers(G, start): + """Returns the dominance frontiers of all nodes of a directed graph. + + Parameters + ---------- + G : a DiGraph or MultiDiGraph + The graph where dominance is to be computed. + + start : node + The start node of dominance computation. + + Returns + ------- + df : dict keyed by nodes + A dict containing the dominance frontiers of each node reachable from + `start` as lists. + + Raises + ------ + NetworkXNotImplemented + If `G` is undirected. + + NetworkXError + If `start` is not in `G`. 
+ + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 5), (3, 4), (4, 5)]) + >>> sorted((u, sorted(df)) for u, df in nx.dominance_frontiers(G, 1).items()) + [(1, []), (2, [5]), (3, [5]), (4, [5]), (5, [])] + + References + ---------- + .. [1] Cooper, Keith D., Harvey, Timothy J. and Kennedy, Ken. + "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + idom = nx.immediate_dominators(G, start) + + df = {u: set() for u in idom} + for u in idom: + if len(G.pred[u]) >= 2: + for v in G.pred[u]: + if v in idom: + while v != idom[u]: + df[v].add(u) + v = idom[v] + return df diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py new file mode 100644 index 0000000..41c102a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/dominating.py @@ -0,0 +1,268 @@ +"""Functions for computing dominating sets in a graph.""" + +import math +from heapq import heappop, heappush +from itertools import chain, count + +import networkx as nx + +__all__ = [ + "dominating_set", + "is_dominating_set", + "connected_dominating_set", + "is_connected_dominating_set", +] + + +@nx._dispatchable +def dominating_set(G, start_with=None): + r"""Finds a dominating set for the graph G. + + A *dominating set* for a graph with node set *V* is a subset *D* of + *V* such that every node not in *D* is adjacent to at least one + member of *D* [1]_. + + Parameters + ---------- + G : NetworkX graph + + start_with : node (default=None) + Node to use as a starting point for the algorithm. + + Returns + ------- + D : set + A dominating set for G. + + Notes + ----- + This function is an implementation of algorithm 7 in [2]_ which + finds some dominating set, not necessarily the smallest one. + + See also + -------- + is_dominating_set + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Dominating_set + + .. [2] Abdol-Hossein Esfahanian. Connectivity Algorithms. + http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf + + """ + all_nodes = set(G) + if start_with is None: + start_with = nx.utils.arbitrary_element(all_nodes) + if start_with not in G: + raise nx.NetworkXError(f"node {start_with} is not in G") + dominating_set = {start_with} + dominated_nodes = set(G[start_with]) + remaining_nodes = all_nodes - dominated_nodes - dominating_set + while remaining_nodes: + # Choose an arbitrary node and determine its undominated neighbors. + v = remaining_nodes.pop() + undominated_nbrs = set(G[v]) - dominating_set + # Add the node to the dominating set and the neighbors to the + # dominated set. Finally, remove all of those nodes from the set + # of remaining nodes. + dominating_set.add(v) + dominated_nodes |= undominated_nbrs + remaining_nodes -= undominated_nbrs + return dominating_set + + +@nx._dispatchable +def is_dominating_set(G, nbunch): + """Checks if `nbunch` is a dominating set for `G`. + + A *dominating set* for a graph with node set *V* is a subset *D* of + *V* such that every node not in *D* is adjacent to at least one + member of *D* [1]_. + + Parameters + ---------- + G : NetworkX graph + + nbunch : iterable + An iterable of nodes in the graph `G`. + + Returns + ------- + dominating : bool + True if `nbunch` is a dominating set of `G`, false otherwise. + + See also + -------- + dominating_set + + References + ---------- + .. 
[1] https://en.wikipedia.org/wiki/Dominating_set
+
+    """
+    testset = {n for n in nbunch if n in G}
+    nbrs = set(chain.from_iterable(G[n] for n in testset))
+    return len(set(G) - testset - nbrs) == 0
+
+
+@nx.utils.not_implemented_for("directed")
+@nx._dispatchable
+def connected_dominating_set(G):
+    """Returns a connected dominating set.
+
+    A *dominating set* for a graph *G* with node set *V* is a subset *D* of *V*
+    such that every node not in *D* is adjacent to at least one member of *D*
+    [1]_. A *connected dominating set* is a dominating set *C* that induces a
+    connected subgraph of *G* [2]_.
+    Connected dominating sets are not unique in general; this function
+    returns one such set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected connected graph.
+
+    Returns
+    -------
+    connected_dominating_set : set
+        A dominating set of nodes which induces a connected subgraph of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    NetworkXError
+        If G is disconnected.
+
+    Examples
+    --------
+    >>> G = nx.Graph(
+    ...     [
+    ...         (1, 2),
+    ...         (1, 3),
+    ...         (1, 4),
+    ...         (1, 5),
+    ...         (1, 6),
+    ...         (2, 7),
+    ...         (3, 8),
+    ...         (4, 9),
+    ...         (5, 10),
+    ...         (6, 11),
+    ...         (7, 12),
+    ...         (8, 12),
+    ...         (9, 12),
+    ...         (10, 12),
+    ...         (11, 12),
+    ...     ]
+    ... )
+    >>> nx.connected_dominating_set(G)
+    {1, 2, 3, 4, 5, 6, 7}
+
+    Notes
+    -----
+    This function implements Algorithm I in its basic version as described
+    in [3]_. The idea behind the algorithm is the following: grow a tree *T*,
+    starting from a node with maximum degree. Throughout the growing process,
+    nonleaf nodes in *T* are our connected dominating set (CDS), leaf nodes in
+    *T* are marked as "seen" and nodes in G that are not yet in *T* are marked as
+    "unseen". We maintain a max-heap of all "seen" nodes, and track the number
+    of "unseen" neighbors for each node. At each step we pop the heap top -- a
+    "seen" (leaf) node with a maximal number of "unseen" neighbors, add it to the
+    CDS and mark all its "unseen" neighbors as "seen". For each of the newly
+    created "seen" nodes, we also decrement the number of "unseen" neighbors for
+    all its neighbors. The algorithm terminates when there are no more "unseen"
+    nodes.
+    The runtime complexity of this implementation is $O(|E| \log |V|)$
+    (amortized).
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Dominating_set
+    .. [2] https://en.wikipedia.org/wiki/Connected_dominating_set
+    .. [3] Guha, S. and Khuller, S.
+           *Approximation Algorithms for Connected Dominating Sets*,
+           Algorithmica, 20, 374-387, 1998.
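+
+    The returned set can be checked with :func:`is_connected_dominating_set`
+    (reusing ``G`` from the example above):
+
+    >>> nx.is_connected_dominating_set(G, nx.connected_dominating_set(G))
+    True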
+
+    """
+    if len(G) == 0:
+        return set()
+
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("G must be a connected graph")
+
+    if len(G) == 1:
+        return set(G)
+
+    G_succ = G._adj  # For speed-up
+
+    # Use the count c to avoid comparing nodes
+    c = count()
+
+    # Keep track of the number of unseen nodes adjacent to each node
+    unseen_degree = dict(G.degree)
+
+    # Find node with highest degree and update its neighbors
+    (max_deg_node, max_deg) = max(unseen_degree.items(), key=lambda x: x[1])
+    for nbr in G_succ[max_deg_node]:
+        unseen_degree[nbr] -= 1
+
+    # Initially all nodes except max_deg_node are unseen
+    unseen = set(G) - {max_deg_node}
+
+    # We want a max-heap of the unseen-degree using heapq, which is a min-heap
+    # So we store the negative of the unseen-degree
+    seen = [(-max_deg, next(c), max_deg_node)]
+
+    connected_dominating_set = set()
+
+    # Main loop
+    while unseen:
+        (neg_deg, cnt, u) = heappop(seen)
+        # Check if u's unseen-degree changed while in the heap
+        if -neg_deg > unseen_degree[u]:
+            heappush(seen, (-unseen_degree[u], cnt, u))
+            continue
+        # Mark all u's unseen neighbors as seen and add them to the heap
+        for v in G_succ[u]:
+            if v in unseen:
+                unseen.remove(v)
+                for nbr in G_succ[v]:
+                    unseen_degree[nbr] -= 1
+                heappush(seen, (-unseen_degree[v], next(c), v))
+        # Add u to the dominating set
+        connected_dominating_set.add(u)
+
+    return connected_dominating_set
+
+
+@nx.utils.not_implemented_for("directed")
+@nx._dispatchable
+def is_connected_dominating_set(G, nbunch):
+    """Checks if `nbunch` is a connected dominating set for `G`.
+
+    A *dominating set* for a graph *G* with node set *V* is a subset *D* of
+    *V* such that every node not in *D* is adjacent to at least one
+    member of *D* [1]_. A *connected dominating set* is a dominating
+    set *C* that induces a connected subgraph of *G* [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph.
+
+    nbunch : iterable
+        An iterable of nodes in the graph `G`.
+
+    Returns
+    -------
+    connected_dominating : bool
+        True if `nbunch` is a connected dominating set of `G`, false otherwise.
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Dominating_set
+    .. [2] https://en.wikipedia.org/wiki/Connected_dominating_set
+
+    """
+    return nx.is_dominating_set(G, nbunch) and nx.is_connected(nx.subgraph(G, nbunch))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py
new file mode 100644
index 0000000..b8e9d7a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/efficiency_measures.py
@@ -0,0 +1,167 @@
+"""Provides functions for computing the efficiency of nodes and graphs."""
+
+import networkx as nx
+from networkx.exception import NetworkXNoPath
+
+from ..utils import not_implemented_for
+
+__all__ = ["efficiency", "local_efficiency", "global_efficiency"]
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def efficiency(G, u, v):
+    """Returns the efficiency of a pair of nodes in a graph.
+
+    The *efficiency* of a pair of nodes is the multiplicative inverse of the
+    shortest path distance between the nodes [1]_. Returns 0 if no path
+    exists between the nodes.
+
+    Parameters
+    ----------
+    G : :class:`networkx.Graph`
+        An undirected graph in which to compute the efficiency of the
+        pair of nodes.
+    u, v : node
+        Nodes in the graph ``G``.
+
+    Returns
+    -------
+    float
+        Multiplicative inverse of the shortest path distance between the nodes.
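+
+    If `u` and `v` lie in different components, the efficiency is 0 by
+    convention:
+
+    >>> H = nx.Graph([(0, 1)])
+    >>> H.add_node(2)
+    >>> nx.efficiency(H, 0, 2)
+    0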
+ + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.efficiency(G, 2, 3) # this gives efficiency for node 2 and 3 + 0.5 + + Notes + ----- + Edge weights are ignored when computing the shortest path distances. + + See also + -------- + local_efficiency + global_efficiency + + References + ---------- + .. [1] Latora, Vito, and Massimo Marchiori. + "Efficient behavior of small-world networks." + *Physical Review Letters* 87.19 (2001): 198701. + + + """ + try: + eff = 1 / nx.shortest_path_length(G, u, v) + except NetworkXNoPath: + eff = 0 + return eff + + +@not_implemented_for("directed") +@nx._dispatchable +def global_efficiency(G): + """Returns the average global efficiency of the graph. + + The *efficiency* of a pair of nodes in a graph is the multiplicative + inverse of the shortest path distance between the nodes. The *average + global efficiency* of a graph is the average efficiency of all pairs of + nodes [1]_. + + Parameters + ---------- + G : :class:`networkx.Graph` + An undirected graph for which to compute the average global efficiency. + + Returns + ------- + float + The average global efficiency of the graph. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> round(nx.global_efficiency(G), 12) + 0.916666666667 + + Notes + ----- + Edge weights are ignored when computing the shortest path distances. + + See also + -------- + local_efficiency + + References + ---------- + .. [1] Latora, Vito, and Massimo Marchiori. + "Efficient behavior of small-world networks." + *Physical Review Letters* 87.19 (2001): 198701. + + + """ + n = len(G) + denom = n * (n - 1) + if denom != 0: + lengths = nx.all_pairs_shortest_path_length(G) + g_eff = 0 + for source, targets in lengths: + for target, distance in targets.items(): + if distance > 0: + g_eff += 1 / distance + g_eff /= denom + # g_eff = sum(1 / d for s, tgts in lengths + # for t, d in tgts.items() if d > 0) / denom + else: + g_eff = 0 + # TODO This can be made more efficient by computing all pairs shortest + # path lengths in parallel. + return g_eff + + +@not_implemented_for("directed") +@nx._dispatchable +def local_efficiency(G): + """Returns the average local efficiency of the graph. + + The *efficiency* of a pair of nodes in a graph is the multiplicative + inverse of the shortest path distance between the nodes. The *local + efficiency* of a node in the graph is the average global efficiency of the + subgraph induced by the neighbors of the node. The *average local + efficiency* is the average of the local efficiencies of each node [1]_. + + Parameters + ---------- + G : :class:`networkx.Graph` + An undirected graph for which to compute the average local efficiency. + + Returns + ------- + float + The average local efficiency of the graph. + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)]) + >>> nx.local_efficiency(G) + 0.9166666666666667 + + Notes + ----- + Edge weights are ignored when computing the shortest path distances. + + See also + -------- + global_efficiency + + References + ---------- + .. [1] Latora, Vito, and Massimo Marchiori. + "Efficient behavior of small-world networks." + *Physical Review Letters* 87.19 (2001): 198701. 
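+
+    As a consistency check against the definition above (reusing ``G`` from
+    the example):
+
+    >>> sum(nx.global_efficiency(G.subgraph(G[v])) for v in G) / len(G)
+    0.9166666666666667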
+ + + """ + efficiency_list = (global_efficiency(G.subgraph(G[v])) for v in G) + return sum(efficiency_list) / len(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py new file mode 100644 index 0000000..2c308e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/euler.py @@ -0,0 +1,470 @@ +""" +Eulerian circuits and graphs. +""" + +from itertools import combinations + +import networkx as nx + +from ..utils import arbitrary_element, not_implemented_for + +__all__ = [ + "is_eulerian", + "eulerian_circuit", + "eulerize", + "is_semieulerian", + "has_eulerian_path", + "eulerian_path", +] + + +@nx._dispatchable +def is_eulerian(G): + """Returns True if and only if `G` is Eulerian. + + A graph is *Eulerian* if it has an Eulerian circuit. An *Eulerian + circuit* is a closed walk that includes each edge of a graph exactly + once. + + Graphs with isolated vertices (i.e. vertices with zero degree) are not + considered to have Eulerian circuits. Therefore, if the graph is not + connected (or not strongly connected, for directed graphs), this function + returns False. + + Parameters + ---------- + G : NetworkX graph + A graph, either directed or undirected. + + Examples + -------- + >>> nx.is_eulerian(nx.DiGraph({0: [3], 1: [2], 2: [3], 3: [0, 1]})) + True + >>> nx.is_eulerian(nx.complete_graph(5)) + True + >>> nx.is_eulerian(nx.petersen_graph()) + False + + If you prefer to allow graphs with isolated vertices to have Eulerian circuits, + you can first remove such vertices and then call `is_eulerian` as below example shows. + + >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)]) + >>> G.add_node(3) + >>> nx.is_eulerian(G) + False + + >>> G.remove_nodes_from(list(nx.isolates(G))) + >>> nx.is_eulerian(G) + True + + + """ + if G.is_directed(): + # Every node must have equal in degree and out degree and the + # graph must be strongly connected + return all( + G.in_degree(n) == G.out_degree(n) for n in G + ) and nx.is_strongly_connected(G) + # An undirected Eulerian graph has no vertices of odd degree and + # must be connected. + return all(d % 2 == 0 for v, d in G.degree()) and nx.is_connected(G) + + +@nx._dispatchable +def is_semieulerian(G): + """Return True iff `G` is semi-Eulerian. + + G is semi-Eulerian if it has an Eulerian path but no Eulerian circuit. + + See Also + -------- + has_eulerian_path + is_eulerian + """ + return has_eulerian_path(G) and not is_eulerian(G) + + +def _find_path_start(G): + """Return a suitable starting vertex for an Eulerian path. + + If no path exists, return None. 
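+
+    For example, in a path graph the start must be one of the two
+    odd-degree endpoints:
+
+    >>> _find_path_start(nx.path_graph(3)) in {0, 2}
+    True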
+ """ + if not has_eulerian_path(G): + return None + + if is_eulerian(G): + return arbitrary_element(G) + + if G.is_directed(): + v1, v2 = (v for v in G if G.in_degree(v) != G.out_degree(v)) + # Determines which is the 'start' node (as opposed to the 'end') + if G.out_degree(v1) > G.in_degree(v1): + return v1 + else: + return v2 + + else: + # In an undirected graph randomly choose one of the possibilities + start = [v for v in G if G.degree(v) % 2 != 0][0] + return start + + +def _simplegraph_eulerian_circuit(G, source): + if G.is_directed(): + degree = G.out_degree + edges = G.out_edges + else: + degree = G.degree + edges = G.edges + vertex_stack = [source] + last_vertex = None + while vertex_stack: + current_vertex = vertex_stack[-1] + if degree(current_vertex) == 0: + if last_vertex is not None: + yield (last_vertex, current_vertex) + last_vertex = current_vertex + vertex_stack.pop() + else: + _, next_vertex = arbitrary_element(edges(current_vertex)) + vertex_stack.append(next_vertex) + G.remove_edge(current_vertex, next_vertex) + + +def _multigraph_eulerian_circuit(G, source): + if G.is_directed(): + degree = G.out_degree + edges = G.out_edges + else: + degree = G.degree + edges = G.edges + vertex_stack = [(source, None)] + last_vertex = None + last_key = None + while vertex_stack: + current_vertex, current_key = vertex_stack[-1] + if degree(current_vertex) == 0: + if last_vertex is not None: + yield (last_vertex, current_vertex, last_key) + last_vertex, last_key = current_vertex, current_key + vertex_stack.pop() + else: + triple = arbitrary_element(edges(current_vertex, keys=True)) + _, next_vertex, next_key = triple + vertex_stack.append((next_vertex, next_key)) + G.remove_edge(current_vertex, next_vertex, next_key) + + +@nx._dispatchable +def eulerian_circuit(G, source=None, keys=False): + """Returns an iterator over the edges of an Eulerian circuit in `G`. + + An *Eulerian circuit* is a closed walk that includes each edge of a + graph exactly once. + + Parameters + ---------- + G : NetworkX graph + A graph, either directed or undirected. + + source : node, optional + Starting node for circuit. + + keys : bool + If False, edges generated by this function will be of the form + ``(u, v)``. Otherwise, edges will be of the form ``(u, v, k)``. + This option is ignored unless `G` is a multigraph. + + Returns + ------- + edges : iterator + An iterator over edges in the Eulerian circuit. + + Raises + ------ + NetworkXError + If the graph is not Eulerian. + + See Also + -------- + is_eulerian + + Notes + ----- + This is a linear time implementation of an algorithm adapted from [1]_. + + For general information about Euler tours, see [2]_. + + References + ---------- + .. [1] J. Edmonds, E. L. Johnson. + Matching, Euler tours and the Chinese postman. + Mathematical programming, Volume 5, Issue 1 (1973), 111-114. + .. 
[2] https://en.wikipedia.org/wiki/Eulerian_path + + Examples + -------- + To get an Eulerian circuit in an undirected graph:: + + >>> G = nx.complete_graph(3) + >>> list(nx.eulerian_circuit(G)) + [(0, 2), (2, 1), (1, 0)] + >>> list(nx.eulerian_circuit(G, source=1)) + [(1, 2), (2, 0), (0, 1)] + + To get the sequence of vertices in an Eulerian circuit:: + + >>> [u for u, v in nx.eulerian_circuit(G)] + [0, 2, 1] + + """ + if not is_eulerian(G): + raise nx.NetworkXError("G is not Eulerian.") + if G.is_directed(): + G = G.reverse() + else: + G = G.copy() + if source is None: + source = arbitrary_element(G) + if G.is_multigraph(): + for u, v, k in _multigraph_eulerian_circuit(G, source): + if keys: + yield u, v, k + else: + yield u, v + else: + yield from _simplegraph_eulerian_circuit(G, source) + + +@nx._dispatchable +def has_eulerian_path(G, source=None): + """Return True iff `G` has an Eulerian path. + + An Eulerian path is a path in a graph which uses each edge of a graph + exactly once. If `source` is specified, then this function checks + whether an Eulerian path that starts at node `source` exists. + + A directed graph has an Eulerian path iff: + - at most one vertex has out_degree - in_degree = 1, + - at most one vertex has in_degree - out_degree = 1, + - every other vertex has equal in_degree and out_degree, + - and all of its vertices belong to a single connected + component of the underlying undirected graph. + + If `source` is not None, an Eulerian path starting at `source` exists if no + other node has out_degree - in_degree = 1. This is equivalent to either + there exists an Eulerian circuit or `source` has out_degree - in_degree = 1 + and the conditions above hold. + + An undirected graph has an Eulerian path iff: + - exactly zero or two vertices have odd degree, + - and all of its vertices belong to a single connected component. + + If `source` is not None, an Eulerian path starting at `source` exists if + either there exists an Eulerian circuit or `source` has an odd degree and the + conditions above hold. + + Graphs with isolated vertices (i.e. vertices with zero degree) are not considered + to have an Eulerian path. Therefore, if the graph is not connected (or not strongly + connected, for directed graphs), this function returns False. + + Parameters + ---------- + G : NetworkX Graph + The graph to find an euler path in. + + source : node, optional + Starting node for path. + + Returns + ------- + Bool : True if G has an Eulerian path. + + Examples + -------- + If you prefer to allow graphs with isolated vertices to have Eulerian path, + you can first remove such vertices and then call `has_eulerian_path` as below example shows. 
+
+    >>> G = nx.Graph([(0, 1), (1, 2), (0, 2)])
+    >>> G.add_node(3)
+    >>> nx.has_eulerian_path(G)
+    False
+
+    >>> G.remove_nodes_from(list(nx.isolates(G)))
+    >>> nx.has_eulerian_path(G)
+    True
+
+    See Also
+    --------
+    is_eulerian
+    eulerian_path
+    """
+    if nx.is_eulerian(G):
+        return True
+
+    if G.is_directed():
+        ins = G.in_degree
+        outs = G.out_degree
+        # Since we know it is not Eulerian, outs - ins must be 1 for source
+        if source is not None and outs[source] - ins[source] != 1:
+            return False
+
+        unbalanced_ins = 0
+        unbalanced_outs = 0
+        for v in G:
+            if ins[v] - outs[v] == 1:
+                unbalanced_ins += 1
+            elif outs[v] - ins[v] == 1:
+                unbalanced_outs += 1
+            elif ins[v] != outs[v]:
+                return False
+
+        return (
+            unbalanced_ins <= 1 and unbalanced_outs <= 1 and nx.is_weakly_connected(G)
+        )
+    else:
+        # We know it is not Eulerian, so the degree of source must be odd.
+        if source is not None and G.degree[source] % 2 != 1:
+            return False
+
+        # Sum is 2 since we know it is not Eulerian (which implies sum is 0)
+        return sum(d % 2 == 1 for v, d in G.degree()) == 2 and nx.is_connected(G)
+
+
+@nx._dispatchable
+def eulerian_path(G, source=None, keys=False):
+    """Return an iterator over the edges of an Eulerian path in `G`.
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+        The graph in which to look for an Eulerian path.
+    source : node or None (default: None)
+        The node at which to start the search. None means search over all
+        starting nodes.
+    keys : Bool (default: False)
+        Indicates whether to yield edge 3-tuples (u, v, edge_key).
+        The default yields edge 2-tuples.
+
+    Yields
+    ------
+    Edge tuples along the Eulerian path.
+
+    Warning: If the `source` provided is not the start node of an Eulerian
+    path, this function raises an error even if an Eulerian path exists.
+    """
+    if not has_eulerian_path(G, source):
+        raise nx.NetworkXError("Graph has no Eulerian paths.")
+    if G.is_directed():
+        G = G.reverse()
+        if source is None or nx.is_eulerian(G) is False:
+            source = _find_path_start(G)
+        if G.is_multigraph():
+            for u, v, k in _multigraph_eulerian_circuit(G, source):
+                if keys:
+                    yield u, v, k
+                else:
+                    yield u, v
+        else:
+            yield from _simplegraph_eulerian_circuit(G, source)
+    else:
+        G = G.copy()
+        if source is None:
+            source = _find_path_start(G)
+        if G.is_multigraph():
+            if keys:
+                yield from reversed(
+                    [(v, u, k) for u, v, k in _multigraph_eulerian_circuit(G, source)]
+                )
+            else:
+                yield from reversed(
+                    [(v, u) for u, v, k in _multigraph_eulerian_circuit(G, source)]
+                )
+        else:
+            yield from reversed(
+                [(v, u) for u, v in _simplegraph_eulerian_circuit(G, source)]
+            )
+
+
+@not_implemented_for("directed")
+@nx._dispatchable(returns_graph=True)
+def eulerize(G):
+    """Transforms a graph into an Eulerian graph.
+
+    If `G` is Eulerian the result is `G` as a MultiGraph, otherwise the result is a smallest
+    (in terms of the number of edges) multigraph whose underlying simple graph is `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph
+
+    Returns
+    -------
+    G : NetworkX multigraph
+
+    Raises
+    ------
+    NetworkXError
+        If the graph is not connected.
+
+    See Also
+    --------
+    is_eulerian
+    eulerian_circuit
+
+    References
+    ----------
+    .. [1] J. Edmonds, E. L. Johnson.
+       Matching, Euler tours and the Chinese postman.
+       Mathematical programming, Volume 5, Issue 1 (1973), 111-114.
+    .. [2] https://en.wikipedia.org/wiki/Eulerian_path
+    ..
[3] http://web.math.princeton.edu/math_alive/5/Notes1.pdf + + Examples + -------- + >>> G = nx.complete_graph(10) + >>> H = nx.eulerize(G) + >>> nx.is_eulerian(H) + True + + """ + if G.order() == 0: + raise nx.NetworkXPointlessConcept("Cannot Eulerize null graph") + if not nx.is_connected(G): + raise nx.NetworkXError("G is not connected") + odd_degree_nodes = [n for n, d in G.degree() if d % 2 == 1] + G = nx.MultiGraph(G) + if len(odd_degree_nodes) == 0: + return G + + # get all shortest paths between vertices of odd degree + odd_deg_pairs_paths = [ + (m, {n: nx.shortest_path(G, source=m, target=n)}) + for m, n in combinations(odd_degree_nodes, 2) + ] + + # use the number of vertices in a graph + 1 as an upper bound on + # the maximum length of a path in G + upper_bound_on_max_path_length = len(G) + 1 + + # use "len(G) + 1 - len(P)", + # where P is a shortest path between vertices n and m, + # as edge-weights in a new graph + # store the paths in the graph for easy indexing later + Gp = nx.Graph() + for n, Ps in odd_deg_pairs_paths: + for m, P in Ps.items(): + if n != m: + Gp.add_edge( + m, n, weight=upper_bound_on_max_path_length - len(P), path=P + ) + + # find the minimum weight matching of edges in the weighted graph + best_matching = nx.Graph(list(nx.max_weight_matching(Gp))) + + # duplicate each edge along each path in the set of paths in Gp + for m, n in best_matching.edges(): + path = Gp[m][n]["path"] + G.add_edges_from(nx.utils.pairwise(path)) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py new file mode 100644 index 0000000..c5d19ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__init__.py @@ -0,0 +1,11 @@ +from .maxflow import * +from .mincost import * +from .boykovkolmogorov import * +from .dinitz_alg import * +from .edmondskarp import * +from .gomory_hu import * +from .preflowpush import * +from .shortestaugmentingpath import * +from .capacityscaling import * +from .networksimplex import * +from .utils import build_flow_dict, build_residual_network diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3ac682c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-312.pyc new file mode 100644 index 0000000..f1493ed Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/boykovkolmogorov.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-312.pyc new file mode 100644 index 0000000..554874e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/capacityscaling.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-312.pyc new file mode 100644 index 0000000..b876aa9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/dinitz_alg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-312.pyc new file mode 100644 index 0000000..9fa503c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/edmondskarp.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-312.pyc new file mode 100644 index 0000000..6427f65 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/gomory_hu.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-312.pyc new file mode 100644 index 0000000..8a6c735 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/maxflow.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-312.pyc new file mode 100644 index 0000000..174eb07 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/mincost.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-312.pyc new file mode 100644 index 0000000..b6f819c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/networksimplex.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-312.pyc new file mode 100644 index 0000000..8d9260f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/preflowpush.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-312.pyc new file mode 100644 index 0000000..eea7e89 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/shortestaugmentingpath.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000..2490dbc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/__pycache__/utils.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py
new file mode 100644
index 0000000..30899c6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/boykovkolmogorov.py
@@ -0,0 +1,370 @@
+"""
+Boykov-Kolmogorov algorithm for maximum flow problems.
+"""
+
+from collections import deque
+from operator import itemgetter
+
+import networkx as nx
+from networkx.algorithms.flow.utils import build_residual_network
+
+__all__ = ["boykov_kolmogorov"]
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True)
+def boykov_kolmogorov(
+    G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None
+):
+    r"""Find a maximum single-commodity flow using the Boykov-Kolmogorov algorithm.
+
+    This function returns the residual network resulting after computing
+    the maximum flow. See below for details about the conventions
+    NetworkX uses for defining residual networks.
+
+    This algorithm has worst-case complexity $O(n^2 m |C|)$ for $n$ nodes, $m$
+    edges, and $|C|$ the cost of the minimum cut [1]_. This implementation
+    uses the marking heuristic defined in [2]_ which improves its running
+    time in many practical problems.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Edges of the graph are expected to have an attribute called
+        'capacity'. If this attribute is not present, the edge is
+        considered to have infinite capacity.
+
+    s : node
+        Source node for the flow.
+
+    t : node
+        Sink node for the flow.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    residual : NetworkX graph
+        Residual network on which the algorithm is to be executed. If None, a
+        new residual network is created. Default value: None.
+
+    value_only : bool
+        If True compute only the value of the maximum flow. This parameter
+        will be ignored by this algorithm because it is not applicable.
+
+    cutoff : integer, float
+        If specified, the algorithm will terminate when the flow value reaches
+        or exceeds the cutoff. In this case, it may be unable to immediately
+        determine a minimum cut. Default value: None.
+
+    Returns
+    -------
+    R : NetworkX DiGraph
+        Residual network after computing the maximum flow.
+
+    Raises
+    ------
+    NetworkXError
+        The algorithm does not support MultiGraph and MultiDiGraph. If
+        the input graph is an instance of one of these two classes, a
+        NetworkXError is raised.
+
+    NetworkXUnbounded
+        If the graph has a path of infinite capacity, the value of a
+        feasible flow on the graph is unbounded above and the function
+        raises a NetworkXUnbounded.
+
+    See also
+    --------
+    :meth:`maximum_flow`
+    :meth:`minimum_cut`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Notes
+    -----
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem.
This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not + specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such + that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Examples + -------- + >>> from networkx.algorithms.flow import boykov_kolmogorov + + The functions that implement flow algorithms and output a residual + network, such as this one, are not imported to the base NetworkX + namespace, so you have to explicitly import them from the flow package. + + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = boykov_kolmogorov(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value + 3.0 + >>> flow_value == R.graph["flow_value"] + True + + A nice feature of the Boykov-Kolmogorov algorithm is that a partition + of the nodes that defines a minimum cut can be easily computed based + on the search trees used during the algorithm. These trees are stored + in the graph attribute `trees` of the residual network. + + >>> source_tree, target_tree = R.graph["trees"] + >>> partition = (set(source_tree), set(G) - set(source_tree)) + + Or equivalently: + + >>> partition = (set(G) - set(target_tree), set(target_tree)) + + References + ---------- + .. [1] Boykov, Y., & Kolmogorov, V. (2004). An experimental comparison + of min-cut/max-flow algorithms for energy minimization in vision. + Pattern Analysis and Machine Intelligence, IEEE Transactions on, + 26(9), 1124-1137. + https://doi.org/10.1109/TPAMI.2004.60 + + .. [2] Vladimir Kolmogorov. Graph-based Algorithms for Multi-camera + Reconstruction Problem. PhD thesis, Cornell University, CS Department, + 2003. pp. 109-114. + https://web.archive.org/web/20170809091249/https://pub.ist.ac.at/~vnk/papers/thesis.pdf + + """ + R = boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff) + R.graph["algorithm"] = "boykov_kolmogorov" + nx._clear_cache(R) + return R + + +def boykov_kolmogorov_impl(G, s, t, capacity, residual, cutoff): + if s not in G: + raise nx.NetworkXError(f"node {str(s)} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {str(t)} not in graph") + if s == t: + raise nx.NetworkXError("source and sink are the same node") + + if residual is None: + R = build_residual_network(G, capacity) + else: + R = residual + + # Initialize/reset the residual network. + # This is way too slow + # nx.set_edge_attributes(R, 0, 'flow') + for u in R: + for e in R[u].values(): + e["flow"] = 0 + + # Use an arbitrary high value as infinite. It is computed + # when building the residual network. + INF = R.graph["inf"] + + if cutoff is None: + cutoff = INF + + R_succ = R.succ + R_pred = R.pred + + def grow(): + """Bidirectional breadth-first search for the growth stage. + + Returns a connecting edge, that is, an edge that connects + a node from the source search tree with a node from the + target search tree.
+ The first node in the connecting edge is always from the + source tree and the last node from the target tree. + """ + while active: + u = active[0] + if u in source_tree: + this_tree = source_tree + other_tree = target_tree + neighbors = R_succ + else: + this_tree = target_tree + other_tree = source_tree + neighbors = R_pred + for v, attr in neighbors[u].items(): + if attr["capacity"] - attr["flow"] > 0: + if v not in this_tree: + if v in other_tree: + return (u, v) if this_tree is source_tree else (v, u) + this_tree[v] = u + dist[v] = dist[u] + 1 + timestamp[v] = timestamp[u] + active.append(v) + elif v in this_tree and _is_closer(u, v): + this_tree[v] = u + dist[v] = dist[u] + 1 + timestamp[v] = timestamp[u] + _ = active.popleft() + return None, None + + def augment(u, v): + """Augmentation stage. + + Reconstruct the path and determine its residual capacity. + We start from a connecting edge, which links a node + from the source tree to a node from the target tree. + The connecting edge is the output of the grow function + and the input of this function. + """ + attr = R_succ[u][v] + flow = min(INF, attr["capacity"] - attr["flow"]) + path = [u] + # Trace a path from u to s in source_tree. + w = u + while w != s: + n = w + w = source_tree[n] + attr = R_pred[n][w] + flow = min(flow, attr["capacity"] - attr["flow"]) + path.append(w) + path.reverse() + # Trace a path from v to t in target_tree. + path.append(v) + w = v + while w != t: + n = w + w = target_tree[n] + attr = R_succ[n][w] + flow = min(flow, attr["capacity"] - attr["flow"]) + path.append(w) + # Augment flow along the path and check for saturated edges. + it = iter(path) + u = next(it) + these_orphans = [] + for v in it: + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + if R_succ[u][v]["flow"] == R_succ[u][v]["capacity"]: + if v in source_tree: + source_tree[v] = None + these_orphans.append(v) + if u in target_tree: + target_tree[u] = None + these_orphans.append(u) + u = v + orphans.extend(sorted(these_orphans, key=dist.get)) + return flow + + def adopt(): + """Adoption stage. + + Reconstruct search trees by adopting or discarding orphans. + During the augmentation stage some edges became saturated, so + the source and target search trees broke down into forests, with + orphans as the roots of some of their trees. We have to reconstruct + the search trees rooted at the source and the target before we can grow + them again.
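+ An orphan either finds a new valid parent within its own tree, or it + is removed from that tree: its children then become orphans in turn + and its neighbors with positive residual capacity are added back to + the active set.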
+ """ + while orphans: + u = orphans.popleft() + if u in source_tree: + tree = source_tree + neighbors = R_pred + else: + tree = target_tree + neighbors = R_succ + nbrs = ((n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree) + for v, attr, d in sorted(nbrs, key=itemgetter(2)): + if attr["capacity"] - attr["flow"] > 0: + if _has_valid_root(v, tree): + tree[u] = v + dist[u] = dist[v] + 1 + timestamp[u] = time + break + else: + nbrs = ( + (n, attr, dist[n]) for n, attr in neighbors[u].items() if n in tree + ) + for v, attr, d in sorted(nbrs, key=itemgetter(2)): + if attr["capacity"] - attr["flow"] > 0: + if v not in active: + active.append(v) + if tree[v] == u: + tree[v] = None + orphans.appendleft(v) + if u in active: + active.remove(u) + del tree[u] + + def _has_valid_root(n, tree): + path = [] + v = n + while v is not None: + path.append(v) + if v in (s, t): + base_dist = 0 + break + elif timestamp[v] == time: + base_dist = dist[v] + break + v = tree[v] + else: + return False + length = len(path) + for i, u in enumerate(path, 1): + dist[u] = base_dist + length - i + timestamp[u] = time + return True + + def _is_closer(u, v): + return timestamp[v] <= timestamp[u] and dist[v] > dist[u] + 1 + + source_tree = {s: None} + target_tree = {t: None} + active = deque([s, t]) + orphans = deque() + flow_value = 0 + # data structures for the marking heuristic + time = 1 + timestamp = {s: time, t: time} + dist = {s: 0, t: 0} + while flow_value < cutoff: + # Growth stage + u, v = grow() + if u is None: + break + time += 1 + # Augmentation stage + flow_value += augment(u, v) + # Adoption stage + adopt() + + if flow_value * 2 > INF: + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") + + # Add source and target tree in a graph attribute. + # A partition that defines a minimum cut can be directly + # computed from the search trees as explained in the docstrings. + R.graph["trees"] = (source_tree, target_tree) + # Add the standard flow_value graph attribute. + R.graph["flow_value"] = flow_value + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py new file mode 100644 index 0000000..bf68565 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/capacityscaling.py @@ -0,0 +1,407 @@ +""" +Capacity scaling minimum cost flow algorithm. +""" + +__all__ = ["capacity_scaling"] + +from itertools import chain +from math import log + +import networkx as nx + +from ...utils import BinaryHeap, arbitrary_element, not_implemented_for + + +def _detect_unboundedness(R): + """Detect infinite-capacity negative cycles.""" + G = nx.DiGraph() + G.add_nodes_from(R) + + # Value simulating infinity. + inf = R.graph["inf"] + # True infinity. + f_inf = float("inf") + for u in R: + for v, e in R[u].items(): + # Compute the minimum weight of infinite-capacity (u, v) edges. + w = f_inf + for k, e in e.items(): + if e["capacity"] == inf: + w = min(w, e["weight"]) + if w != f_inf: + G.add_edge(u, v, weight=w) + + if nx.negative_edge_cycle(G): + raise nx.NetworkXUnbounded( + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." 
+ ) + + +@not_implemented_for("undirected") +def _build_residual_network(G, demand, capacity, weight): + """Build a residual network and initialize a zero flow.""" + if sum(G.nodes[u].get(demand, 0) for u in G) != 0: + raise nx.NetworkXUnfeasible("Sum of the demands should be 0.") + + R = nx.MultiDiGraph() + R.add_nodes_from( + (u, {"excess": -G.nodes[u].get(demand, 0), "potential": 0}) for u in G + ) + + inf = float("inf") + # Detect selfloops with infinite capacities and negative weights. + for u, v, e in nx.selfloop_edges(G, data=True): + if e.get(weight, 0) < 0 and e.get(capacity, inf) == inf: + raise nx.NetworkXUnbounded( + "Negative cost cycle of infinite capacity found. " + "Min cost flow may be unbounded below." + ) + + # Extract edges with positive capacities. Self-loops excluded. + if G.is_multigraph(): + edge_list = [ + (u, v, k, e) + for u, v, k, e in G.edges(data=True, keys=True) + if u != v and e.get(capacity, inf) > 0 + ] + else: + edge_list = [ + (u, v, 0, e) + for u, v, e in G.edges(data=True) + if u != v and e.get(capacity, inf) > 0 + ] + # Simulate infinity with the larger of the sum of absolute node imbalances, + # the sum of finite edge capacities, or any positive value if both sums are + # zero. This allows the infinite-capacity edges to be distinguished for + # unboundedness detection and to participate directly in residual capacity + # calculation. + inf = ( + max( + sum(abs(R.nodes[u]["excess"]) for u in R), + 2 + * sum( + e[capacity] + for u, v, k, e in edge_list + if capacity in e and e[capacity] != inf + ), + ) + or 1 + ) + for u, v, k, e in edge_list: + r = min(e.get(capacity, inf), inf) + w = e.get(weight, 0) + # Add both (u, v) and (v, u) into the residual network marked with the + # original key. (key[1] == True) indicates that the edge (u, v) is in + # the original network. + R.add_edge(u, v, key=(k, True), capacity=r, weight=w, flow=0) + R.add_edge(v, u, key=(k, False), capacity=0, weight=-w, flow=0) + + # Record the value simulating infinity. + R.graph["inf"] = inf + + _detect_unboundedness(R) + + return R + + +def _build_flow_dict(G, R, capacity, weight): + """Build a flow dictionary from a residual network.""" + inf = float("inf") + flow_dict = {} + if G.is_multigraph(): + for u in G: + flow_dict[u] = {} + for v, es in G[u].items(): + flow_dict[u][v] = { + # Always saturate negative selfloops. + k: ( + 0 + if ( + u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + ) + else e[capacity] + ) + for k, e in es.items() + } + for v, es in R[u].items(): + if v in flow_dict[u]: + flow_dict[u][v].update( + (k[0], e["flow"]) for k, e in es.items() if e["flow"] > 0 + ) + else: + for u in G: + flow_dict[u] = { + # Always saturate negative selfloops. + v: ( + 0 + if (u != v or e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0) + else e[capacity] + ) + for v, e in G[u].items() + } + flow_dict[u].update( + (v, e["flow"]) + for v, es in R[u].items() + for e in es.values() + if e["flow"] > 0 + ) + return flow_dict + + +@nx._dispatchable( + node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0} +) +def capacity_scaling( + G, demand="demand", capacity="capacity", weight="weight", heap=BinaryHeap +): + r"""Find a minimum cost flow satisfying all demands in digraph G. + + This is a capacity scaling successive shortest augmenting path algorithm. + + G is a digraph with edge costs and capacities and in which nodes + have demand, i.e., they want to send or receive some amount of + flow.
A negative demand means that the node wants to send flow, a + positive demand means that the node wants to receive flow. A flow on + the digraph G satisfies all demands if the net flow into each node + is equal to the demand of that node. + + Parameters + ---------- + G : NetworkX graph + DiGraph or MultiDiGraph on which a minimum cost flow satisfying all + demands is to be found. + + demand : string + Nodes of the graph G are expected to have an attribute demand + that indicates how much flow a node wants to send (negative + demand) or receive (positive demand). Note that the sum of the + demands should be 0; otherwise the problem is not feasible. If + this attribute is not present, a node is considered to have 0 + demand. Default value: 'demand'. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + weight : string + Edges of the graph G are expected to have an attribute weight + that indicates the cost incurred by sending one unit of flow on + that edge. If not present, the weight is considered to be 0. + Default value: 'weight'. + + heap : class + Type of heap to be used in the algorithm. It should be a subclass of + :class:`MinHeap` or implement a compatible interface. + + If a stock heap implementation is to be used, :class:`BinaryHeap` is + recommended over :class:`PairingHeap` for Python implementations without + optimized attribute accesses (e.g., CPython) despite a slower + asymptotic running time. For Python implementations with optimized + attribute accesses (e.g., PyPy), :class:`PairingHeap` provides better + performance. Default value: :class:`BinaryHeap`. + + Returns + ------- + flowCost : integer + Cost of a minimum cost flow satisfying all demands. + + flowDict : dictionary + If G is a digraph, a dict-of-dicts keyed by nodes such that + flowDict[u][v] is the flow on edge (u, v). + If G is a MultiDiGraph, a dict-of-dicts-of-dicts keyed by nodes + so that flowDict[u][v][key] is the flow on edge (u, v, key). + + Raises + ------ + NetworkXError + This exception is raised if the input graph is not directed or + not connected. + + NetworkXUnfeasible + This exception is raised in the following situations: + + * The sum of the demands is not zero. Then, there is no + flow satisfying all demands. + * There is no flow satisfying all demands. + + NetworkXUnbounded + This exception is raised if the digraph G has a cycle of + negative cost and infinite capacity. Then, the cost of a flow + satisfying all demands is unbounded below. + + Notes + ----- + This algorithm does not work if edge weights are floating-point numbers. + + See also + -------- + :meth:`network_simplex` + + Examples + -------- + A simple example of a min cost flow problem. + + >>> G = nx.DiGraph() + >>> G.add_node("a", demand=-5) + >>> G.add_node("d", demand=5) + >>> G.add_edge("a", "b", weight=3, capacity=4) + >>> G.add_edge("a", "c", weight=6, capacity=10) + >>> G.add_edge("b", "d", weight=1, capacity=9) + >>> G.add_edge("c", "d", weight=2, capacity=5) + >>> flowCost, flowDict = nx.capacity_scaling(G) + >>> flowCost + 24 + >>> flowDict + {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}} + + It is possible to change the name of the attributes used for the + algorithm.
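+ Below, demands are stored in the node attribute 'spam', capacities in + the edge attribute 'vacancies', and costs in the edge attribute 'cost':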
+ + >>> G = nx.DiGraph() + >>> G.add_node("p", spam=-4) + >>> G.add_node("q", spam=2) + >>> G.add_node("a", spam=-2) + >>> G.add_node("d", spam=-1) + >>> G.add_node("t", spam=2) + >>> G.add_node("w", spam=3) + >>> G.add_edge("p", "q", cost=7, vacancies=5) + >>> G.add_edge("p", "a", cost=1, vacancies=4) + >>> G.add_edge("q", "d", cost=2, vacancies=3) + >>> G.add_edge("t", "q", cost=1, vacancies=2) + >>> G.add_edge("a", "t", cost=2, vacancies=4) + >>> G.add_edge("d", "w", cost=3, vacancies=4) + >>> G.add_edge("t", "w", cost=4, vacancies=1) + >>> flowCost, flowDict = nx.capacity_scaling( + ... G, demand="spam", capacity="vacancies", weight="cost" + ... ) + >>> flowCost + 37 + >>> flowDict + {'p': {'q': 2, 'a': 2}, 'q': {'d': 1}, 'a': {'t': 4}, 'd': {'w': 2}, 't': {'q': 1, 'w': 1}, 'w': {}} + """ + R = _build_residual_network(G, demand, capacity, weight) + + inf = float("inf") + # Account for the cost of negative selfloops. + flow_cost = sum( + 0 + if e.get(capacity, inf) <= 0 or e.get(weight, 0) >= 0 + else e[capacity] * e[weight] + for u, v, e in nx.selfloop_edges(G, data=True) + ) + + # Determine the maximum edge capacity. + wmax = max(chain([-inf], (e["capacity"] for u, v, e in R.edges(data=True)))) + if wmax == -inf: + # Residual network has no edges. + return flow_cost, _build_flow_dict(G, R, capacity, weight) + + R_nodes = R.nodes + R_succ = R.succ + + delta = 2 ** int(log(wmax, 2)) + while delta >= 1: + # Saturate Δ-residual edges with negative reduced costs to achieve + # Δ-optimality. + for u in R: + p_u = R_nodes[u]["potential"] + for v, es in R_succ[u].items(): + for k, e in es.items(): + if e["weight"] - p_u + R_nodes[v]["potential"] < 0: + flow = e["capacity"] - e["flow"] + if flow >= delta: + e["flow"] += flow + R_succ[v][u][(k[0], not k[1])]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow + # Determine the Δ-active nodes. + S = set() + T = set() + S_add = S.add + S_remove = S.remove + T_add = T.add + T_remove = T.remove + for u in R: + excess = R_nodes[u]["excess"] + if excess >= delta: + S_add(u) + elif excess <= -delta: + T_add(u) + # Repeatedly augment flow from S to T along shortest paths until + # Δ-feasibility is achieved. + while S and T: + s = arbitrary_element(S) + t = None + # Search for a shortest path in terms of reduced costs from s to + # any t in T in the Δ-residual network. + d = {} + pred = {s: None} + h = heap() + h_insert = h.insert + h_get = h.get + h_insert(s, 0) + while h: + u, d_u = h.pop() + d[u] = d_u + if u in T: + # Path found. + t = u + break + p_u = R_nodes[u]["potential"] + for v, es in R_succ[u].items(): + if v in d: + continue + wmin = inf + # Find the minimum-weighted (u, v) Δ-residual edge. + for k, e in es.items(): + if e["capacity"] - e["flow"] >= delta: + w = e["weight"] + if w < wmin: + wmin = w + kmin = k + emin = e + if wmin == inf: + continue + # Update the distance label of v. + d_v = d_u + wmin - p_u + R_nodes[v]["potential"] + if h_insert(v, d_v): + pred[v] = (u, kmin, emin) + if t is not None: + # Augment Δ units of flow from s to t. + while u != s: + v = u + u, k, e = pred[v] + e["flow"] += delta + R_succ[v][u][(k[0], not k[1])]["flow"] -= delta + # Account for node excess and deficit. + R_nodes[s]["excess"] -= delta + R_nodes[t]["excess"] += delta + if R_nodes[s]["excess"] < delta: + S_remove(s) + if R_nodes[t]["excess"] > -delta: + T_remove(t) + # Update node potentials. + d_t = d[t] + for u, d_u in d.items(): + R_nodes[u]["potential"] -= d_u - d_t + else: + # Path not found.
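+ # At this Δ no residual path from s reaches a deficit node, so + # s is dropped from the active sources for the current phase.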
+ S_remove(s) + delta //= 2 + + if any(R.nodes[u]["excess"] != 0 for u in R): + raise nx.NetworkXUnfeasible("No flow satisfying all demands.") + + # Calculate the flow cost. + for u in R: + for v, es in R_succ[u].items(): + for e in es.values(): + flow = e["flow"] + if flow > 0: + flow_cost += flow * e["weight"] + + return flow_cost, _build_flow_dict(G, R, capacity, weight) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py new file mode 100644 index 0000000..f369642 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/dinitz_alg.py @@ -0,0 +1,238 @@ +""" +Dinitz' algorithm for maximum flow problems. +""" + +from collections import deque + +import networkx as nx +from networkx.algorithms.flow.utils import build_residual_network +from networkx.utils import pairwise + +__all__ = ["dinitz"] + + +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def dinitz(G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None): + """Find a maximum single-commodity flow using Dinitz' algorithm. + + This function returns the residual network resulting after computing + the maximum flow. See below for details about the conventions + NetworkX uses for defining residual networks. + + This algorithm has a running time of $O(n^2 m)$ for $n$ nodes and $m$ + edges [1]_. + + + Parameters + ---------- + G : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + s : node + Source node for the flow. + + t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + residual : NetworkX graph + Residual network on which the algorithm is to be executed. If None, a + new residual network is created. Default value: None. + + value_only : bool + If True compute only the value of the maximum flow. This parameter + will be ignored by this algorithm because it is not applicable. + + cutoff : integer, float + If specified, the algorithm will terminate when the flow value reaches + or exceeds the cutoff. In this case, it may be unable to immediately + determine a minimum cut. Default value: None. + + Returns + ------- + R : NetworkX DiGraph + Residual network after computing the maximum flow. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. + + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow` + :meth:`minimum_cut` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Notes + ----- + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. 
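+ + For instance, both orientations of a single capacitated edge appear in + the residual network, with zero capacity on the reverse edge (a minimal + sketch): + + >>> from networkx.algorithms.flow import dinitz + >>> H = nx.DiGraph([("u", "v", {"capacity": 5})]) + >>> R = dinitz(H, "u", "v") + >>> sorted(R.edges()) + [('u', 'v'), ('v', 'u')] + >>> R["u"]["v"]["capacity"], R["v"]["u"]["capacity"] + (5, 0)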
+ + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not + specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such + that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Examples + -------- + >>> from networkx.algorithms.flow import dinitz + + The functions that implement flow algorithms and output a residual + network, such as this one, are not imported to the base NetworkX + namespace, so you have to explicitly import them from the flow package. + + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = dinitz(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value + 3.0 + >>> flow_value == R.graph["flow_value"] + True + + References + ---------- + .. [1] Dinitz' Algorithm: The Original Version and Even's Version. + 2006. Yefim Dinitz. In Theoretical Computer Science. Lecture + Notes in Computer Science. Volume 3895. pp 218-240. + https://doi.org/10.1007/11685654_10 + + """ + R = dinitz_impl(G, s, t, capacity, residual, cutoff) + R.graph["algorithm"] = "dinitz" + nx._clear_cache(R) + return R + + +def dinitz_impl(G, s, t, capacity, residual, cutoff): + if s not in G: + raise nx.NetworkXError(f"node {str(s)} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {str(t)} not in graph") + if s == t: + raise nx.NetworkXError("source and sink are the same node") + + if residual is None: + R = build_residual_network(G, capacity) + else: + R = residual + + # Initialize/reset the residual network. + for u in R: + for e in R[u].values(): + e["flow"] = 0 + + # Use an arbitrary high value as infinite. It is computed + # when building the residual network. 
+ INF = R.graph["inf"] + + if cutoff is None: + cutoff = INF + + R_succ = R.succ + R_pred = R.pred + + def breadth_first_search(): + parents = {} + vertex_dist = {s: 0} + queue = deque([(s, 0)]) + # Record all the potential edges of shortest augmenting paths + while queue: + if t in parents: + break + u, dist = queue.popleft() + for v, attr in R_succ[u].items(): + if attr["capacity"] - attr["flow"] > 0: + if v in parents: + if vertex_dist[v] == dist + 1: + parents[v].append(u) + else: + parents[v] = deque([u]) + vertex_dist[v] = dist + 1 + queue.append((v, dist + 1)) + return parents + + def depth_first_search(parents): + """Build a path using DFS starting from the sink.""" + # DFS to find all the shortest augmenting paths + total_flow = 0 + u = t + # path also functions as a stack + path = [u] + # The loop ends with no augmenting path left in the layered graph + while True: + if len(parents[u]) > 0: + v = parents[u][0] + path.append(v) + else: + path.pop() + if len(path) == 0: + break + v = path[-1] + parents[v].popleft() + # Augment the flow along the path found + if v == s: + flow = INF + for u, v in pairwise(path): + flow = min(flow, R_pred[u][v]["capacity"] - R_pred[u][v]["flow"]) + for u, v in pairwise(reversed(path)): + R_pred[v][u]["flow"] += flow + R_pred[u][v]["flow"] -= flow + # Find the proper node to continue the search + if R_pred[v][u]["capacity"] - R_pred[v][u]["flow"] == 0: + parents[v].popleft() + while path[-1] != v: + path.pop() + total_flow += flow + v = path[-1] + u = v + return total_flow + + flow_value = 0 + while flow_value < cutoff: + parents = breadth_first_search() + if t not in parents: + break + this_flow = depth_first_search(parents) + if this_flow * 2 > INF: + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") + flow_value += this_flow + + R.graph["flow_value"] = flow_value + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py new file mode 100644 index 0000000..5006326 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/edmondskarp.py @@ -0,0 +1,241 @@ +""" +Edmonds-Karp algorithm for maximum flow problems. +""" + +import networkx as nx +from networkx.algorithms.flow.utils import build_residual_network + +__all__ = ["edmonds_karp"] + + +def edmonds_karp_core(R, s, t, cutoff): + """Implementation of the Edmonds-Karp algorithm.""" + R_nodes = R.nodes + R_pred = R.pred + R_succ = R.succ + + inf = R.graph["inf"] + + def augment(path): + """Augment flow along a path from s to t.""" + # Determine the path residual capacity. + flow = inf + it = iter(path) + u = next(it) + for v in it: + attr = R_succ[u][v] + flow = min(flow, attr["capacity"] - attr["flow"]) + u = v + if flow * 2 > inf: + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") + # Augment flow along the path.
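+ # Walk the path again, pushing `flow` along each forward edge and + # subtracting the same amount from the corresponding reverse edge.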
+ it = iter(path) + u = next(it) + for v in it: + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + u = v + return flow + + def bidirectional_bfs(): + """Bidirectional breadth-first search for an augmenting path.""" + pred = {s: None} + q_s = [s] + succ = {t: None} + q_t = [t] + while True: + q = [] + if len(q_s) <= len(q_t): + for u in q_s: + for v, attr in R_succ[u].items(): + if v not in pred and attr["flow"] < attr["capacity"]: + pred[v] = u + if v in succ: + return v, pred, succ + q.append(v) + if not q: + return None, None, None + q_s = q + else: + for u in q_t: + for v, attr in R_pred[u].items(): + if v not in succ and attr["flow"] < attr["capacity"]: + succ[v] = u + if v in pred: + return v, pred, succ + q.append(v) + if not q: + return None, None, None + q_t = q + + # Look for shortest augmenting paths using breadth-first search. + flow_value = 0 + while flow_value < cutoff: + v, pred, succ = bidirectional_bfs() + if pred is None: + break + path = [v] + # Trace a path from s to v. + u = v + while u != s: + u = pred[u] + path.append(u) + path.reverse() + # Trace a path from v to t. + u = v + while u != t: + u = succ[u] + path.append(u) + flow_value += augment(path) + + return flow_value + + +def edmonds_karp_impl(G, s, t, capacity, residual, cutoff): + """Implementation of the Edmonds-Karp algorithm.""" + if s not in G: + raise nx.NetworkXError(f"node {str(s)} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {str(t)} not in graph") + if s == t: + raise nx.NetworkXError("source and sink are the same node") + + if residual is None: + R = build_residual_network(G, capacity) + else: + R = residual + + # Initialize/reset the residual network. + for u in R: + for e in R[u].values(): + e["flow"] = 0 + + if cutoff is None: + cutoff = float("inf") + R.graph["flow_value"] = edmonds_karp_core(R, s, t, cutoff) + + return R + + +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def edmonds_karp( + G, s, t, capacity="capacity", residual=None, value_only=False, cutoff=None +): + """Find a maximum single-commodity flow using the Edmonds-Karp algorithm. + + This function returns the residual network resulting after computing + the maximum flow. See below for details about the conventions + NetworkX uses for defining residual networks. + + This algorithm has a running time of $O(n m^2)$ for $n$ nodes and $m$ + edges. + + + Parameters + ---------- + G : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + s : node + Source node for the flow. + + t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + residual : NetworkX graph + Residual network on which the algorithm is to be executed. If None, a + new residual network is created. Default value: None. + + value_only : bool + If True compute only the value of the maximum flow. This parameter + will be ignored by this algorithm because it is not applicable. + + cutoff : integer, float + If specified, the algorithm will terminate when the flow value reaches + or exceeds the cutoff. In this case, it may be unable to immediately + determine a minimum cut. Default value: None. 
+ + Returns + ------- + R : NetworkX DiGraph + Residual network after computing the maximum flow. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. + + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow` + :meth:`minimum_cut` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Notes + ----- + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not + specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such + that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Examples + -------- + >>> from networkx.algorithms.flow import edmonds_karp + + The functions that implement flow algorithms and output a residual + network, such as this one, are not imported to the base NetworkX + namespace, so you have to explicitly import them from the flow package. + + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = edmonds_karp(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value + 3.0 + >>> flow_value == R.graph["flow_value"] + True + + """ + R = edmonds_karp_impl(G, s, t, capacity, residual, cutoff) + R.graph["algorithm"] = "edmonds_karp" + nx._clear_cache(R) + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py new file mode 100644 index 0000000..69913da --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/gomory_hu.py @@ -0,0 +1,178 @@ +""" +Gomory-Hu tree of undirected Graphs. 
+""" + +import networkx as nx +from networkx.utils import not_implemented_for + +from .edmondskarp import edmonds_karp +from .utils import build_residual_network + +default_flow_func = edmonds_karp + +__all__ = ["gomory_hu_tree"] + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def gomory_hu_tree(G, capacity="capacity", flow_func=None): + r"""Returns the Gomory-Hu tree of an undirected graph G. + + A Gomory-Hu tree of an undirected graph with capacities is a + weighted tree that represents the minimum s-t cuts for all s-t + pairs in the graph. + + It only requires `n-1` minimum cut computations instead of the + obvious `n(n-1)/2`. The tree represents all s-t cuts as the + minimum cut value among any pair of nodes is the minimum edge + weight in the shortest path between the two nodes in the + Gomory-Hu tree. + + The Gomory-Hu tree also has the property that removing the + edge with the minimum weight in the shortest path between + any two nodes leaves two connected components that form + a partition of the nodes in G that defines the minimum s-t + cut. + + See Examples section below for details. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + flow_func : function + Function to perform the underlying flow computations. Default value + :func:`edmonds_karp`. This function performs better in sparse graphs + with right tailed degree distributions. + :func:`shortest_augmenting_path` will perform better in denser + graphs. + + Returns + ------- + Tree : NetworkX graph + A NetworkX graph representing the Gomory-Hu tree of the input graph. + + Raises + ------ + NetworkXNotImplemented + Raised if the input graph is directed. + + NetworkXError + Raised if the input graph is an empty Graph. + + Examples + -------- + >>> G = nx.karate_club_graph() + >>> nx.set_edge_attributes(G, 1, "capacity") + >>> T = nx.gomory_hu_tree(G) + >>> # The value of the minimum cut between any pair + ... # of nodes in G is the minimum edge weight in the + ... # shortest path between the two nodes in the + ... # Gomory-Hu tree. + ... def minimum_edge_weight_in_shortest_path(T, u, v): + ... path = nx.shortest_path(T, u, v, weight="weight") + ... return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:])) + >>> u, v = 0, 33 + >>> cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v) + >>> cut_value + 10 + >>> nx.minimum_cut_value(G, u, v) + 10 + >>> # The Gomory-Hu tree also has the property that removing the + ... # edge with the minimum weight in the shortest path between + ... # any two nodes leaves two connected components that form + ... # a partition of the nodes in G that defines the minimum s-t + ... # cut. + ... cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v) + >>> T.remove_edge(*edge) + >>> U, V = list(nx.connected_components(T)) + >>> # Thus U and V form a partition that defines a minimum cut + ... # between u and v in G. You can compute the edge cut set, + ... # that is, the set of edges that if removed from G will + ... # disconnect u from v in G, with this information: + ... cutset = set() + >>> for x, nbrs in ((n, G[n]) for n in U): + ... 
cutset.update((x, y) for y in nbrs if y in V) + >>> # Because we have set the capacities of all edges to 1, + ... # the cutset contains ten edges + ... len(cutset) + 10 + >>> # You can use any maximum flow algorithm for the underlying + ... # flow computations using the argument flow_func + ... from networkx.algorithms import flow + >>> T = nx.gomory_hu_tree(G, flow_func=flow.boykov_kolmogorov) + >>> cut_value, edge = minimum_edge_weight_in_shortest_path(T, u, v) + >>> cut_value + 10 + >>> nx.minimum_cut_value(G, u, v, flow_func=flow.boykov_kolmogorov) + 10 + + Notes + ----- + This implementation is based on Gusfield's approach [1]_ to compute + Gomory-Hu trees, which does not require node contractions and has + the same computational complexity as the original method. + + See also + -------- + :func:`minimum_cut` + :func:`maximum_flow` + + References + ---------- + .. [1] Gusfield D: Very simple methods for all pairs network flow analysis. + SIAM J Comput 19(1):143-155, 1990. + + """ + if flow_func is None: + flow_func = default_flow_func + + if len(G) == 0: # empty graph + msg = "Empty Graph does not have a Gomory-Hu tree representation" + raise nx.NetworkXError(msg) + + # Start the tree as a star graph with an arbitrary node at the center + tree = {} + labels = {} + iter_nodes = iter(G) + root = next(iter_nodes) + for n in iter_nodes: + tree[n] = root + + # Reuse residual network + R = build_residual_network(G, capacity) + + # For all the leaves in the star graph tree (that is, n-1 nodes). + for source in tree: + # Find neighbor in the tree + target = tree[source] + # compute minimum cut + cut_value, partition = nx.minimum_cut( + G, source, target, capacity=capacity, flow_func=flow_func, residual=R + ) + labels[(source, target)] = cut_value + # Update the tree + # Source will always be in partition[0] and target in partition[1] + for node in partition[0]: + if node != source and node in tree and tree[node] == target: + tree[node] = source + labels[node, source] = labels.get((node, target), cut_value) + # If the target's parent lies on the source side of the cut, + # insert source between target and its former parent. + if target != root and tree[target] in partition[0]: + labels[source, tree[target]] = labels[target, tree[target]] + labels[target, source] = cut_value + tree[source] = tree[target] + tree[target] = source + + # Build the tree + T = nx.Graph() + T.add_nodes_from(G) + T.add_weighted_edges_from(((u, v, labels[u, v]) for u, v in tree.items())) + return T diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py new file mode 100644 index 0000000..93497a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/maxflow.py @@ -0,0 +1,611 @@ +""" +Maximum flow (and minimum cut) algorithms on capacitated graphs. +""" + +import networkx as nx + +from .boykovkolmogorov import boykov_kolmogorov +from .dinitz_alg import dinitz +from .edmondskarp import edmonds_karp +from .preflowpush import preflow_push +from .shortestaugmentingpath import shortest_augmenting_path +from .utils import build_flow_dict + +# Define the default flow function for computing maximum flow. +default_flow_func = preflow_push + +__all__ = ["maximum_flow", "maximum_flow_value", "minimum_cut", "minimum_cut_value"] + + +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) +def maximum_flow(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): + """Find a maximum single-commodity flow.
+ + Parameters + ---------- + flowG : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + _s : node + Source node for the flow. + + _t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + flow_func : function + A function for computing the maximum flow among a pair of nodes + in a capacitated graph. The function has to accept at least three + parameters: a Graph or DiGraph, a source node, and a target node. + It must return a residual network that follows NetworkX conventions + (see Notes). If flow_func is None, the default maximum + flow function (:meth:`preflow_push`) is used. See below for + alternative algorithms. The choice of the default function may change + from version to version and should not be relied on. Default value: + None. + + kwargs : Any other keyword parameter is passed to the function that + computes the maximum flow. + + Returns + ------- + flow_value : integer, float + Value of the maximum flow, i.e., net outflow from the source. + + flow_dict : dict + A dictionary containing the value of the flow that went through + each edge. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. + + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow_value` + :meth:`minimum_cut` + :meth:`minimum_cut_value` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Notes + ----- + The function used in the flow_func parameter has to return a residual + network that follows NetworkX conventions: + + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using + only edges :samp:`(u, v)` such that + :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Specific algorithms may store extra data in :samp:`R`. + + The function should support an optional boolean parameter value_only. When + True, it may terminate the algorithm as soon as the maximum flow + value and the minimum cut can be determined.
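+ + For instance, each residual edge and its reverse carry opposite flow + values once a flow has been computed (a minimal sketch using one of the + flow functions listed above): + + >>> from networkx.algorithms.flow import edmonds_karp + >>> H = nx.DiGraph([("s", "t", {"capacity": 4})]) + >>> R = edmonds_karp(H, "s", "t") + >>> R["s"]["t"]["flow"], R["t"]["s"]["flow"] + (4, -4)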
+ + Note that the resulting maximum flow may contain flow cycles, + back-flow to the source, or some flow exiting the sink. + These are possible if there are cycles in the network. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + + maximum_flow returns both the value of the maximum flow and a + dictionary with all flows. + + >>> flow_value, flow_dict = nx.maximum_flow(G, "x", "y") + >>> flow_value + 3.0 + >>> print(flow_dict["x"]["b"]) + 1.0 + + You can also use alternative algorithms for computing the + maximum flow by using the flow_func parameter. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> flow_value == nx.maximum_flow(G, "x", "y", flow_func=shortest_augmenting_path)[ + ... 0 + ... ] + True + + """ + if flow_func is None: + if kwargs: + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) + flow_func = default_flow_func + + if not callable(flow_func): + raise nx.NetworkXError("flow_func has to be callable.") + + R = flow_func(flowG, _s, _t, capacity=capacity, value_only=False, **kwargs) + flow_dict = build_flow_dict(flowG, R) + + return (R.graph["flow_value"], flow_dict) + + +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) +def maximum_flow_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): + """Find the value of a maximum single-commodity flow. + + Parameters + ---------- + flowG : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + _s : node + Source node for the flow. + + _t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + flow_func : function + A function for computing the maximum flow among a pair of nodes + in a capacitated graph. The function has to accept at least three + parameters: a Graph or DiGraph, a source node, and a target node. + It must return a residual network that follows NetworkX conventions + (see Notes). If flow_func is None, the default maximum + flow function (:meth:`preflow_push`) is used. See below for + alternative algorithms. The choice of the default function may change + from version to version and should not be relied on. Default value: + None. + + kwargs : Any other keyword parameter is passed to the function that + computes the maximum flow. + + Returns + ------- + flow_value : integer, float + Value of the maximum flow, i.e., net outflow from the source. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. + + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded.
+ + See also + -------- + :meth:`maximum_flow` + :meth:`minimum_cut` + :meth:`minimum_cut_value` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Notes + ----- + The function used in the flow_func parameter has to return a residual + network that follows NetworkX conventions: + + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using + only edges :samp:`(u, v)` such that + :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Specific algorithms may store extra data in :samp:`R`. + + The function should support an optional boolean parameter value_only. When + True, it may terminate the algorithm as soon as the maximum flow + value and the minimum cut can be determined. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + + maximum_flow_value computes only the value of the + maximum flow: + + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value + 3.0 + + You can also use alternative algorithms for computing the + maximum flow by using the flow_func parameter. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> flow_value == nx.maximum_flow_value( + ... G, "x", "y", flow_func=shortest_augmenting_path + ... ) + True + + """ + if flow_func is None: + if kwargs: + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) + flow_func = default_flow_func + + if not callable(flow_func): + raise nx.NetworkXError("flow_func has to be callable.") + + R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) + + return R.graph["flow_value"] + + +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) +def minimum_cut(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): + """Compute the value and the node partition of a minimum (s, t)-cut. + + Use the max-flow min-cut theorem, i.e., the capacity of a minimum + capacity cut is equal to the flow value of a maximum flow. + + Parameters + ---------- + flowG : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity.
+ + _s : node + Source node for the flow. + + _t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + flow_func : function + A function for computing the maximum flow among a pair of nodes + in a capacitated graph. The function has to accept at least three + parameters: a Graph or DiGraph, a source node, and a target node. + It must return a residual network that follows NetworkX conventions + (see Notes). If flow_func is None, the default maximum + flow function (:meth:`preflow_push`) is used. See below for + alternative algorithms. The choice of the default function may change + from version to version and should not be relied on. Default value: + None. + + kwargs : Any other keyword parameter is passed to the function that + computes the maximum flow. + + Returns + ------- + cut_value : integer, float + Value of the minimum cut. + + partition : pair of node sets + A partitioning of the nodes that defines a minimum cut. + + Raises + ------ + NetworkXUnbounded + If the graph has a path of infinite capacity, all cuts have + infinite capacity and the function raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow` + :meth:`maximum_flow_value` + :meth:`minimum_cut_value` + :meth:`edmonds_karp` + :meth:`preflow_push` + :meth:`shortest_augmenting_path` + + Notes + ----- + The function used in the flow_func parameter has to return a residual + network that follows NetworkX conventions: + + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using + only edges :samp:`(u, v)` such that + :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Specific algorithms may store extra data in :samp:`R`. + + The function should support an optional boolean parameter value_only. When + True, it may terminate the algorithm as soon as the maximum flow + value and the minimum cut can be determined.
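+ + For instance, by the max-flow min-cut theorem the cut value always + equals the maximum flow value (a minimal sketch): + + >>> H = nx.DiGraph([("s", "t", {"capacity": 3})]) + >>> nx.minimum_cut(H, "s", "t")[0] == nx.maximum_flow_value(H, "s", "t") + True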
+ + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + + minimum_cut computes both the value of the + minimum cut and the node partition: + + >>> cut_value, partition = nx.minimum_cut(G, "x", "y") + >>> reachable, non_reachable = partition + + 'partition' here is a tuple with the two sets of nodes that define + the minimum cut. You can compute the cut set of edges that induce + the minimum cut as follows: + + >>> cutset = set() + >>> for u, nbrs in ((n, G[n]) for n in reachable): + ...     cutset.update((u, v) for v in nbrs if v in non_reachable) + >>> print(sorted(cutset)) + [('c', 'y'), ('x', 'b')] + >>> cut_value == sum(G.edges[u, v]["capacity"] for (u, v) in cutset) + True + + You can also use alternative algorithms for computing the + minimum cut by using the flow_func parameter. + + >>> from networkx.algorithms.flow import shortest_augmenting_path + >>> cut_value == nx.minimum_cut(G, "x", "y", flow_func=shortest_augmenting_path)[0] + True + + """ + if flow_func is None: + if kwargs: + raise nx.NetworkXError( + "You have to explicitly set a flow_func if" + " you need to pass parameters via kwargs." + ) + flow_func = default_flow_func + + if not callable(flow_func): + raise nx.NetworkXError("flow_func has to be callable.") + + if kwargs.get("cutoff") is not None and flow_func is preflow_push: + raise nx.NetworkXError("cutoff should not be specified.") + + R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs) + # Remove saturated edges from the residual network + cutset = [(u, v, d) for u, v, d in R.edges(data=True) if d["flow"] == d["capacity"]] + R.remove_edges_from(cutset) + + # Then, the reachable and non-reachable nodes from the source in the + # residual network form the node partition that defines + # the minimum cut. + non_reachable = set(nx.shortest_path_length(R, target=_t)) + partition = (set(flowG) - non_reachable, non_reachable) + # Finally, add the cutset edges back to the residual network to make + # sure that it is reusable. + R.add_edges_from(cutset) + return (R.graph["flow_value"], partition) + + +@nx._dispatchable(graphs="flowG", edge_attrs={"capacity": float("inf")}) +def minimum_cut_value(flowG, _s, _t, capacity="capacity", flow_func=None, **kwargs): + """Compute the value of a minimum (s, t)-cut. + + Use the max-flow min-cut theorem, i.e., the capacity of a minimum + capacity cut is equal to the flow value of a maximum flow. + + Parameters + ---------- + flowG : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + _s : node + Source node for the flow. + + _t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + flow_func : function + A function for computing the maximum flow among a pair of nodes + in a capacitated graph. The function has to accept at least three + parameters: a Graph or DiGraph, a source node, and a target node.
+        It must return a residual network that follows NetworkX conventions
+        (see Notes). If flow_func is None, the default maximum
+        flow function (:meth:`preflow_push`) is used. See below for
+        alternative algorithms. The choice of the default function may change
+        from version to version and should not be relied on. Default value:
+        None.
+
+    kwargs : Any other keyword parameter is passed to the function that
+        computes the maximum flow.
+
+    Returns
+    -------
+    cut_value : integer, float
+        Value of the minimum cut.
+
+    Raises
+    ------
+    NetworkXUnbounded
+        If the graph has a path of infinite capacity, all cuts have
+        infinite capacity and the function raises a NetworkXUnbounded.
+
+    See also
+    --------
+    :meth:`maximum_flow`
+    :meth:`maximum_flow_value`
+    :meth:`minimum_cut`
+    :meth:`edmonds_karp`
+    :meth:`preflow_push`
+    :meth:`shortest_augmenting_path`
+
+    Notes
+    -----
+    The function used in the flow_func parameter has to return a residual
+    network that follows NetworkX conventions:
+
+    The residual network :samp:`R` from an input graph :samp:`G` has the
+    same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair
+    of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a
+    self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists
+    in :samp:`G`.
+
+    For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']`
+    is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists
+    in :samp:`G` or zero otherwise. If the capacity is infinite,
+    :samp:`R[u][v]['capacity']` will have a high arbitrary finite value
+    that does not affect the solution of the problem. This value is stored in
+    :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`,
+    :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and
+    satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`.
+
+    The flow value, defined as the total flow into :samp:`t`, the sink, is
+    stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using
+    only edges :samp:`(u, v)` such that
+    :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum
+    :samp:`s`-:samp:`t` cut.
+
+    Specific algorithms may store extra data in :samp:`R`.
+
+    The function should support an optional boolean parameter value_only.
+    When it is True, the function can terminate the algorithm as soon as
+    the maximum flow value and the minimum cut can be determined.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edge("x", "a", capacity=3.0)
+    >>> G.add_edge("x", "b", capacity=1.0)
+    >>> G.add_edge("a", "c", capacity=3.0)
+    >>> G.add_edge("b", "c", capacity=5.0)
+    >>> G.add_edge("b", "d", capacity=4.0)
+    >>> G.add_edge("d", "e", capacity=2.0)
+    >>> G.add_edge("c", "y", capacity=2.0)
+    >>> G.add_edge("e", "y", capacity=3.0)
+
+    minimum_cut_value computes only the value of the
+    minimum cut:
+
+    >>> cut_value = nx.minimum_cut_value(G, "x", "y")
+    >>> cut_value
+    3.0
+
+    You can also use alternative algorithms for computing the
+    minimum cut by using the flow_func parameter.
+
+    >>> from networkx.algorithms.flow import shortest_augmenting_path
+    >>> cut_value == nx.minimum_cut_value(
+    ...     G, "x", "y", flow_func=shortest_augmenting_path
+    ... )
+    True
+
+    """
+    if flow_func is None:
+        if kwargs:
+            raise nx.NetworkXError(
+                "You have to explicitly set a flow_func if"
+                " you need to pass parameters via kwargs."
+            )
+        flow_func = default_flow_func
+
+    if not callable(flow_func):
+        raise nx.NetworkXError("flow_func has to be callable.")
+
+    if kwargs.get("cutoff") is not None and flow_func is preflow_push:
+        raise nx.NetworkXError("cutoff should not be specified.")
+
+    R = flow_func(flowG, _s, _t, capacity=capacity, value_only=True, **kwargs)
+
+    return R.graph["flow_value"]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py
new file mode 100644
index 0000000..2f9390d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/mincost.py
@@ -0,0 +1,356 @@
+"""
+Minimum cost flow algorithms on directed connected graphs.
+"""
+
+__all__ = ["min_cost_flow_cost", "min_cost_flow", "cost_of_flow", "max_flow_min_cost"]
+
+import networkx as nx
+
+
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def min_cost_flow_cost(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Find the cost of a minimum cost flow satisfying all demands in digraph G.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demands if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+        demands should be 0; otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowCost : integer, float
+        Cost of a minimum cost flow satisfying all demands.
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demands.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (e.g., 100).
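+
+    For example (an editorial sketch; ``integer_scaled_copy`` is an
+    illustrative helper, not a NetworkX function), the scaling can be done
+    on a copy of the graph before solving::
+
+        def integer_scaled_copy(G, factor=100, weight="weight"):
+            # Multiply float weights by a constant and round to integers.
+            H = G.copy()
+            for u, v, d in H.edges(data=True):
+                if weight in d:
+                    d[weight] = int(round(d[weight] * factor))
+            return H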
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowCost = nx.min_cost_flow_cost(G)
+    >>> flowCost
+    24
+    """
+    return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[0]
+
+
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def min_cost_flow(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Returns a minimum cost flow satisfying all demands in digraph G.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demands if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+        demands should be 0; otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow on edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demands.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (e.g., 100).
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowDict = nx.min_cost_flow(G)
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+    """
+    return nx.network_simplex(G, demand=demand, capacity=capacity, weight=weight)[1]
+
+
+@nx._dispatchable(edge_attrs={"weight": 0})
+def cost_of_flow(G, flowDict, weight="weight"):
+    """Compute the cost of the flow given by flowDict on graph G.
+
+    Note that this function does not check for the validity of the
+    flow flowDict. This function will fail if the graph G and the
+    flow don't have the same edge set.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which the flow given by flowDict is defined.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow on edge (u, v).
+
+    Returns
+    -------
+    cost : integer, float
+        The total cost of the flow. This is given by the sum over all
+        edges of the product of the edge's flow and the edge's weight.
+
+    See also
+    --------
+    max_flow_min_cost, min_cost_flow, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (e.g., 100).
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowDict = nx.min_cost_flow(G)
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+    >>> nx.cost_of_flow(G, flowDict)
+    24
+    """
+    return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges(data=True)))
+
+
+@nx._dispatchable(edge_attrs={"capacity": float("inf"), "weight": 0})
+def max_flow_min_cost(G, s, t, capacity="capacity", weight="weight"):
+    """Returns a maximum (s, t)-flow of minimum cost.
+
+    G is a digraph with edge costs and capacities. There is a source
+    node s and a sink node t. This function finds a maximum flow from
+    s to t whose total cost is minimized.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a maximum flow of minimum cost is to be found.
+
+    s: node label
+        Source of the flow.
+
+    t: node label
+        Destination of the flow.
+
+    capacity: string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight: string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowDict: dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow on edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnbounded
+        This exception is raised if there is an infinite capacity path
+        from s to t in G. In this case there is no maximum flow. This
+        exception is also raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        is unbounded below.
+
+    See also
+    --------
+    cost_of_flow, min_cost_flow, min_cost_flow_cost, network_simplex
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (e.g., 100).
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from(
+    ...     [
+    ...         (1, 2, {"capacity": 12, "weight": 4}),
+    ...         (1, 3, {"capacity": 20, "weight": 6}),
+    ...         (2, 3, {"capacity": 6, "weight": -3}),
+    ...         (2, 6, {"capacity": 14, "weight": 1}),
+    ...         (3, 4, {"weight": 9}),
+    ...         (3, 5, {"capacity": 10, "weight": 5}),
+    ...         (4, 2, {"capacity": 19, "weight": 13}),
+    ...         (4, 5, {"capacity": 4, "weight": 0}),
+    ...         (5, 7, {"capacity": 28, "weight": 2}),
+    ...         (6, 5, {"capacity": 11, "weight": 1}),
+    ...         (6, 7, {"weight": 8}),
+    ...         (7, 4, {"capacity": 6, "weight": 6}),
+    ...     ]
+    ... )
+    >>> mincostFlow = nx.max_flow_min_cost(G, 1, 7)
+    >>> mincost = nx.cost_of_flow(G, mincostFlow)
+    >>> mincost
+    373
+    >>> from networkx.algorithms.flow import maximum_flow
+    >>> maxFlow = maximum_flow(G, 1, 7)[1]
+    >>> nx.cost_of_flow(G, maxFlow) >= mincost
+    True
+    >>> mincostFlowValue = sum((mincostFlow[u][7] for u in G.predecessors(7))) - sum(
+    ...     (mincostFlow[7][v] for v in G.successors(7))
+    ... )
+    >>> mincostFlowValue == nx.maximum_flow_value(G, 1, 7)
+    True
+
+    """
+    maxFlow = nx.maximum_flow_value(G, s, t, capacity=capacity)
+    H = nx.DiGraph(G)
+    H.add_node(s, demand=-maxFlow)
+    H.add_node(t, demand=maxFlow)
+    return min_cost_flow(H, capacity=capacity, weight=weight)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py
new file mode 100644
index 0000000..5baa976
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/networksimplex.py
@@ -0,0 +1,662 @@
+"""
+Minimum cost flow algorithms on directed connected graphs.
+""" + +__all__ = ["network_simplex"] + +from itertools import chain, islice, repeat +from math import ceil, sqrt + +import networkx as nx +from networkx.utils import not_implemented_for + + +class _DataEssentialsAndFunctions: + def __init__( + self, G, multigraph, demand="demand", capacity="capacity", weight="weight" + ): + # Number all nodes and edges and hereafter reference them using ONLY their numbers + self.node_list = list(G) # nodes + self.node_indices = {u: i for i, u in enumerate(self.node_list)} # node indices + self.node_demands = [ + G.nodes[u].get(demand, 0) for u in self.node_list + ] # node demands + + self.edge_sources = [] # edge sources + self.edge_targets = [] # edge targets + if multigraph: + self.edge_keys = [] # edge keys + self.edge_indices = {} # edge indices + self.edge_capacities = [] # edge capacities + self.edge_weights = [] # edge weights + + if not multigraph: + edges = G.edges(data=True) + else: + edges = G.edges(data=True, keys=True) + + inf = float("inf") + edges = (e for e in edges if e[0] != e[1] and e[-1].get(capacity, inf) != 0) + for i, e in enumerate(edges): + self.edge_sources.append(self.node_indices[e[0]]) + self.edge_targets.append(self.node_indices[e[1]]) + if multigraph: + self.edge_keys.append(e[2]) + self.edge_indices[e[:-1]] = i + self.edge_capacities.append(e[-1].get(capacity, inf)) + self.edge_weights.append(e[-1].get(weight, 0)) + + # spanning tree specific data to be initialized + + self.edge_count = None # number of edges + self.edge_flow = None # edge flows + self.node_potentials = None # node potentials + self.parent = None # parent nodes + self.parent_edge = None # edges to parents + self.subtree_size = None # subtree sizes + self.next_node_dft = None # next nodes in depth-first thread + self.prev_node_dft = None # previous nodes in depth-first thread + self.last_descendent_dft = None # last descendants in depth-first thread + self._spanning_tree_initialized = ( + False # False until initialize_spanning_tree() is called + ) + + def initialize_spanning_tree(self, n, faux_inf): + self.edge_count = len(self.edge_indices) # number of edges + self.edge_flow = list( + chain(repeat(0, self.edge_count), (abs(d) for d in self.node_demands)) + ) # edge flows + self.node_potentials = [ + faux_inf if d <= 0 else -faux_inf for d in self.node_demands + ] # node potentials + self.parent = list(chain(repeat(-1, n), [None])) # parent nodes + self.parent_edge = list( + range(self.edge_count, self.edge_count + n) + ) # edges to parents + self.subtree_size = list(chain(repeat(1, n), [n + 1])) # subtree sizes + self.next_node_dft = list( + chain(range(1, n), [-1, 0]) + ) # next nodes in depth-first thread + self.prev_node_dft = list(range(-1, n)) # previous nodes in depth-first thread + self.last_descendent_dft = list( + chain(range(n), [n - 1]) + ) # last descendants in depth-first thread + self._spanning_tree_initialized = True # True only if all the assignments pass + + def find_apex(self, p, q): + """ + Find the lowest common ancestor of nodes p and q in the spanning tree. 
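+
+        Subtree sizes strictly increase towards the root, so the search
+        repeatedly advances whichever of p and q currently has the smaller
+        subtree until both arrive at the same node.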
+ """ + size_p = self.subtree_size[p] + size_q = self.subtree_size[q] + while True: + while size_p < size_q: + p = self.parent[p] + size_p = self.subtree_size[p] + while size_p > size_q: + q = self.parent[q] + size_q = self.subtree_size[q] + if size_p == size_q: + if p != q: + p = self.parent[p] + size_p = self.subtree_size[p] + q = self.parent[q] + size_q = self.subtree_size[q] + else: + return p + + def trace_path(self, p, w): + """ + Returns the nodes and edges on the path from node p to its ancestor w. + """ + Wn = [p] + We = [] + while p != w: + We.append(self.parent_edge[p]) + p = self.parent[p] + Wn.append(p) + return Wn, We + + def find_cycle(self, i, p, q): + """ + Returns the nodes and edges on the cycle containing edge i == (p, q) + when the latter is added to the spanning tree. + + The cycle is oriented in the direction from p to q. + """ + w = self.find_apex(p, q) + Wn, We = self.trace_path(p, w) + Wn.reverse() + We.reverse() + if We != [i]: + We.append(i) + WnR, WeR = self.trace_path(q, w) + del WnR[-1] + Wn += WnR + We += WeR + return Wn, We + + def augment_flow(self, Wn, We, f): + """ + Augment f units of flow along a cycle represented by Wn and We. + """ + for i, p in zip(We, Wn): + if self.edge_sources[i] == p: + self.edge_flow[i] += f + else: + self.edge_flow[i] -= f + + def trace_subtree(self, p): + """ + Yield the nodes in the subtree rooted at a node p. + """ + yield p + l = self.last_descendent_dft[p] + while p != l: + p = self.next_node_dft[p] + yield p + + def remove_edge(self, s, t): + """ + Remove an edge (s, t) where parent[t] == s from the spanning tree. + """ + size_t = self.subtree_size[t] + prev_t = self.prev_node_dft[t] + last_t = self.last_descendent_dft[t] + next_last_t = self.next_node_dft[last_t] + # Remove (s, t). + self.parent[t] = None + self.parent_edge[t] = None + # Remove the subtree rooted at t from the depth-first thread. + self.next_node_dft[prev_t] = next_last_t + self.prev_node_dft[next_last_t] = prev_t + self.next_node_dft[last_t] = t + self.prev_node_dft[t] = last_t + # Update the subtree sizes and last descendants of the (old) ancestors + # of t. + while s is not None: + self.subtree_size[s] -= size_t + if self.last_descendent_dft[s] == last_t: + self.last_descendent_dft[s] = prev_t + s = self.parent[s] + + def make_root(self, q): + """ + Make a node q the root of its containing subtree. + """ + ancestors = [] + while q is not None: + ancestors.append(q) + q = self.parent[q] + ancestors.reverse() + for p, q in zip(ancestors, islice(ancestors, 1, None)): + size_p = self.subtree_size[p] + last_p = self.last_descendent_dft[p] + prev_q = self.prev_node_dft[q] + last_q = self.last_descendent_dft[q] + next_last_q = self.next_node_dft[last_q] + # Make p a child of q. + self.parent[p] = q + self.parent[q] = None + self.parent_edge[p] = self.parent_edge[q] + self.parent_edge[q] = None + self.subtree_size[p] = size_p - self.subtree_size[q] + self.subtree_size[q] = size_p + # Remove the subtree rooted at q from the depth-first thread. + self.next_node_dft[prev_q] = next_last_q + self.prev_node_dft[next_last_q] = prev_q + self.next_node_dft[last_q] = q + self.prev_node_dft[q] = last_q + if last_p == last_q: + self.last_descendent_dft[p] = prev_q + last_p = prev_q + # Add the remaining parts of the subtree rooted at p as a subtree + # of q in the depth-first thread. 
+            self.prev_node_dft[p] = last_q
+            self.next_node_dft[last_q] = p
+            self.next_node_dft[last_p] = q
+            self.prev_node_dft[q] = last_p
+            self.last_descendent_dft[q] = last_p
+
+    def add_edge(self, i, p, q):
+        """
+        Add an edge (p, q) to the spanning tree where q is the root of a subtree.
+        """
+        last_p = self.last_descendent_dft[p]
+        next_last_p = self.next_node_dft[last_p]
+        size_q = self.subtree_size[q]
+        last_q = self.last_descendent_dft[q]
+        # Make q a child of p.
+        self.parent[q] = p
+        self.parent_edge[q] = i
+        # Insert the subtree rooted at q into the depth-first thread.
+        self.next_node_dft[last_p] = q
+        self.prev_node_dft[q] = last_p
+        self.prev_node_dft[next_last_p] = last_q
+        self.next_node_dft[last_q] = next_last_p
+        # Update the subtree sizes and last descendants of the (new) ancestors
+        # of q.
+        while p is not None:
+            self.subtree_size[p] += size_q
+            if self.last_descendent_dft[p] == last_p:
+                self.last_descendent_dft[p] = last_q
+            p = self.parent[p]
+
+    def update_potentials(self, i, p, q):
+        """
+        Update the potentials of the nodes in the subtree rooted at a node
+        q connected to its parent p by an edge i.
+        """
+        if q == self.edge_targets[i]:
+            d = self.node_potentials[p] - self.edge_weights[i] - self.node_potentials[q]
+        else:
+            d = self.node_potentials[p] + self.edge_weights[i] - self.node_potentials[q]
+        for q in self.trace_subtree(q):
+            self.node_potentials[q] += d
+
+    def reduced_cost(self, i):
+        """Returns the reduced cost of an edge i."""
+        c = (
+            self.edge_weights[i]
+            - self.node_potentials[self.edge_sources[i]]
+            + self.node_potentials[self.edge_targets[i]]
+        )
+        return c if self.edge_flow[i] == 0 else -c
+
+    def find_entering_edges(self):
+        """Yield entering edges until none can be found."""
+        if self.edge_count == 0:
+            return
+
+        # Entering edges are found by combining Dantzig's rule and Bland's
+        # rule. The edges are cyclically grouped into blocks of size B. Within
+        # each block, Dantzig's rule is applied to find an entering edge. The
+        # block to search next is determined following Bland's rule.
+        B = int(ceil(sqrt(self.edge_count)))  # pivot block size
+        M = (self.edge_count + B - 1) // B  # number of blocks needed to cover all edges
+        m = 0  # number of consecutive blocks without eligible
+        # entering edges
+        f = 0  # first edge in block
+        while m < M:
+            # Determine the next block of edges.
+            l = f + B
+            if l <= self.edge_count:
+                edges = range(f, l)
+            else:
+                l -= self.edge_count
+                edges = chain(range(f, self.edge_count), range(l))
+            f = l
+            # Find the first edge with the lowest reduced cost.
+            i = min(edges, key=self.reduced_cost)
+            c = self.reduced_cost(i)
+            if c >= 0:
+                # No entering edge found in the current block.
+                m += 1
+            else:
+                # Entering edge found.
+                if self.edge_flow[i] == 0:
+                    p = self.edge_sources[i]
+                    q = self.edge_targets[i]
+                else:
+                    p = self.edge_targets[i]
+                    q = self.edge_sources[i]
+                yield i, p, q
+                m = 0
+        # All edges have nonnegative reduced costs. The current flow is
+        # optimal.
+
+    def residual_capacity(self, i, p):
+        """Returns the residual capacity of an edge i in the direction away
+        from its endpoint p.
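+
+        For an edge oriented out of p this is its unused capacity; for an
+        edge into p it is the flow that could be cancelled.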
+        """
+        return (
+            self.edge_capacities[i] - self.edge_flow[i]
+            if self.edge_sources[i] == p
+            else self.edge_flow[i]
+        )
+
+    def find_leaving_edge(self, Wn, We):
+        """Returns the leaving edge in a cycle represented by Wn and We."""
+        j, s = min(
+            zip(reversed(We), reversed(Wn)),
+            key=lambda i_p: self.residual_capacity(*i_p),
+        )
+        t = self.edge_targets[j] if self.edge_sources[j] == s else self.edge_sources[j]
+        return j, s, t
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(
+    node_attrs="demand", edge_attrs={"capacity": float("inf"), "weight": 0}
+)
+def network_simplex(G, demand="demand", capacity="capacity", weight="weight"):
+    r"""Find a minimum cost flow satisfying all demands in digraph G.
+
+    This is a primal network simplex algorithm that uses the leaving
+    arc rule to prevent cycling.
+
+    G is a digraph with edge costs and capacities and in which nodes
+    have demand, i.e., they want to send or receive some amount of
+    flow. A negative demand means that the node wants to send flow, a
+    positive demand means that the node wants to receive flow. A flow on
+    the digraph G satisfies all demands if the net flow into each node
+    is equal to the demand of that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        DiGraph on which a minimum cost flow satisfying all demands is
+        to be found.
+
+    demand : string
+        Nodes of the graph G are expected to have an attribute demand
+        that indicates how much flow a node wants to send (negative
+        demand) or receive (positive demand). Note that the sum of the
+        demands should be 0; otherwise the problem is not feasible. If
+        this attribute is not present, a node is considered to have 0
+        demand. Default value: 'demand'.
+
+    capacity : string
+        Edges of the graph G are expected to have an attribute capacity
+        that indicates how much flow the edge can support. If this
+        attribute is not present, the edge is considered to have
+        infinite capacity. Default value: 'capacity'.
+
+    weight : string
+        Edges of the graph G are expected to have an attribute weight
+        that indicates the cost incurred by sending one unit of flow on
+        that edge. If not present, the weight is considered to be 0.
+        Default value: 'weight'.
+
+    Returns
+    -------
+    flowCost : integer, float
+        Cost of a minimum cost flow satisfying all demands.
+
+    flowDict : dictionary
+        Dictionary of dictionaries keyed by nodes such that
+        flowDict[u][v] is the flow on edge (u, v).
+
+    Raises
+    ------
+    NetworkXError
+        This exception is raised if the input graph is not directed or
+        not connected.
+
+    NetworkXUnfeasible
+        This exception is raised in the following situations:
+
+            * The sum of the demands is not zero. Then, there is no
+              flow satisfying all demands.
+            * There is no flow satisfying all demands.
+
+    NetworkXUnbounded
+        This exception is raised if the digraph G has a cycle of
+        negative cost and infinite capacity. Then, the cost of a flow
+        satisfying all demands is unbounded below.
+
+    Notes
+    -----
+    This algorithm is not guaranteed to work if edge weights or demands
+    are floating point numbers (overflows and roundoff errors can
+    cause problems). As a workaround you can use integer numbers by
+    multiplying the relevant edge attributes by a convenient
+    constant factor (e.g., 100).
+
+    See also
+    --------
+    cost_of_flow, max_flow_min_cost, min_cost_flow, min_cost_flow_cost
+
+    Examples
+    --------
+    A simple example of a min cost flow problem.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("a", demand=-5)
+    >>> G.add_node("d", demand=5)
+    >>> G.add_edge("a", "b", weight=3, capacity=4)
+    >>> G.add_edge("a", "c", weight=6, capacity=10)
+    >>> G.add_edge("b", "d", weight=1, capacity=9)
+    >>> G.add_edge("c", "d", weight=2, capacity=5)
+    >>> flowCost, flowDict = nx.network_simplex(G)
+    >>> flowCost
+    24
+    >>> flowDict
+    {'a': {'b': 4, 'c': 1}, 'd': {}, 'b': {'d': 4}, 'c': {'d': 1}}
+
+    The min cost flow algorithm can also be used to solve shortest path
+    problems. To find the shortest path between two nodes u and v,
+    give all edges an infinite capacity, give node u a demand of -1 and
+    node v a demand of 1. Then run the network simplex. The value of a
+    min cost flow will be the distance between u and v, and edges
+    carrying positive flow will indicate the path.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     [
+    ...         ("s", "u", 10),
+    ...         ("s", "x", 5),
+    ...         ("u", "v", 1),
+    ...         ("u", "x", 2),
+    ...         ("v", "y", 1),
+    ...         ("x", "u", 3),
+    ...         ("x", "v", 5),
+    ...         ("x", "y", 2),
+    ...         ("y", "s", 7),
+    ...         ("y", "v", 6),
+    ...     ]
+    ... )
+    >>> G.add_node("s", demand=-1)
+    >>> G.add_node("v", demand=1)
+    >>> flowCost, flowDict = nx.network_simplex(G)
+    >>> flowCost == nx.shortest_path_length(G, "s", "v", weight="weight")
+    True
+    >>> sorted([(u, v) for u in flowDict for v in flowDict[u] if flowDict[u][v] > 0])
+    [('s', 'x'), ('u', 'v'), ('x', 'u')]
+    >>> nx.shortest_path(G, "s", "v", weight="weight")
+    ['s', 'x', 'u', 'v']
+
+    It is possible to change the name of the attributes used for the
+    algorithm.
+
+    >>> G = nx.DiGraph()
+    >>> G.add_node("p", spam=-4)
+    >>> G.add_node("q", spam=2)
+    >>> G.add_node("a", spam=-2)
+    >>> G.add_node("d", spam=-1)
+    >>> G.add_node("t", spam=2)
+    >>> G.add_node("w", spam=3)
+    >>> G.add_edge("p", "q", cost=7, vacancies=5)
+    >>> G.add_edge("p", "a", cost=1, vacancies=4)
+    >>> G.add_edge("q", "d", cost=2, vacancies=3)
+    >>> G.add_edge("t", "q", cost=1, vacancies=2)
+    >>> G.add_edge("a", "t", cost=2, vacancies=4)
+    >>> G.add_edge("d", "w", cost=3, vacancies=4)
+    >>> G.add_edge("t", "w", cost=4, vacancies=1)
+    >>> flowCost, flowDict = nx.network_simplex(
+    ...     G, demand="spam", capacity="vacancies", weight="cost"
+    ... )
+    >>> flowCost
+    37
+    >>> flowDict
+    {'p': {'q': 2, 'a': 2}, 'q': {'d': 1}, 'a': {'t': 4}, 'd': {'w': 2}, 't': {'q': 1, 'w': 1}, 'w': {}}
+
+    References
+    ----------
+    .. [1] Z. Kiraly, P. Kovacs.
+           Efficient implementation of minimum-cost flow algorithms.
+           Acta Universitatis Sapientiae, Informatica 4(1):67--118. 2012.
+    .. [2] R. Barr, F. Glover, D. Klingman.
+           Enhancement of spanning tree labeling procedures for network
+           optimization.
+           INFOR 17(1):16--34. 1979.
+    """
+    ###########################################################################
+    # Problem essentials extraction and sanity check
+    ###########################################################################
+
+    if len(G) == 0:
+        raise nx.NetworkXError("graph has no nodes")
+
+    multigraph = G.is_multigraph()
+
+    # extracting data essential to problem
+    DEAF = _DataEssentialsAndFunctions(
+        G, multigraph, demand=demand, capacity=capacity, weight=weight
+    )
+
+    ###########################################################################
+    # Quick Error Detection
+    ###########################################################################
+
+    inf = float("inf")
+    for u, d in zip(DEAF.node_list, DEAF.node_demands):
+        if abs(d) == inf:
+            raise nx.NetworkXError(f"node {u!r} has infinite demand")
+    for e, w in zip(DEAF.edge_indices, DEAF.edge_weights):
+        if abs(w) == inf:
+            raise nx.NetworkXError(f"edge {e!r} has infinite weight")
+    if not multigraph:
+        edges = nx.selfloop_edges(G, data=True)
+    else:
+        edges = nx.selfloop_edges(G, data=True, keys=True)
+    for e in edges:
+        if abs(e[-1].get(weight, 0)) == inf:
+            raise nx.NetworkXError(f"edge {e[:-1]!r} has infinite weight")
+
+    ###########################################################################
+    # Quick Infeasibility Detection
+    ###########################################################################
+
+    if sum(DEAF.node_demands) != 0:
+        raise nx.NetworkXUnfeasible("total node demand is not zero")
+    for e, c in zip(DEAF.edge_indices, DEAF.edge_capacities):
+        if c < 0:
+            raise nx.NetworkXUnfeasible(f"edge {e!r} has negative capacity")
+    if not multigraph:
+        edges = nx.selfloop_edges(G, data=True)
+    else:
+        edges = nx.selfloop_edges(G, data=True, keys=True)
+    for e in edges:
+        if e[-1].get(capacity, inf) < 0:
+            raise nx.NetworkXUnfeasible(f"edge {e[:-1]!r} has negative capacity")
+
+    ###########################################################################
+    # Initialization
+    ###########################################################################
+
+    # Add a dummy node -1 and connect all existing nodes to it with infinite-
+    # capacity dummy edges. Node -1 will serve as the root of the
+    # spanning tree of the network simplex method. The new edges will be used
+    # to trivially satisfy the node demands and create an initial strongly
+    # feasible spanning tree.
+    for i, d in enumerate(DEAF.node_demands):
+        # Must be greater-than here. Zero-demand nodes must have
+        # edges pointing towards the root to ensure strong feasibility.
+        if d > 0:
+            DEAF.edge_sources.append(-1)
+            DEAF.edge_targets.append(i)
+        else:
+            DEAF.edge_sources.append(i)
+            DEAF.edge_targets.append(-1)
+    faux_inf = (
+        3
+        * max(
+            sum(c for c in DEAF.edge_capacities if c < inf),
+            sum(abs(w) for w in DEAF.edge_weights),
+            sum(abs(d) for d in DEAF.node_demands),
+        )
+        or 1
+    )
+
+    n = len(DEAF.node_list)  # number of nodes
+    DEAF.edge_weights.extend(repeat(faux_inf, n))
+    DEAF.edge_capacities.extend(repeat(faux_inf, n))
+
+    # Construct the initial spanning tree.
+    DEAF.initialize_spanning_tree(n, faux_inf)
+
+    ###########################################################################
+    # Pivot loop
+    ###########################################################################
+
+    for i, p, q in DEAF.find_entering_edges():
+        Wn, We = DEAF.find_cycle(i, p, q)
+        j, s, t = DEAF.find_leaving_edge(Wn, We)
+        DEAF.augment_flow(Wn, We, DEAF.residual_capacity(j, s))
+        # Do nothing more if the entering edge is the same as the leaving edge.
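+        # (In that case the entering edge itself was the bottleneck of the
+        # cycle; the augmentation above already saturated it, so the
+        # spanning tree is unchanged.)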
+ if i != j: + if DEAF.parent[t] != s: + # Ensure that s is the parent of t. + s, t = t, s + if We.index(i) > We.index(j): + # Ensure that q is in the subtree rooted at t. + p, q = q, p + DEAF.remove_edge(s, t) + DEAF.make_root(q) + DEAF.add_edge(i, p, q) + DEAF.update_potentials(i, p, q) + + ########################################################################### + # Infeasibility and unboundedness detection + ########################################################################### + + if any(DEAF.edge_flow[i] != 0 for i in range(-n, 0)): + raise nx.NetworkXUnfeasible("no flow satisfies all node demands") + + if any(DEAF.edge_flow[i] * 2 >= faux_inf for i in range(DEAF.edge_count)) or any( + e[-1].get(capacity, inf) == inf and e[-1].get(weight, 0) < 0 + for e in nx.selfloop_edges(G, data=True) + ): + raise nx.NetworkXUnbounded("negative cycle with infinite capacity found") + + ########################################################################### + # Flow cost calculation and flow dict construction + ########################################################################### + + del DEAF.edge_flow[DEAF.edge_count :] + flow_cost = sum(w * x for w, x in zip(DEAF.edge_weights, DEAF.edge_flow)) + flow_dict = {n: {} for n in DEAF.node_list} + + def add_entry(e): + """Add a flow dict entry.""" + d = flow_dict[e[0]] + for k in e[1:-2]: + try: + d = d[k] + except KeyError: + t = {} + d[k] = t + d = t + d[e[-2]] = e[-1] + + DEAF.edge_sources = ( + DEAF.node_list[s] for s in DEAF.edge_sources + ) # Use original nodes. + DEAF.edge_targets = ( + DEAF.node_list[t] for t in DEAF.edge_targets + ) # Use original nodes. + if not multigraph: + for e in zip(DEAF.edge_sources, DEAF.edge_targets, DEAF.edge_flow): + add_entry(e) + edges = G.edges(data=True) + else: + for e in zip( + DEAF.edge_sources, DEAF.edge_targets, DEAF.edge_keys, DEAF.edge_flow + ): + add_entry(e) + edges = G.edges(data=True, keys=True) + for e in edges: + if e[0] != e[1]: + if e[-1].get(capacity, inf) == 0: + add_entry(e[:-1] + (0,)) + else: + w = e[-1].get(weight, 0) + if w >= 0: + add_entry(e[:-1] + (0,)) + else: + c = e[-1][capacity] + flow_cost += w * c + add_entry(e[:-1] + (c,)) + + return flow_cost, flow_dict diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py new file mode 100644 index 0000000..42cadc2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/preflowpush.py @@ -0,0 +1,425 @@ +""" +Highest-label preflow-push algorithm for maximum flow problems. 
+""" + +from collections import deque +from itertools import islice + +import networkx as nx + +from ...utils import arbitrary_element +from .utils import ( + CurrentEdge, + GlobalRelabelThreshold, + Level, + build_residual_network, + detect_unboundedness, +) + +__all__ = ["preflow_push"] + + +def preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only): + """Implementation of the highest-label preflow-push algorithm.""" + if s not in G: + raise nx.NetworkXError(f"node {str(s)} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {str(t)} not in graph") + if s == t: + raise nx.NetworkXError("source and sink are the same node") + + if global_relabel_freq is None: + global_relabel_freq = 0 + if global_relabel_freq < 0: + raise nx.NetworkXError("global_relabel_freq must be nonnegative.") + + if residual is None: + R = build_residual_network(G, capacity) + else: + R = residual + + detect_unboundedness(R, s, t) + + R_nodes = R.nodes + R_pred = R.pred + R_succ = R.succ + + # Initialize/reset the residual network. + for u in R: + R_nodes[u]["excess"] = 0 + for e in R_succ[u].values(): + e["flow"] = 0 + + def reverse_bfs(src): + """Perform a reverse breadth-first search from src in the residual + network. + """ + heights = {src: 0} + q = deque([(src, 0)]) + while q: + u, height = q.popleft() + height += 1 + for v, attr in R_pred[u].items(): + if v not in heights and attr["flow"] < attr["capacity"]: + heights[v] = height + q.append((v, height)) + return heights + + # Initialize heights of the nodes. + heights = reverse_bfs(t) + + if s not in heights: + # t is not reachable from s in the residual network. The maximum flow + # must be zero. + R.graph["flow_value"] = 0 + return R + + n = len(R) + # max_height represents the height of the highest level below level n with + # at least one active node. + max_height = max(heights[u] for u in heights if u != s) + heights[s] = n + + grt = GlobalRelabelThreshold(n, R.size(), global_relabel_freq) + + # Initialize heights and 'current edge' data structures of the nodes. + for u in R: + R_nodes[u]["height"] = heights[u] if u in heights else n + 1 + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) + + def push(u, v, flow): + """Push flow units of flow from u to v.""" + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + R_nodes[u]["excess"] -= flow + R_nodes[v]["excess"] += flow + + # The maximum flow must be nonzero now. Initialize the preflow by + # saturating all edges emanating from s. + for u, attr in R_succ[s].items(): + flow = attr["capacity"] + if flow > 0: + push(s, u, flow) + + # Partition nodes into levels. + levels = [Level() for i in range(2 * n)] + for u in R: + if u != s and u != t: + level = levels[R_nodes[u]["height"]] + if R_nodes[u]["excess"] > 0: + level.active.add(u) + else: + level.inactive.add(u) + + def activate(v): + """Move a node from the inactive set to the active set of its level.""" + if v != s and v != t: + level = levels[R_nodes[v]["height"]] + if v in level.inactive: + level.inactive.remove(v) + level.active.add(v) + + def relabel(u): + """Relabel a node to create an admissible edge.""" + grt.add_work(len(R_succ[u])) + return ( + min( + R_nodes[v]["height"] + for v, attr in R_succ[u].items() + if attr["flow"] < attr["capacity"] + ) + + 1 + ) + + def discharge(u, is_phase1): + """Discharge a node until it becomes inactive or, during phase 1 (see + below), its height reaches at least n. The node is known to have the + largest height among active nodes. 
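+
+        During phase 1, discharging stops early once the node's height
+        reaches n - 1; such a node is already known to lie on the source
+        side of the minimum cut and is left alone until phase 2.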
+        """
+        height = R_nodes[u]["height"]
+        curr_edge = R_nodes[u]["curr_edge"]
+        # next_height represents the next height to examine after discharging
+        # the current node. During phase 1, it is capped to below n.
+        next_height = height
+        levels[height].active.remove(u)
+        while True:
+            v, attr = curr_edge.get()
+            if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]:
+                flow = min(R_nodes[u]["excess"], attr["capacity"] - attr["flow"])
+                push(u, v, flow)
+                activate(v)
+                if R_nodes[u]["excess"] == 0:
+                    # The node has become inactive.
+                    levels[height].inactive.add(u)
+                    break
+            try:
+                curr_edge.move_to_next()
+            except StopIteration:
+                # We have run off the end of the adjacency list, and there can
+                # be no more admissible edges. Relabel the node to create one.
+                height = relabel(u)
+                if is_phase1 and height >= n - 1:
+                    # Although the node is still active, with a height at least
+                    # n - 1, it is now known to be on the s side of the minimum
+                    # s-t cut. Stop processing it until phase 2.
+                    levels[height].active.add(u)
+                    break
+                # The first relabel operation after global relabeling may not
+                # increase the height of the node since the 'current edge' data
+                # structure is not rewound. Use height instead of (height - 1)
+                # in case other active nodes at the same level are missed.
+                next_height = height
+        R_nodes[u]["height"] = height
+        return next_height
+
+    def gap_heuristic(height):
+        """Apply the gap heuristic."""
+        # Move all nodes at levels (height + 1) through max_height to
+        # level n + 1.
+        for level in islice(levels, height + 1, max_height + 1):
+            for u in level.active:
+                R_nodes[u]["height"] = n + 1
+            for u in level.inactive:
+                R_nodes[u]["height"] = n + 1
+            levels[n + 1].active.update(level.active)
+            level.active.clear()
+            levels[n + 1].inactive.update(level.inactive)
+            level.inactive.clear()
+
+    def global_relabel(from_sink):
+        """Apply the global relabeling heuristic."""
+        src = t if from_sink else s
+        heights = reverse_bfs(src)
+        if not from_sink:
+            # s must be reachable from t. Remove t explicitly.
+            del heights[t]
+        max_height = max(heights.values())
+        if from_sink:
+            # Also mark nodes from which t is unreachable for relabeling. This
+            # serves the same purpose as the gap heuristic.
+            for u in R:
+                if u not in heights and R_nodes[u]["height"] < n:
+                    heights[u] = n + 1
+        else:
+            # Shift the computed heights because the height of s is n.
+            for u in heights:
+                heights[u] += n
+            max_height += n
+        del heights[src]
+        for u, new_height in heights.items():
+            old_height = R_nodes[u]["height"]
+            if new_height != old_height:
+                if u in levels[old_height].active:
+                    levels[old_height].active.remove(u)
+                    levels[new_height].active.add(u)
+                else:
+                    levels[old_height].inactive.remove(u)
+                    levels[new_height].inactive.add(u)
+                R_nodes[u]["height"] = new_height
+        return max_height
+
+    # Phase 1: Find the maximum preflow by pushing as much flow as possible to
+    # t.
+
+    height = max_height
+    while height > 0:
+        # Discharge active nodes in the current level.
+        while True:
+            level = levels[height]
+            if not level.active:
+                # All active nodes in the current level have been discharged.
+                # Move to the next lower level.
+                height -= 1
+                break
+            # Record the old height and level for the gap heuristic.
+            old_height = height
+            old_level = level
+            u = arbitrary_element(level.active)
+            height = discharge(u, True)
+            if grt.is_reached():
+                # Global relabeling heuristic: Recompute the exact heights of
+                # all nodes.
+ height = global_relabel(True) + max_height = height + grt.clear_work() + elif not old_level.active and not old_level.inactive: + # Gap heuristic: If the level at old_height is empty (a 'gap'), + # a minimum cut has been identified. All nodes with heights + # above old_height can have their heights set to n + 1 and not + # be further processed before a maximum preflow is found. + gap_heuristic(old_height) + height = old_height - 1 + max_height = height + else: + # Update the height of the highest level with at least one + # active node. + max_height = max(max_height, height) + + # A maximum preflow has been found. The excess at t is the maximum flow + # value. + if value_only: + R.graph["flow_value"] = R_nodes[t]["excess"] + return R + + # Phase 2: Convert the maximum preflow into a maximum flow by returning the + # excess to s. + + # Relabel all nodes so that they have accurate heights. + height = global_relabel(False) + grt.clear_work() + + # Continue to discharge the active nodes. + while height > n: + # Discharge active nodes in the current level. + while True: + level = levels[height] + if not level.active: + # All active nodes in the current level have been discharged. + # Move to the next lower level. + height -= 1 + break + u = arbitrary_element(level.active) + height = discharge(u, False) + if grt.is_reached(): + # Global relabeling heuristic. + height = global_relabel(False) + grt.clear_work() + + R.graph["flow_value"] = R_nodes[t]["excess"] + return R + + +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def preflow_push( + G, s, t, capacity="capacity", residual=None, global_relabel_freq=1, value_only=False +): + r"""Find a maximum single-commodity flow using the highest-label + preflow-push algorithm. + + This function returns the residual network resulting after computing + the maximum flow. See below for details about the conventions + NetworkX uses for defining residual networks. + + This algorithm has a running time of $O(n^2 \sqrt{m})$ for $n$ nodes and + $m$ edges. + + + Parameters + ---------- + G : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + s : node + Source node for the flow. + + t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + residual : NetworkX graph + Residual network on which the algorithm is to be executed. If None, a + new residual network is created. Default value: None. + + global_relabel_freq : integer, float + Relative frequency of applying the global relabeling heuristic to speed + up the algorithm. If it is None, the heuristic is disabled. Default + value: 1. + + value_only : bool + If False, compute a maximum flow; otherwise, compute a maximum preflow + which is enough for computing the maximum flow value. Default value: + False. + + Returns + ------- + R : NetworkX DiGraph + Residual network after computing the maximum flow. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. 
+ + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow` + :meth:`minimum_cut` + :meth:`edmonds_karp` + :meth:`shortest_augmenting_path` + + Notes + ----- + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. For each node :samp:`u` in :samp:`R`, + :samp:`R.nodes[u]['excess']` represents the difference between flow into + :samp:`u` and flow out of :samp:`u`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. Reachability to :samp:`t` using + only edges :samp:`(u, v)` such that + :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Examples + -------- + >>> from networkx.algorithms.flow import preflow_push + + The functions that implement flow algorithms and output a residual + network, such as this one, are not imported to the base NetworkX + namespace, so you have to explicitly import them from the flow package. + + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = preflow_push(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value == R.graph["flow_value"] + True + >>> # preflow_push also stores the maximum flow value + >>> # in the excess attribute of the sink node t + >>> flow_value == R.nodes["y"]["excess"] + True + >>> # For some problems, you might only want to compute a + >>> # maximum preflow. + >>> R = preflow_push(G, "x", "y", value_only=True) + >>> flow_value == R.graph["flow_value"] + True + >>> flow_value == R.nodes["y"]["excess"] + True + + """ + R = preflow_push_impl(G, s, t, capacity, residual, global_relabel_freq, value_only) + R.graph["algorithm"] = "preflow_push" + nx._clear_cache(R) + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py new file mode 100644 index 0000000..9f1193f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/shortestaugmentingpath.py @@ -0,0 +1,300 @@ +""" +Shortest augmenting path algorithm for maximum flow problems. 
+""" + +from collections import deque + +import networkx as nx + +from .edmondskarp import edmonds_karp_core +from .utils import CurrentEdge, build_residual_network + +__all__ = ["shortest_augmenting_path"] + + +def shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff): + """Implementation of the shortest augmenting path algorithm.""" + if s not in G: + raise nx.NetworkXError(f"node {str(s)} not in graph") + if t not in G: + raise nx.NetworkXError(f"node {str(t)} not in graph") + if s == t: + raise nx.NetworkXError("source and sink are the same node") + + if residual is None: + R = build_residual_network(G, capacity) + else: + R = residual + + R_nodes = R.nodes + R_pred = R.pred + R_succ = R.succ + + # Initialize/reset the residual network. + for u in R: + for e in R_succ[u].values(): + e["flow"] = 0 + + # Initialize heights of the nodes. + heights = {t: 0} + q = deque([(t, 0)]) + while q: + u, height = q.popleft() + height += 1 + for v, attr in R_pred[u].items(): + if v not in heights and attr["flow"] < attr["capacity"]: + heights[v] = height + q.append((v, height)) + + if s not in heights: + # t is not reachable from s in the residual network. The maximum flow + # must be zero. + R.graph["flow_value"] = 0 + return R + + n = len(G) + m = R.size() / 2 + + # Initialize heights and 'current edge' data structures of the nodes. + for u in R: + R_nodes[u]["height"] = heights[u] if u in heights else n + R_nodes[u]["curr_edge"] = CurrentEdge(R_succ[u]) + + # Initialize counts of nodes in each level. + counts = [0] * (2 * n - 1) + for u in R: + counts[R_nodes[u]["height"]] += 1 + + inf = R.graph["inf"] + + def augment(path): + """Augment flow along a path from s to t.""" + # Determine the path residual capacity. + flow = inf + it = iter(path) + u = next(it) + for v in it: + attr = R_succ[u][v] + flow = min(flow, attr["capacity"] - attr["flow"]) + u = v + if flow * 2 > inf: + raise nx.NetworkXUnbounded("Infinite capacity path, flow unbounded above.") + # Augment flow along the path. + it = iter(path) + u = next(it) + for v in it: + R_succ[u][v]["flow"] += flow + R_succ[v][u]["flow"] -= flow + u = v + return flow + + def relabel(u): + """Relabel a node to create an admissible edge.""" + height = n - 1 + for v, attr in R_succ[u].items(): + if attr["flow"] < attr["capacity"]: + height = min(height, R_nodes[v]["height"]) + return height + 1 + + if cutoff is None: + cutoff = float("inf") + + # Phase 1: Look for shortest augmenting paths using depth-first search. + + flow_value = 0 + path = [s] + u = s + d = n if not two_phase else int(min(m**0.5, 2 * n ** (2.0 / 3))) + done = R_nodes[s]["height"] >= d + while not done: + height = R_nodes[u]["height"] + curr_edge = R_nodes[u]["curr_edge"] + # Depth-first search for the next node on the path to t. + while True: + v, attr = curr_edge.get() + if height == R_nodes[v]["height"] + 1 and attr["flow"] < attr["capacity"]: + # Advance to the next node following an admissible edge. + path.append(v) + u = v + break + try: + curr_edge.move_to_next() + except StopIteration: + counts[height] -= 1 + if counts[height] == 0: + # Gap heuristic: If relabeling causes a level to become + # empty, a minimum cut has been identified. The algorithm + # can now be terminated. + R.graph["flow_value"] = flow_value + return R + height = relabel(u) + if u == s and height >= d: + if not two_phase: + # t is disconnected from s in the residual network. No + # more augmenting paths exist. 
+ R.graph["flow_value"] = flow_value + return R + else: + # t is at least d steps away from s. End of phase 1. + done = True + break + counts[height] += 1 + R_nodes[u]["height"] = height + if u != s: + # After relabeling, the last edge on the path is no longer + # admissible. Retreat one step to look for an alternative. + path.pop() + u = path[-1] + break + if u == t: + # t is reached. Augment flow along the path and reset it for a new + # depth-first search. + flow_value += augment(path) + if flow_value >= cutoff: + R.graph["flow_value"] = flow_value + return R + path = [s] + u = s + + # Phase 2: Look for shortest augmenting paths using breadth-first search. + flow_value += edmonds_karp_core(R, s, t, cutoff - flow_value) + + R.graph["flow_value"] = flow_value + return R + + +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def shortest_augmenting_path( + G, + s, + t, + capacity="capacity", + residual=None, + value_only=False, + two_phase=False, + cutoff=None, +): + r"""Find a maximum single-commodity flow using the shortest augmenting path + algorithm. + + This function returns the residual network resulting after computing + the maximum flow. See below for details about the conventions + NetworkX uses for defining residual networks. + + This algorithm has a running time of $O(n^2 m)$ for $n$ nodes and $m$ + edges. + + + Parameters + ---------- + G : NetworkX graph + Edges of the graph are expected to have an attribute called + 'capacity'. If this attribute is not present, the edge is + considered to have infinite capacity. + + s : node + Source node for the flow. + + t : node + Sink node for the flow. + + capacity : string + Edges of the graph G are expected to have an attribute capacity + that indicates how much flow the edge can support. If this + attribute is not present, the edge is considered to have + infinite capacity. Default value: 'capacity'. + + residual : NetworkX graph + Residual network on which the algorithm is to be executed. If None, a + new residual network is created. Default value: None. + + value_only : bool + If True compute only the value of the maximum flow. This parameter + will be ignored by this algorithm because it is not applicable. + + two_phase : bool + If True, a two-phase variant is used. The two-phase variant improves + the running time on unit-capacity networks from $O(nm)$ to + $O(\min(n^{2/3}, m^{1/2}) m)$. Default value: False. + + cutoff : integer, float + If specified, the algorithm will terminate when the flow value reaches + or exceeds the cutoff. In this case, it may be unable to immediately + determine a minimum cut. Default value: None. + + Returns + ------- + R : NetworkX DiGraph + Residual network after computing the maximum flow. + + Raises + ------ + NetworkXError + The algorithm does not support MultiGraph and MultiDiGraph. If + the input graph is an instance of one of these two classes, a + NetworkXError is raised. + + NetworkXUnbounded + If the graph has a path of infinite capacity, the value of a + feasible flow on the graph is unbounded above and the function + raises a NetworkXUnbounded. + + See also + -------- + :meth:`maximum_flow` + :meth:`minimum_cut` + :meth:`edmonds_karp` + :meth:`preflow_push` + + Notes + ----- + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. 
:samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`. + + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not + specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such + that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + Examples + -------- + >>> from networkx.algorithms.flow import shortest_augmenting_path + + The functions that implement flow algorithms and output a residual + network, such as this one, are not imported to the base NetworkX + namespace, so you have to explicitly import them from the flow package. + + >>> G = nx.DiGraph() + >>> G.add_edge("x", "a", capacity=3.0) + >>> G.add_edge("x", "b", capacity=1.0) + >>> G.add_edge("a", "c", capacity=3.0) + >>> G.add_edge("b", "c", capacity=5.0) + >>> G.add_edge("b", "d", capacity=4.0) + >>> G.add_edge("d", "e", capacity=2.0) + >>> G.add_edge("c", "y", capacity=2.0) + >>> G.add_edge("e", "y", capacity=3.0) + >>> R = shortest_augmenting_path(G, "x", "y") + >>> flow_value = nx.maximum_flow_value(G, "x", "y") + >>> flow_value + 3.0 + >>> flow_value == R.graph["flow_value"] + True + + """ + R = shortest_augmenting_path_impl(G, s, t, capacity, residual, two_phase, cutoff) + R.graph["algorithm"] = "shortest_augmenting_path" + nx._clear_cache(R) + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..054250d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_gomory_hu.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_gomory_hu.cpython-312.pyc new file mode 100644 index 0000000..d418f3c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_gomory_hu.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-312.pyc new file mode 100644 index 0000000..b78334a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow.cpython-312.pyc differ diff 
--git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow_large_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow_large_graph.cpython-312.pyc new file mode 100644 index 0000000..2e6f63d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_maxflow_large_graph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_mincost.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_mincost.cpython-312.pyc new file mode 100644 index 0000000..a798a04 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_mincost.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_networksimplex.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_networksimplex.cpython-312.pyc new file mode 100644 index 0000000..4f99e47 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/__pycache__/test_networksimplex.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 new file mode 100644 index 0000000..e6ed574 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gl1.gpickle.bz2 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 new file mode 100644 index 0000000..abd0e8a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/gw1.gpickle.bz2 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 new file mode 100644 index 0000000..cd3ea80 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/netgen-2.gpickle.bz2 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py new file mode 100644 index 0000000..1649ec8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_gomory_hu.py @@ -0,0 +1,128 @@ +from itertools import combinations + +import pytest + +import networkx as nx +from networkx.algorithms.flow import ( + boykov_kolmogorov, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +) + +flow_funcs = [ + boykov_kolmogorov, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +] + + +class TestGomoryHuTree: + def minimum_edge_weight(self, T, u, v): + path = nx.shortest_path(T, u, v, weight="weight") + return min((T[u][v]["weight"], (u, v)) for (u, v) in zip(path, path[1:])) + + def compute_cutset(self, G, T_orig, edge): + T = T_orig.copy() + T.remove_edge(*edge) + U, V = list(nx.connected_components(T)) + cutset = set() + for x, nbrs in ((n, G[n]) for n in U): + cutset.update((x, y) for y in nbrs if y in V) + return cutset + + def test_default_flow_function_karate_club_graph(self): + G = nx.karate_club_graph() + 
nx.set_edge_attributes(G, 1, "capacity") + T = nx.gomory_hu_tree(G) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + def test_karate_club_graph(self): + G = nx.karate_club_graph() + nx.set_edge_attributes(G, 1, "capacity") + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + def test_davis_southern_women_graph(self): + G = nx.davis_southern_women_graph() + nx.set_edge_attributes(G, 1, "capacity") + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + def test_florentine_families_graph(self): + G = nx.florentine_families_graph() + nx.set_edge_attributes(G, 1, "capacity") + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + @pytest.mark.slow + def test_les_miserables_graph_cutset(self): + G = nx.les_miserables_graph() + nx.set_edge_attributes(G, 1, "capacity") + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v) == cut_value + + def test_karate_club_graph_cutset(self): + G = nx.karate_club_graph() + nx.set_edge_attributes(G, 1, "capacity") + T = nx.gomory_hu_tree(G) + assert nx.is_tree(T) + u, v = 0, 33 + cut_value, edge = self.minimum_edge_weight(T, u, v) + cutset = self.compute_cutset(G, T, edge) + assert cut_value == len(cutset) + + def test_wikipedia_example(self): + # Example from https://en.wikipedia.org/wiki/Gomory%E2%80%93Hu_tree + G = nx.Graph() + G.add_weighted_edges_from( + ( + (0, 1, 1), + (0, 2, 7), + (1, 2, 1), + (1, 3, 3), + (1, 4, 2), + (2, 4, 4), + (3, 4, 1), + (3, 5, 6), + (4, 5, 2), + ) + ) + for flow_func in flow_funcs: + T = nx.gomory_hu_tree(G, capacity="weight", flow_func=flow_func) + assert nx.is_tree(T) + for u, v in combinations(G, 2): + cut_value, edge = self.minimum_edge_weight(T, u, v) + assert nx.minimum_cut_value(G, u, v, capacity="weight") == cut_value + + def test_directed_raises(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + T = nx.gomory_hu_tree(G) + + def test_empty_raises(self): + with pytest.raises(nx.NetworkXError): + G = nx.empty_graph() + T = nx.gomory_hu_tree(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py new file mode 100644 index 0000000..71381ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow.py @@ -0,0 +1,573 @@ +"""Maximum flow algorithms test suite.""" + +import pytest + +import networkx as nx +from networkx.algorithms.flow import ( + boykov_kolmogorov, + build_flow_dict, + build_residual_network, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +) + +flow_funcs = { + boykov_kolmogorov, + dinitz, + edmonds_karp, + preflow_push, + shortest_augmenting_path, +} 
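+
+# A minimal sketch (not itself a test) of how the two groups below differ;
+# the tiny graph here is hypothetical:
+#
+#     G = nx.DiGraph()
+#     G.add_edge("s", "t", capacity=1.0)
+#     R = dinitz(G, "s", "t")  # flow functions return a residual network
+#     value, flow_dict = nx.maximum_flow(G, "s", "t", flow_func=dinitz)
+#     assert value == R.graph["flow_value"]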
+ +max_min_funcs = {nx.maximum_flow, nx.minimum_cut} +flow_value_funcs = {nx.maximum_flow_value, nx.minimum_cut_value} +interface_funcs = max_min_funcs | flow_value_funcs +all_funcs = flow_funcs | interface_funcs + + +def compute_cutset(G, partition): + reachable, non_reachable = partition + cutset = set() + for u, nbrs in ((n, G[n]) for n in reachable): + cutset.update((u, v) for v in nbrs if v in non_reachable) + return cutset + + +def validate_flows(G, s, t, flowDict, solnValue, capacity, flow_func): + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert set(G) == set(flowDict), errmsg + for u in G: + assert set(G[u]) == set(flowDict[u]), errmsg + excess = dict.fromkeys(flowDict, 0) + for u in flowDict: + for v, flow in flowDict[u].items(): + if capacity in G[u][v]: + assert flow <= G[u][v][capacity] + assert flow >= 0, errmsg + excess[u] -= flow + excess[v] += flow + for u, exc in excess.items(): + if u == s: + assert exc == -solnValue, errmsg + elif u == t: + assert exc == solnValue, errmsg + else: + assert exc == 0, errmsg + + +def validate_cuts(G, s, t, solnValue, partition, capacity, flow_func): + errmsg = f"Assertion failed in function: {flow_func.__name__}" + assert all(n in G for n in partition[0]), errmsg + assert all(n in G for n in partition[1]), errmsg + cutset = compute_cutset(G, partition) + assert all(G.has_edge(u, v) for (u, v) in cutset), errmsg + assert solnValue == sum(G[u][v][capacity] for (u, v) in cutset), errmsg + H = G.copy() + H.remove_edges_from(cutset) + if not G.is_directed(): + assert not nx.is_connected(H), errmsg + else: + assert not nx.is_strongly_connected(H), errmsg + + +def compare_flows_and_cuts(G, s, t, solnValue, capacity="capacity"): + for flow_func in flow_funcs: + errmsg = f"Assertion failed in function: {flow_func.__name__}" + R = flow_func(G, s, t, capacity) + # Test both legacy and new implementations. 
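+        # The residual network R carries the answer: the value as a graph
+        # attribute and per-edge flows, which build_flow_dict turns back
+        # into a dict-of-dicts keyed like G's adjacency.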
+ flow_value = R.graph["flow_value"] + flow_dict = build_flow_dict(G, R) + assert flow_value == solnValue, errmsg + validate_flows(G, s, t, flow_dict, solnValue, capacity, flow_func) + # Minimum cut + cut_value, partition = nx.minimum_cut( + G, s, t, capacity=capacity, flow_func=flow_func + ) + validate_cuts(G, s, t, solnValue, partition, capacity, flow_func) + + +class TestMaxflowMinCutCommon: + def test_graph1(self): + # Trivial undirected graph + G = nx.Graph() + G.add_edge(1, 2, capacity=1.0) + + # solution flows + # {1: {2: 1.0}, 2: {1: 1.0}} + + compare_flows_and_cuts(G, 1, 2, 1.0) + + def test_graph2(self): + # A more complex undirected graph + # adapted from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One + G = nx.Graph() + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + # H + # { + # "x": {"a": 3, "b": 1}, + # "a": {"c": 3, "x": 3}, + # "b": {"c": 1, "d": 2, "x": 1}, + # "c": {"a": 3, "b": 1, "y": 2}, + # "d": {"b": 2, "e": 2}, + # "e": {"d": 2, "y": 2}, + # "y": {"c": 2, "e": 2}, + # } + + compare_flows_and_cuts(G, "x", "y", 4.0) + + def test_digraph1(self): + # The classic directed graph example + G = nx.DiGraph() + G.add_edge("a", "b", capacity=1000.0) + G.add_edge("a", "c", capacity=1000.0) + G.add_edge("b", "c", capacity=1.0) + G.add_edge("b", "d", capacity=1000.0) + G.add_edge("c", "d", capacity=1000.0) + + # H + # { + # "a": {"b": 1000.0, "c": 1000.0}, + # "b": {"c": 0, "d": 1000.0}, + # "c": {"d": 1000.0}, + # "d": {}, + # } + + compare_flows_and_cuts(G, "a", "d", 2000.0) + + def test_digraph2(self): + # An example in which some edges end up with zero flow. + G = nx.DiGraph() + G.add_edge("s", "b", capacity=2) + G.add_edge("s", "c", capacity=1) + G.add_edge("c", "d", capacity=1) + G.add_edge("d", "a", capacity=1) + G.add_edge("b", "a", capacity=2) + G.add_edge("a", "t", capacity=2) + + # H + # { + # "s": {"b": 2, "c": 0}, + # "c": {"d": 0}, + # "d": {"a": 0}, + # "b": {"a": 2}, + # "a": {"t": 2}, + # "t": {}, + # } + + compare_flows_and_cuts(G, "s", "t", 2) + + def test_digraph3(self): + # A directed graph example from Cormen et al. 
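+        # (The familiar CLRS network on s, v1..v4, t; every algorithm must
+        # report the maximum flow value of 23.)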
+ G = nx.DiGraph() + G.add_edge("s", "v1", capacity=16.0) + G.add_edge("s", "v2", capacity=13.0) + G.add_edge("v1", "v2", capacity=10.0) + G.add_edge("v2", "v1", capacity=4.0) + G.add_edge("v1", "v3", capacity=12.0) + G.add_edge("v3", "v2", capacity=9.0) + G.add_edge("v2", "v4", capacity=14.0) + G.add_edge("v4", "v3", capacity=7.0) + G.add_edge("v3", "t", capacity=20.0) + G.add_edge("v4", "t", capacity=4.0) + + # H + # { + # "s": {"v1": 12.0, "v2": 11.0}, + # "v2": {"v1": 0, "v4": 11.0}, + # "v1": {"v2": 0, "v3": 12.0}, + # "v3": {"v2": 0, "t": 19.0}, + # "v4": {"v3": 7.0, "t": 4.0}, + # "t": {}, + # } + + compare_flows_and_cuts(G, "s", "t", 23.0) + + def test_digraph4(self): + # A more complex directed graph + # from https://web.archive.org/web/20220815055650/https://www.topcoder.com/thrive/articles/Maximum%20Flow:%20Part%20One + G = nx.DiGraph() + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + + # H + # { + # "x": {"a": 2.0, "b": 1.0}, + # "a": {"c": 2.0}, + # "b": {"c": 0, "d": 1.0}, + # "c": {"y": 2.0}, + # "d": {"e": 1.0}, + # "e": {"y": 1.0}, + # "y": {}, + # } + + compare_flows_and_cuts(G, "x", "y", 3.0) + + def test_wikipedia_dinitz_example(self): + # Nice example from https://en.wikipedia.org/wiki/Dinic's_algorithm + G = nx.DiGraph() + G.add_edge("s", 1, capacity=10) + G.add_edge("s", 2, capacity=10) + G.add_edge(1, 3, capacity=4) + G.add_edge(1, 4, capacity=8) + G.add_edge(1, 2, capacity=2) + G.add_edge(2, 4, capacity=9) + G.add_edge(3, "t", capacity=10) + G.add_edge(4, 3, capacity=6) + G.add_edge(4, "t", capacity=10) + + # solution flows + # { + # 1: {2: 0, 3: 4, 4: 6}, + # 2: {4: 9}, + # 3: {"t": 9}, + # 4: {3: 5, "t": 10}, + # "s": {1: 10, 2: 9}, + # "t": {}, + # } + + compare_flows_and_cuts(G, "s", "t", 19) + + def test_optional_capacity(self): + # Test optional capacity parameter. 
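+        # Same network as test_digraph4, with capacities stored under the
+        # edge attribute 'spam' to exercise the capacity keyword.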
+ G = nx.DiGraph() + G.add_edge("x", "a", spam=3.0) + G.add_edge("x", "b", spam=1.0) + G.add_edge("a", "c", spam=3.0) + G.add_edge("b", "c", spam=5.0) + G.add_edge("b", "d", spam=4.0) + G.add_edge("d", "e", spam=2.0) + G.add_edge("c", "y", spam=2.0) + G.add_edge("e", "y", spam=3.0) + + # solution flows + # { + # "x": {"a": 2.0, "b": 1.0}, + # "a": {"c": 2.0}, + # "b": {"c": 0, "d": 1.0}, + # "c": {"y": 2.0}, + # "d": {"e": 1.0}, + # "e": {"y": 1.0}, + # "y": {}, + # } + solnValue = 3.0 + s = "x" + t = "y" + + compare_flows_and_cuts(G, s, t, solnValue, capacity="spam") + + def test_digraph_infcap_edges(self): + # DiGraph with infinite capacity edges + G = nx.DiGraph() + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + # H + # { + # "s": {"a": 85, "b": 12}, + # "a": {"c": 25, "t": 60}, + # "b": {"c": 12}, + # "c": {"t": 37}, + # "t": {}, + # } + + compare_flows_and_cuts(G, "s", "t", 97) + + # DiGraph with infinite capacity digon + G = nx.DiGraph() + G.add_edge("s", "a", capacity=85) + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("c", "a") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t", capacity=37) + + # H + # { + # "s": {"a": 85, "b": 12}, + # "a": {"c": 25, "t": 60}, + # "c": {"a": 0, "t": 37}, + # "b": {"c": 12}, + # "t": {}, + # } + + compare_flows_and_cuts(G, "s", "t", 97) + + def test_digraph_infcap_path(self): + # Graph with infinite capacity (s, t)-path + G = nx.DiGraph() + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c") + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + for flow_func in all_funcs: + pytest.raises(nx.NetworkXUnbounded, flow_func, G, "s", "t") + + def test_graph_infcap_edges(self): + # Undirected graph with infinite capacity edges + G = nx.Graph() + G.add_edge("s", "a") + G.add_edge("s", "b", capacity=30) + G.add_edge("a", "c", capacity=25) + G.add_edge("b", "c", capacity=12) + G.add_edge("a", "t", capacity=60) + G.add_edge("c", "t") + + # H + # { + # "s": {"a": 85, "b": 12}, + # "a": {"c": 25, "s": 85, "t": 60}, + # "b": {"c": 12, "s": 12}, + # "c": {"a": 25, "b": 12, "t": 37}, + # "t": {"a": 60, "c": 37}, + # } + + compare_flows_and_cuts(G, "s", "t", 97) + + def test_digraph5(self): + # From ticket #429 by mfrasca. 
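+        # The two arcs into t (a->t: 1, b->t: 3) cap the maximum flow at 4.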
+ G = nx.DiGraph() + G.add_edge("s", "a", capacity=2) + G.add_edge("s", "b", capacity=2) + G.add_edge("a", "b", capacity=5) + G.add_edge("a", "t", capacity=1) + G.add_edge("b", "a", capacity=1) + G.add_edge("b", "t", capacity=3) + # flow solution + # { + # "a": {"b": 1, "t": 1}, + # "b": {"a": 0, "t": 3}, + # "s": {"a": 2, "b": 2}, + # "t": {}, + # } + compare_flows_and_cuts(G, "s", "t", 4) + + def test_disconnected(self): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") + G.remove_node(1) + assert nx.maximum_flow_value(G, 0, 3) == 0 + # flow solution + # {0: {}, 2: {3: 0}, 3: {2: 0}} + compare_flows_and_cuts(G, 0, 3, 0) + + def test_source_target_not_in_graph(self): + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") + G.remove_node(0) + for flow_func in all_funcs: + pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") + G.remove_node(3) + for flow_func in all_funcs: + pytest.raises(nx.NetworkXError, flow_func, G, 0, 3) + + def test_source_target_coincide(self): + G = nx.Graph() + G.add_node(0) + for flow_func in all_funcs: + pytest.raises(nx.NetworkXError, flow_func, G, 0, 0) + + def test_multigraphs_raise(self): + G = nx.MultiGraph() + M = nx.MultiDiGraph() + G.add_edges_from([(0, 1), (1, 0)], capacity=True) + for flow_func in all_funcs: + pytest.raises(nx.NetworkXError, flow_func, G, 0, 0) + + +class TestMaxFlowMinCutInterface: + def setup_method(self): + G = nx.DiGraph() + G.add_edge("x", "a", capacity=3.0) + G.add_edge("x", "b", capacity=1.0) + G.add_edge("a", "c", capacity=3.0) + G.add_edge("b", "c", capacity=5.0) + G.add_edge("b", "d", capacity=4.0) + G.add_edge("d", "e", capacity=2.0) + G.add_edge("c", "y", capacity=2.0) + G.add_edge("e", "y", capacity=3.0) + self.G = G + H = nx.DiGraph() + H.add_edge(0, 1, capacity=1.0) + H.add_edge(1, 2, capacity=1.0) + self.H = H + + def test_flow_func_not_callable(self): + elements = ["this_should_be_callable", 10, {1, 2, 3}] + G = nx.Graph() + G.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1)], weight="capacity") + for flow_func in interface_funcs: + for element in elements: + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) + pytest.raises(nx.NetworkXError, flow_func, G, 0, 1, flow_func=element) + + def test_flow_func_parameters(self): + G = self.G + fv = 3.0 + for interface_func in interface_funcs: + for flow_func in flow_funcs: + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) + result = interface_func(G, "x", "y", flow_func=flow_func) + if interface_func in max_min_funcs: + result = result[0] + assert fv == result, errmsg + + def test_minimum_cut_no_cutoff(self): + G = self.G + pytest.raises( + nx.NetworkXError, + nx.minimum_cut, + G, + "x", + "y", + flow_func=preflow_push, + cutoff=1.0, + ) + pytest.raises( + nx.NetworkXError, + nx.minimum_cut_value, + G, + "x", + "y", + flow_func=preflow_push, + cutoff=1.0, + ) + + def test_kwargs(self): + G = self.H + fv = 1.0 + to_test = ( + (shortest_augmenting_path, {"two_phase": True}), + (preflow_push, {"global_relabel_freq": 5}), + ) + for interface_func in interface_funcs: + for flow_func, kwargs in to_test: + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) + result = interface_func(G, 0, 2, flow_func=flow_func, **kwargs) + if interface_func in 
max_min_funcs: + result = result[0] + assert fv == result, errmsg + + def test_kwargs_default_flow_func(self): + G = self.H + for interface_func in interface_funcs: + pytest.raises( + nx.NetworkXError, interface_func, G, 0, 1, global_relabel_freq=2 + ) + + def test_reusing_residual(self): + G = self.G + fv = 3.0 + s, t = "x", "y" + R = build_residual_network(G, "capacity") + for interface_func in interface_funcs: + for flow_func in flow_funcs: + errmsg = ( + f"Assertion failed in function: {flow_func.__name__} " + f"in interface {interface_func.__name__}" + ) + for i in range(3): + result = interface_func( + G, "x", "y", flow_func=flow_func, residual=R + ) + if interface_func in max_min_funcs: + result = result[0] + assert fv == result, errmsg + + +# Tests specific to one algorithm +def test_preflow_push_global_relabel_freq(): + G = nx.DiGraph() + G.add_edge(1, 2, capacity=1) + R = preflow_push(G, 1, 2, global_relabel_freq=None) + assert R.graph["flow_value"] == 1 + pytest.raises(nx.NetworkXError, preflow_push, G, 1, 2, global_relabel_freq=-1) + + +def test_preflow_push_makes_enough_space(): + # From ticket #1542 + G = nx.DiGraph() + nx.add_path(G, [0, 1, 3], capacity=1) + nx.add_path(G, [1, 2, 3], capacity=1) + R = preflow_push(G, 0, 3, value_only=False) + assert R.graph["flow_value"] == 1 + + +def test_shortest_augmenting_path_two_phase(): + k = 5 + p = 1000 + G = nx.DiGraph() + for i in range(k): + G.add_edge("s", (i, 0), capacity=1) + nx.add_path(G, ((i, j) for j in range(p)), capacity=1) + G.add_edge((i, p - 1), "t", capacity=1) + R = shortest_augmenting_path(G, "s", "t", two_phase=True) + assert R.graph["flow_value"] == k + R = shortest_augmenting_path(G, "s", "t", two_phase=False) + assert R.graph["flow_value"] == k + + +class TestCutoff: + def test_cutoff(self): + k = 5 + p = 1000 + G = nx.DiGraph() + for i in range(k): + G.add_edge("s", (i, 0), capacity=2) + nx.add_path(G, ((i, j) for j in range(p)), capacity=2) + G.add_edge((i, p - 1), "t", capacity=2) + R = shortest_augmenting_path(G, "s", "t", two_phase=True, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = shortest_augmenting_path(G, "s", "t", two_phase=False, cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = edmonds_karp(G, "s", "t", cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = dinitz(G, "s", "t", cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + R = boykov_kolmogorov(G, "s", "t", cutoff=k) + assert k <= R.graph["flow_value"] <= (2 * k) + + def test_complete_graph_cutoff(self): + G = nx.complete_graph(5) + nx.set_edge_attributes(G, dict.fromkeys(G.edges(), 1), "capacity") + for flow_func in [ + shortest_augmenting_path, + edmonds_karp, + dinitz, + boykov_kolmogorov, + ]: + for cutoff in [3, 2, 1]: + result = nx.maximum_flow_value( + G, 0, 4, flow_func=flow_func, cutoff=cutoff + ) + assert cutoff == result, f"cutoff error in {flow_func.__name__}" diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py new file mode 100644 index 0000000..fdc4e3d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_maxflow_large_graph.py @@ -0,0 +1,155 @@ +"""Maximum flow algorithms test suite on large graphs.""" + +import bz2 +import importlib.resources +import pickle + +import pytest + +import networkx as nx +from networkx.algorithms.flow import ( + boykov_kolmogorov, + build_flow_dict, + 
build_residual_network,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+)
+
+flow_funcs = [
+    boykov_kolmogorov,
+    dinitz,
+    edmonds_karp,
+    preflow_push,
+    shortest_augmenting_path,
+]
+
+
+def gen_pyramid(N):
+    # This graph admits a flow of value 1 for which every arc is at
+    # capacity (except the arcs incident to the sink which have
+    # infinite capacity).
+    G = nx.DiGraph()
+
+    for i in range(N - 1):
+        cap = 1.0 / (i + 2)
+        for j in range(i + 1):
+            G.add_edge((i, j), (i + 1, j), capacity=cap)
+            cap = 1.0 / (i + 1) - cap
+            G.add_edge((i, j), (i + 1, j + 1), capacity=cap)
+            cap = 1.0 / (i + 2) - cap
+
+    for j in range(N):
+        G.add_edge((N - 1, j), "t")
+
+    return G
+
+
+def read_graph(name):
+    fname = (
+        importlib.resources.files("networkx.algorithms.flow.tests")
+        / f"{name}.gpickle.bz2"
+    )
+
+    with bz2.BZ2File(fname, "rb") as f:
+        G = pickle.load(f)
+    return G
+
+
+def validate_flows(G, s, t, soln_value, R, flow_func):
+    flow_value = R.graph["flow_value"]
+    flow_dict = build_flow_dict(G, R)
+    errmsg = f"Assertion failed in function: {flow_func.__name__}"
+    assert soln_value == flow_value, errmsg
+    assert set(G) == set(flow_dict), errmsg
+    for u in G:
+        assert set(G[u]) == set(flow_dict[u]), errmsg
+    excess = dict.fromkeys(flow_dict, 0)
+    for u in flow_dict:
+        for v, flow in flow_dict[u].items():
+            assert flow <= G[u][v].get("capacity", float("inf")), errmsg
+            assert flow >= 0, errmsg
+            excess[u] -= flow
+            excess[v] += flow
+    for u, exc in excess.items():
+        if u == s:
+            assert exc == -soln_value, errmsg
+        elif u == t:
+            assert exc == soln_value, errmsg
+        else:
+            assert exc == 0, errmsg
+
+
+class TestMaxflowLargeGraph:
+    def test_complete_graph(self):
+        N = 50
+        G = nx.complete_graph(N)
+        nx.set_edge_attributes(G, 5, "capacity")
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            kwargs["flow_func"] = flow_func
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            flow_value = nx.maximum_flow_value(G, 1, 2, **kwargs)
+            assert flow_value == 5 * (N - 1), errmsg
+
+    def test_pyramid(self):
+        N = 10
+        # N = 100 # this gives a graph with 5051 nodes
+        G = gen_pyramid(N)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            kwargs["flow_func"] = flow_func
+            errmsg = f"Assertion failed in function: {flow_func.__name__}"
+            flow_value = nx.maximum_flow_value(G, (0, 0), "t", **kwargs)
+            assert flow_value == pytest.approx(1.0, abs=1e-7)
+
+    def test_gl1(self):
+        G = read_graph("gl1")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        # do one flow_func to save time
+        flow_func = flow_funcs[0]
+        validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs), flow_func)
+
+        # for flow_func in flow_funcs:
+        #     validate_flows(G, s, t, 156545, flow_func(G, s, t, **kwargs),
+        #                    flow_func)
+
+    @pytest.mark.slow
+    def test_gw1(self):
+        G = read_graph("gw1")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        for flow_func in flow_funcs:
+            validate_flows(G, s, t, 1202018, flow_func(G, s, t, **kwargs), flow_func)
+
+    def test_wlm3(self):
+        G = read_graph("wlm3")
+        s = 1
+        t = len(G)
+        R = build_residual_network(G, "capacity")
+        kwargs = {"residual": R}
+
+        # do one flow_func to save time
+        flow_func = flow_funcs[0]
+        validate_flows(G, s, t, 11875108, flow_func(G, s, t, **kwargs), flow_func)
+
+        # for flow_func in flow_funcs:
+        #     validate_flows(G, s, t, 11875108, flow_func(G, s, t,
**kwargs), + # flow_func) + + def test_preflow_push_global_relabel(self): + G = read_graph("gw1") + R = preflow_push(G, 1, len(G), global_relabel_freq=50) + assert R.graph["flow_value"] == 1202018 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py new file mode 100644 index 0000000..edc6262 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_mincost.py @@ -0,0 +1,475 @@ +import bz2 +import importlib.resources +import pickle + +import pytest + +import networkx as nx + + +class TestMinCostFlow: + def test_simple_digraph(self): + G = nx.DiGraph() + G.add_node("a", demand=-5) + G.add_node("d", demand=5) + G.add_edge("a", "b", weight=3, capacity=4) + G.add_edge("a", "c", weight=6, capacity=10) + G.add_edge("b", "d", weight=1, capacity=9) + G.add_edge("c", "d", weight=2, capacity=5) + flowCost, H = nx.network_simplex(G) + soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}} + assert flowCost == 24 + assert nx.min_cost_flow_cost(G) == 24 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 24 + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 24 + assert nx.cost_of_flow(G, H) == 24 + assert H == soln + + def test_negcycle_infcap(self): + G = nx.DiGraph() + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("c", "a", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("d", "c", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + + def test_sum_demands_not_zero(self): + G = nx.DiGraph() + G.add_node("s", demand=-5) + G.add_node("t", demand=4) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + + def test_no_flow_satisfying_demands(self): + G = nx.DiGraph() + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + + def test_transshipment(self): + G = nx.DiGraph() + G.add_node("a", demand=1) + G.add_node("b", demand=-2) + G.add_node("c", demand=-2) + G.add_node("d", demand=3) + G.add_node("e", demand=-4) + G.add_node("f", demand=-4) + G.add_node("g", demand=3) + G.add_node("h", demand=2) + G.add_node("r", demand=3) + G.add_edge("a", "c", weight=3) + G.add_edge("r", "a", weight=2) + G.add_edge("b", "a", weight=9) + G.add_edge("r", "c", weight=0) + G.add_edge("b", "r", weight=-6) + G.add_edge("c", "d", weight=5) + G.add_edge("e", "r", weight=4) + G.add_edge("e", "f", weight=3) + G.add_edge("h", "b", weight=4) + G.add_edge("f", "d", weight=7) + G.add_edge("f", "h", weight=12) + G.add_edge("g", "d", weight=12) + G.add_edge("f", "g", weight=-1) + G.add_edge("h", "g", weight=-10) + flowCost, H = 
nx.network_simplex(G) + soln = { + "a": {"c": 0}, + "b": {"a": 0, "r": 2}, + "c": {"d": 3}, + "d": {}, + "e": {"r": 3, "f": 1}, + "f": {"d": 0, "g": 3, "h": 2}, + "g": {"d": 0}, + "h": {"b": 0, "g": 0}, + "r": {"a": 1, "c": 1}, + } + assert flowCost == 41 + assert nx.min_cost_flow_cost(G) == 41 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 41 + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 41 + assert nx.cost_of_flow(G, H) == 41 + assert H == soln + + def test_max_flow_min_cost(self): + G = nx.DiGraph() + G.add_edge("s", "a", bandwidth=6) + G.add_edge("s", "c", bandwidth=10, cost=10) + G.add_edge("a", "b", cost=6) + G.add_edge("b", "d", bandwidth=8, cost=7) + G.add_edge("c", "d", cost=10) + G.add_edge("d", "t", bandwidth=5, cost=5) + soln = { + "s": {"a": 5, "c": 0}, + "a": {"b": 5}, + "b": {"d": 5}, + "c": {"d": 0}, + "d": {"t": 5}, + "t": {}, + } + flow = nx.max_flow_min_cost(G, "s", "t", capacity="bandwidth", weight="cost") + assert flow == soln + assert nx.cost_of_flow(G, flow, weight="cost") == 90 + + G.add_edge("t", "s", cost=-100) + flowCost, flow = nx.capacity_scaling(G, capacity="bandwidth", weight="cost") + G.remove_edge("t", "s") + assert flowCost == -410 + assert flow["t"]["s"] == 5 + del flow["t"]["s"] + assert flow == soln + assert nx.cost_of_flow(G, flow, weight="cost") == 90 + + def test_digraph1(self): + # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied + # Mathematical Programming. Addison-Wesley, 1977. + G = nx.DiGraph() + G.add_node(1, demand=-20) + G.add_node(4, demand=5) + G.add_node(5, demand=15) + G.add_edges_from( + [ + (1, 2, {"capacity": 15, "weight": 4}), + (1, 3, {"capacity": 8, "weight": 4}), + (2, 3, {"weight": 2}), + (2, 4, {"capacity": 4, "weight": 2}), + (2, 5, {"capacity": 10, "weight": 6}), + (3, 4, {"capacity": 15, "weight": 1}), + (3, 5, {"capacity": 5, "weight": 3}), + (4, 5, {"weight": 2}), + (5, 3, {"capacity": 4, "weight": 1}), + ] + ) + flowCost, H = nx.network_simplex(G) + soln = { + 1: {2: 12, 3: 8}, + 2: {3: 8, 4: 4, 5: 0}, + 3: {4: 11, 5: 5}, + 4: {5: 10}, + 5: {3: 0}, + } + assert flowCost == 150 + assert nx.min_cost_flow_cost(G) == 150 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 150 + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 150 + assert H == soln + assert nx.cost_of_flow(G, H) == 150 + + def test_digraph2(self): + # Example from ticket #430 from mfrasca. Original source: + # http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11. 
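+        # Checks max_flow_min_cost directly, then re-solves the instance
+        # with capacity_scaling by adding a high-negative-cost (t, s) edge.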
+ G = nx.DiGraph() + G.add_edge("s", 1, capacity=12) + G.add_edge("s", 2, capacity=6) + G.add_edge("s", 3, capacity=14) + G.add_edge(1, 2, capacity=11, weight=4) + G.add_edge(2, 3, capacity=9, weight=6) + G.add_edge(1, 4, capacity=5, weight=5) + G.add_edge(1, 5, capacity=2, weight=12) + G.add_edge(2, 5, capacity=4, weight=4) + G.add_edge(2, 6, capacity=2, weight=6) + G.add_edge(3, 6, capacity=31, weight=3) + G.add_edge(4, 5, capacity=18, weight=4) + G.add_edge(5, 6, capacity=9, weight=5) + G.add_edge(4, "t", capacity=3) + G.add_edge(5, "t", capacity=7) + G.add_edge(6, "t", capacity=22) + flow = nx.max_flow_min_cost(G, "s", "t") + soln = { + 1: {2: 6, 4: 5, 5: 1}, + 2: {3: 6, 5: 4, 6: 2}, + 3: {6: 20}, + 4: {5: 2, "t": 3}, + 5: {6: 0, "t": 7}, + 6: {"t": 22}, + "s": {1: 12, 2: 6, 3: 14}, + "t": {}, + } + assert flow == soln + + G.add_edge("t", "s", weight=-100) + flowCost, flow = nx.capacity_scaling(G) + G.remove_edge("t", "s") + assert flow["t"]["s"] == 32 + assert flowCost == -3007 + del flow["t"]["s"] + assert flow == soln + assert nx.cost_of_flow(G, flow) == 193 + + def test_digraph3(self): + """Combinatorial Optimization: Algorithms and Complexity, + Papadimitriou Steiglitz at page 140 has an example, 7.1, but that + admits multiple solutions, so I alter it a bit. From ticket #430 + by mfrasca.""" + + G = nx.DiGraph() + G.add_edge("s", "a") + G["s"]["a"].update({0: 2, 1: 4}) + G.add_edge("s", "b") + G["s"]["b"].update({0: 2, 1: 1}) + G.add_edge("a", "b") + G["a"]["b"].update({0: 5, 1: 2}) + G.add_edge("a", "t") + G["a"]["t"].update({0: 1, 1: 5}) + G.add_edge("b", "a") + G["b"]["a"].update({0: 1, 1: 3}) + G.add_edge("b", "t") + G["b"]["t"].update({0: 3, 1: 2}) + + "PS.ex.7.1: testing main function" + sol = nx.max_flow_min_cost(G, "s", "t", capacity=0, weight=1) + flow = sum(v for v in sol["s"].values()) + assert 4 == flow + assert 23 == nx.cost_of_flow(G, sol, weight=1) + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 0, "t": 3} + assert sol["t"] == {} + + G.add_edge("t", "s") + G["t"]["s"].update({1: -100}) + flowCost, sol = nx.capacity_scaling(G, capacity=0, weight=1) + G.remove_edge("t", "s") + flow = sum(v for v in sol["s"].values()) + assert 4 == flow + assert sol["t"]["s"] == 4 + assert flowCost == -377 + del sol["t"]["s"] + assert sol["s"] == {"a": 2, "b": 2} + assert sol["a"] == {"b": 1, "t": 1} + assert sol["b"] == {"a": 0, "t": 3} + assert sol["t"] == {} + assert nx.cost_of_flow(G, sol, weight=1) == 23 + + def test_zero_capacity_edges(self): + """Address issue raised in ticket #617 by arv.""" + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2, {"capacity": 1, "weight": 1}), + (1, 5, {"capacity": 1, "weight": 1}), + (2, 3, {"capacity": 0, "weight": 1}), + (2, 5, {"capacity": 1, "weight": 1}), + (5, 3, {"capacity": 2, "weight": 1}), + (5, 4, {"capacity": 0, "weight": 1}), + (3, 4, {"capacity": 2, "weight": 1}), + ] + ) + G.nodes[1]["demand"] = -1 + G.nodes[2]["demand"] = -1 + G.nodes[4]["demand"] = 2 + + flowCost, H = nx.network_simplex(G) + soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}} + assert flowCost == 6 + assert nx.min_cost_flow_cost(G) == 6 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 6 + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 6 + assert H == soln + assert nx.cost_of_flow(G, H) == 6 + + def test_digon(self): + """Check if digons are handled properly. 
Taken from ticket + #618 by arv.""" + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"capacity": 3, "weight": 600000}), + (2, 1, {"capacity": 2, "weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] + G = nx.DiGraph(edges) + G.add_nodes_from(nodes) + flowCost, H = nx.network_simplex(G) + soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}} + assert flowCost == 2857140 + assert nx.min_cost_flow_cost(G) == 2857140 + assert H == soln + assert nx.min_cost_flow(G) == soln + assert nx.cost_of_flow(G, H) == 2857140 + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 2857140 + assert H == soln + assert nx.cost_of_flow(G, H) == 2857140 + + def test_deadend(self): + """Check if one-node cycles are handled properly. Taken from ticket + #2906 from @sshraven.""" + G = nx.DiGraph() + + G.add_nodes_from(range(5), demand=0) + G.nodes[4]["demand"] = -13 + G.nodes[3]["demand"] = 13 + + G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1) + pytest.raises(nx.NetworkXUnfeasible, nx.min_cost_flow, G) + + def test_infinite_capacity_neg_digon(self): + """An infinite capacity negative cost digon results in an unbounded + instance.""" + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"weight": -600}), + (2, 1, {"weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] + G = nx.DiGraph(edges) + G.add_nodes_from(nodes) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + + def test_finite_capacity_neg_digon(self): + """The digon should receive the maximum amount of flow it can handle. + Taken from ticket #749 by @chuongdo.""" + G = nx.DiGraph() + G.add_edge("a", "b", capacity=1, weight=-1) + G.add_edge("b", "a", capacity=1, weight=-1) + min_cost = -2 + assert nx.min_cost_flow_cost(G) == min_cost + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == -2 + assert H == {"a": {"b": 1}, "b": {"a": 1}} + assert nx.cost_of_flow(G, H) == -2 + + def test_multidigraph(self): + """Multidigraphs are acceptable.""" + G = nx.MultiDiGraph() + G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity") + flowCost, H = nx.network_simplex(G) + assert flowCost == 0 + assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} + + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 0 + assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} + + def test_negative_selfloops(self): + """Negative selfloops should cause an exception if uncapacitated and + always be saturated otherwise. 
+ """ + G = nx.DiGraph() + G.add_edge(1, 1, weight=-1) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + G[1][1]["capacity"] = 2 + flowCost, H = nx.network_simplex(G) + assert flowCost == -2 + assert H == {1: {1: 2}} + flowCost, H = nx.capacity_scaling(G) + assert flowCost == -2 + assert H == {1: {1: 2}} + + G = nx.MultiDiGraph() + G.add_edge(1, 1, "x", weight=-1) + G.add_edge(1, 1, "y", weight=1) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnbounded, nx.capacity_scaling, G) + G[1][1]["x"]["capacity"] = 2 + flowCost, H = nx.network_simplex(G) + assert flowCost == -2 + assert H == {1: {1: {"x": 2, "y": 0}}} + flowCost, H = nx.capacity_scaling(G) + assert flowCost == -2 + assert H == {1: {1: {"x": 2, "y": 0}}} + + def test_bone_shaped(self): + # From #1283 + G = nx.DiGraph() + G.add_node(0, demand=-4) + G.add_node(1, demand=2) + G.add_node(2, demand=2) + G.add_node(3, demand=4) + G.add_node(4, demand=-2) + G.add_node(5, demand=-2) + G.add_edge(0, 1, capacity=4) + G.add_edge(0, 2, capacity=4) + G.add_edge(4, 3, capacity=4) + G.add_edge(5, 3, capacity=4) + G.add_edge(0, 3, capacity=0) + flowCost, H = nx.network_simplex(G) + assert flowCost == 0 + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} + flowCost, H = nx.capacity_scaling(G) + assert flowCost == 0 + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} + + def test_exceptions(self): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) + G = nx.MultiGraph() + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + pytest.raises(nx.NetworkXNotImplemented, nx.capacity_scaling, G) + G = nx.DiGraph() + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) + G.add_node(0, demand=float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G.nodes[0]["demand"] = 0 + G.add_node(1, demand=0) + G.add_edge(0, 1, weight=-float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G[0][1]["weight"] = 0 + G.add_edge(0, 0, weight=float("inf")) + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + # pytest.raises(nx.NetworkXError, nx.capacity_scaling, G) + G[0][0]["weight"] = 0 + G[0][1]["capacity"] = -1 + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + G[0][1]["capacity"] = 0 + G[0][0]["capacity"] = -1 + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + # pytest.raises(nx.NetworkXUnfeasible, nx.capacity_scaling, G) + + def test_large(self): + fname = ( + importlib.resources.files("networkx.algorithms.flow.tests") + / "netgen-2.gpickle.bz2" + ) + with bz2.BZ2File(fname, "rb") as f: + G = pickle.load(f) + flowCost, flowDict = nx.network_simplex(G) + assert 6749969302 == flowCost + assert 6749969302 == nx.cost_of_flow(G, flowDict) + flowCost, flowDict = nx.capacity_scaling(G) + assert 6749969302 == flowCost + assert 6749969302 == nx.cost_of_flow(G, flowDict) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py new file mode 100644 index 
0000000..9a37fc6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/test_networksimplex.py @@ -0,0 +1,481 @@ +import bz2 +import importlib.resources +import pickle + +import pytest + +import networkx as nx + + +@pytest.fixture +def simple_flow_graph(): + G = nx.DiGraph() + G.add_node("a", demand=0) + G.add_node("b", demand=-5) + G.add_node("c", demand=50000000) + G.add_node("d", demand=-49999995) + G.add_edge("a", "b", weight=3, capacity=4) + G.add_edge("a", "c", weight=6, capacity=10) + G.add_edge("b", "d", weight=1, capacity=9) + G.add_edge("c", "d", weight=2, capacity=5) + return G + + +@pytest.fixture +def simple_no_flow_graph(): + G = nx.DiGraph() + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("a", "c", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("c", "d", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + return G + + +def get_flowcost_from_flowdict(G, flowDict): + """Returns flow cost calculated from flow dictionary""" + flowCost = 0 + for u in flowDict: + for v in flowDict[u]: + flowCost += flowDict[u][v] * G[u][v]["weight"] + return flowCost + + +def test_infinite_demand_raise(simple_flow_graph): + G = simple_flow_graph + inf = float("inf") + node_name = "a" + nx.set_node_attributes(G, {node_name: {"demand": inf}}) + with pytest.raises( + nx.NetworkXError, + match=f"node '{node_name}' has infinite demand", + ): + nx.network_simplex(G) + + +def test_neg_infinite_demand_raise(simple_flow_graph): + G = simple_flow_graph + inf = float("inf") + node_name = "a" + nx.set_node_attributes(G, {node_name: {"demand": -inf}}) + with pytest.raises( + nx.NetworkXError, + match=f"node '{node_name}' has infinite demand", + ): + nx.network_simplex(G) + + +def test_infinite_weight_raise(simple_flow_graph): + G = simple_flow_graph + inf = float("inf") + nx.set_edge_attributes( + G, {("a", "b"): {"weight": inf}, ("b", "d"): {"weight": inf}} + ) + with pytest.raises( + nx.NetworkXError, + match=r"edge .* has infinite weight", + ): + nx.network_simplex(G) + + +def test_nonzero_net_demand_raise(simple_flow_graph): + G = simple_flow_graph + nx.set_node_attributes(G, {"b": {"demand": -4}}) + with pytest.raises( + nx.NetworkXUnfeasible, + match="total node demand is not zero", + ): + nx.network_simplex(G) + + +def test_negative_capacity_raise(simple_flow_graph): + G = simple_flow_graph + nx.set_edge_attributes(G, {("a", "b"): {"weight": 1}, ("b", "d"): {"capacity": -9}}) + with pytest.raises( + nx.NetworkXUnfeasible, + match=r"edge .* has negative capacity", + ): + nx.network_simplex(G) + + +def test_no_flow_satisfying_demands(simple_no_flow_graph): + G = simple_no_flow_graph + with pytest.raises( + nx.NetworkXUnfeasible, + match="no flow satisfies all node demands", + ): + nx.network_simplex(G) + + +def test_sum_demands_not_zero(simple_no_flow_graph): + G = simple_no_flow_graph + nx.set_node_attributes(G, {"t": {"demand": 4}}) + with pytest.raises( + nx.NetworkXUnfeasible, + match="total node demand is not zero", + ): + nx.network_simplex(G) + + +def test_google_or_tools_example(): + """ + https://developers.google.com/optimization/flow/mincostflow + """ + G = nx.DiGraph() + start_nodes = [0, 0, 1, 1, 1, 2, 2, 3, 4] + end_nodes = [1, 2, 2, 3, 4, 3, 4, 4, 2] + capacities = [15, 8, 20, 4, 10, 15, 4, 20, 5] + unit_costs = [4, 4, 2, 2, 6, 1, 3, 2, 3] + supplies = [20, 0, 0, -5, -15] + answer = 150 + + for i in range(len(supplies)): + G.add_node(i, 
demand=(-1) * supplies[i]) # supplies are negative of demand + + for i in range(len(start_nodes)): + G.add_edge( + start_nodes[i], end_nodes[i], weight=unit_costs[i], capacity=capacities[i] + ) + + flowCost, flowDict = nx.network_simplex(G) + assert flowCost == answer + assert flowCost == get_flowcost_from_flowdict(G, flowDict) + + +def test_google_or_tools_example2(): + """ + https://developers.google.com/optimization/flow/mincostflow + """ + G = nx.DiGraph() + start_nodes = [0, 0, 1, 1, 1, 2, 2, 3, 4, 3] + end_nodes = [1, 2, 2, 3, 4, 3, 4, 4, 2, 5] + capacities = [15, 8, 20, 4, 10, 15, 4, 20, 5, 10] + unit_costs = [4, 4, 2, 2, 6, 1, 3, 2, 3, 4] + supplies = [23, 0, 0, -5, -15, -3] + answer = 183 + + for i in range(len(supplies)): + G.add_node(i, demand=(-1) * supplies[i]) # supplies are negative of demand + + for i in range(len(start_nodes)): + G.add_edge( + start_nodes[i], end_nodes[i], weight=unit_costs[i], capacity=capacities[i] + ) + + flowCost, flowDict = nx.network_simplex(G) + assert flowCost == answer + assert flowCost == get_flowcost_from_flowdict(G, flowDict) + + +def test_large(): + fname = ( + importlib.resources.files("networkx.algorithms.flow.tests") + / "netgen-2.gpickle.bz2" + ) + + with bz2.BZ2File(fname, "rb") as f: + G = pickle.load(f) + flowCost, flowDict = nx.network_simplex(G) + assert 6749969302 == flowCost + assert 6749969302 == nx.cost_of_flow(G, flowDict) + + +def test_simple_digraph(): + G = nx.DiGraph() + G.add_node("a", demand=-5) + G.add_node("d", demand=5) + G.add_edge("a", "b", weight=3, capacity=4) + G.add_edge("a", "c", weight=6, capacity=10) + G.add_edge("b", "d", weight=1, capacity=9) + G.add_edge("c", "d", weight=2, capacity=5) + flowCost, H = nx.network_simplex(G) + soln = {"a": {"b": 4, "c": 1}, "b": {"d": 4}, "c": {"d": 1}, "d": {}} + assert flowCost == 24 + assert nx.min_cost_flow_cost(G) == 24 + assert H == soln + + +def test_negcycle_infcap(): + G = nx.DiGraph() + G.add_node("s", demand=-5) + G.add_node("t", demand=5) + G.add_edge("s", "a", weight=1, capacity=3) + G.add_edge("a", "b", weight=3) + G.add_edge("c", "a", weight=-6) + G.add_edge("b", "d", weight=1) + G.add_edge("d", "c", weight=-2) + G.add_edge("d", "t", weight=1, capacity=3) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + + +def test_transshipment(): + G = nx.DiGraph() + G.add_node("a", demand=1) + G.add_node("b", demand=-2) + G.add_node("c", demand=-2) + G.add_node("d", demand=3) + G.add_node("e", demand=-4) + G.add_node("f", demand=-4) + G.add_node("g", demand=3) + G.add_node("h", demand=2) + G.add_node("r", demand=3) + G.add_edge("a", "c", weight=3) + G.add_edge("r", "a", weight=2) + G.add_edge("b", "a", weight=9) + G.add_edge("r", "c", weight=0) + G.add_edge("b", "r", weight=-6) + G.add_edge("c", "d", weight=5) + G.add_edge("e", "r", weight=4) + G.add_edge("e", "f", weight=3) + G.add_edge("h", "b", weight=4) + G.add_edge("f", "d", weight=7) + G.add_edge("f", "h", weight=12) + G.add_edge("g", "d", weight=12) + G.add_edge("f", "g", weight=-1) + G.add_edge("h", "g", weight=-10) + flowCost, H = nx.network_simplex(G) + soln = { + "a": {"c": 0}, + "b": {"a": 0, "r": 2}, + "c": {"d": 3}, + "d": {}, + "e": {"r": 3, "f": 1}, + "f": {"d": 0, "g": 3, "h": 2}, + "g": {"d": 0}, + "h": {"b": 0, "g": 0}, + "r": {"a": 1, "c": 1}, + } + assert flowCost == 41 + assert H == soln + + +def test_digraph1(): + # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied + # Mathematical Programming. Addison-Wesley, 1977. 
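+    # (The optimal cost is 150; the same instance also appears in
+    # test_mincost.py.)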
+ G = nx.DiGraph() + G.add_node(1, demand=-20) + G.add_node(4, demand=5) + G.add_node(5, demand=15) + G.add_edges_from( + [ + (1, 2, {"capacity": 15, "weight": 4}), + (1, 3, {"capacity": 8, "weight": 4}), + (2, 3, {"weight": 2}), + (2, 4, {"capacity": 4, "weight": 2}), + (2, 5, {"capacity": 10, "weight": 6}), + (3, 4, {"capacity": 15, "weight": 1}), + (3, 5, {"capacity": 5, "weight": 3}), + (4, 5, {"weight": 2}), + (5, 3, {"capacity": 4, "weight": 1}), + ] + ) + flowCost, H = nx.network_simplex(G) + soln = { + 1: {2: 12, 3: 8}, + 2: {3: 8, 4: 4, 5: 0}, + 3: {4: 11, 5: 5}, + 4: {5: 10}, + 5: {3: 0}, + } + assert flowCost == 150 + assert nx.min_cost_flow_cost(G) == 150 + assert H == soln + + +def test_zero_capacity_edges(): + """Address issue raised in ticket #617 by arv.""" + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2, {"capacity": 1, "weight": 1}), + (1, 5, {"capacity": 1, "weight": 1}), + (2, 3, {"capacity": 0, "weight": 1}), + (2, 5, {"capacity": 1, "weight": 1}), + (5, 3, {"capacity": 2, "weight": 1}), + (5, 4, {"capacity": 0, "weight": 1}), + (3, 4, {"capacity": 2, "weight": 1}), + ] + ) + G.nodes[1]["demand"] = -1 + G.nodes[2]["demand"] = -1 + G.nodes[4]["demand"] = 2 + + flowCost, H = nx.network_simplex(G) + soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}} + assert flowCost == 6 + assert nx.min_cost_flow_cost(G) == 6 + assert H == soln + + +def test_digon(): + """Check if digons are handled properly. Taken from ticket + #618 by arv.""" + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"capacity": 3, "weight": 600000}), + (2, 1, {"capacity": 2, "weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] + G = nx.DiGraph(edges) + G.add_nodes_from(nodes) + flowCost, H = nx.network_simplex(G) + soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}} + assert flowCost == 2857140 + + +def test_deadend(): + """Check if one-node cycles are handled properly. Taken from ticket + #2906 from @sshraven.""" + G = nx.DiGraph() + + G.add_nodes_from(range(5), demand=0) + G.nodes[4]["demand"] = -13 + G.nodes[3]["demand"] = 13 + + G.add_edges_from([(0, 2), (0, 3), (2, 1)], capacity=20, weight=0.1) + pytest.raises(nx.NetworkXUnfeasible, nx.network_simplex, G) + + +def test_infinite_capacity_neg_digon(): + """An infinite capacity negative cost digon results in an unbounded + instance.""" + nodes = [(1, {}), (2, {"demand": -4}), (3, {"demand": 4})] + edges = [ + (1, 2, {"weight": -600}), + (2, 1, {"weight": 0}), + (2, 3, {"capacity": 5, "weight": 714285}), + (3, 2, {"capacity": 2, "weight": 0}), + ] + G = nx.DiGraph(edges) + G.add_nodes_from(nodes) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + + +def test_multidigraph(): + """Multidigraphs are acceptable.""" + G = nx.MultiDiGraph() + G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight="capacity") + flowCost, H = nx.network_simplex(G) + assert flowCost == 0 + assert H == {1: {2: {0: 0}}, 2: {3: {0: 0}}, 3: {}} + + +def test_negative_selfloops(): + """Negative selfloops should cause an exception if uncapacitated and + always be saturated otherwise. 
+ """ + G = nx.DiGraph() + G.add_edge(1, 1, weight=-1) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + + G[1][1]["capacity"] = 2 + flowCost, H = nx.network_simplex(G) + assert flowCost == -2 + assert H == {1: {1: 2}} + + G = nx.MultiDiGraph() + G.add_edge(1, 1, "x", weight=-1) + G.add_edge(1, 1, "y", weight=1) + pytest.raises(nx.NetworkXUnbounded, nx.network_simplex, G) + + G[1][1]["x"]["capacity"] = 2 + flowCost, H = nx.network_simplex(G) + assert flowCost == -2 + assert H == {1: {1: {"x": 2, "y": 0}}} + + +def test_bone_shaped(): + # From #1283 + G = nx.DiGraph() + G.add_node(0, demand=-4) + G.add_node(1, demand=2) + G.add_node(2, demand=2) + G.add_node(3, demand=4) + G.add_node(4, demand=-2) + G.add_node(5, demand=-2) + G.add_edge(0, 1, capacity=4) + G.add_edge(0, 2, capacity=4) + G.add_edge(4, 3, capacity=4) + G.add_edge(5, 3, capacity=4) + G.add_edge(0, 3, capacity=0) + flowCost, H = nx.network_simplex(G) + assert flowCost == 0 + assert H == {0: {1: 2, 2: 2, 3: 0}, 1: {}, 2: {}, 3: {}, 4: {3: 2}, 5: {3: 2}} + + +def test_graphs_type_exceptions(): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + G = nx.MultiGraph() + pytest.raises(nx.NetworkXNotImplemented, nx.network_simplex, G) + G = nx.DiGraph() + pytest.raises(nx.NetworkXError, nx.network_simplex, G) + + +@pytest.fixture() +def faux_inf_example(): + """Base test graph for probing faux_infinity bound. See gh-7562""" + G = nx.DiGraph() + + # Add nodes with demands + G.add_node("s0", demand=-4) + G.add_node("s1", demand=-4) + G.add_node("ns", demand=0) + G.add_node("nc", demand=0) + G.add_node("c0", demand=4) + G.add_node("c1", demand=4) + + # Uniformly weighted edges + G.add_edge("s0", "ns", weight=1) + G.add_edge("s1", "ns", weight=1) + G.add_edge("ns", "nc", weight=1) + G.add_edge("nc", "c0", weight=1) + G.add_edge("nc", "c1", weight=1) + + return G + + +@pytest.mark.parametrize("large_capacity", [True, False]) +@pytest.mark.parametrize("large_demand", [True, False]) +@pytest.mark.parametrize("large_weight", [True, False]) +def test_network_simplex_faux_infinity( + faux_inf_example, large_capacity, large_demand, large_weight +): + """network_simplex should not raise an exception as a result of faux_infinity + for these cases. 
See gh-7562""" + G = faux_inf_example + lv = 1_000_000_000 + + # Modify the base graph with combinations of large values for capacity, + # demand, and weight to probe faux_infinity + if large_capacity: + G["s0"]["ns"]["capacity"] = lv + if large_demand: + G.nodes["s0"]["demand"] = -lv + G.nodes["c1"]["demand"] = lv + if large_weight: + G["s1"]["ns"]["weight"] = lv + + # Execute without raising + fc, fd = nx.network_simplex(G) + + +def test_network_simplex_unbounded_flow(): + G = nx.DiGraph() + # Add nodes + G.add_node("A") + G.add_node("B") + G.add_node("C") + + # Add edges forming a negative cycle + G.add_weighted_edges_from([("A", "B", -5), ("B", "C", -5), ("C", "A", -5)]) + + with pytest.raises( + nx.NetworkXUnbounded, + match="negative cycle with infinite capacity found", + ): + nx.network_simplex(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 new file mode 100644 index 0000000..8ce935a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/tests/wlm3.gpickle.bz2 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py new file mode 100644 index 0000000..9780746 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/flow/utils.py @@ -0,0 +1,194 @@ +""" +Utility classes and functions for network flow algorithms. +""" + +from collections import deque + +import networkx as nx + +__all__ = [ + "CurrentEdge", + "Level", + "GlobalRelabelThreshold", + "build_residual_network", + "detect_unboundedness", + "build_flow_dict", +] + + +class CurrentEdge: + """Mechanism for iterating over out-edges incident to a node in a circular + manner. A StopIteration exception is raised when wraparound occurs. + """ + + __slots__ = ("_edges", "_it", "_curr") + + def __init__(self, edges): + self._edges = edges + if self._edges: + self._rewind() + + def get(self): + return self._curr + + def move_to_next(self): + try: + self._curr = next(self._it) + except StopIteration: + self._rewind() + raise + + def _rewind(self): + self._it = iter(self._edges.items()) + self._curr = next(self._it) + + def __eq__(self, other): + return (getattr(self, "_curr", None), self._edges) == ( + (getattr(other, "_curr", None), other._edges) + ) + + +class Level: + """Active and inactive nodes in a level.""" + + __slots__ = ("active", "inactive") + + def __init__(self): + self.active = set() + self.inactive = set() + + +class GlobalRelabelThreshold: + """Measurement of work before the global relabeling heuristic should be + applied. + """ + + def __init__(self, n, m, freq): + self._threshold = (n + m) / freq if freq else float("inf") + self._work = 0 + + def add_work(self, work): + self._work += work + + def is_reached(self): + return self._work >= self._threshold + + def clear_work(self): + self._work = 0 + + +@nx._dispatchable(edge_attrs={"capacity": float("inf")}, returns_graph=True) +def build_residual_network(G, capacity): + """Build a residual network and initialize a zero flow. + + The residual network :samp:`R` from an input graph :samp:`G` has the + same nodes as :samp:`G`. :samp:`R` is a DiGraph that contains a pair + of edges :samp:`(u, v)` and :samp:`(v, u)` iff :samp:`(u, v)` is not a + self-loop, and at least one of :samp:`(u, v)` and :samp:`(v, u)` exists + in :samp:`G`.
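+ + For example, if :samp:`G` is a directed graph with a single edge + :samp:`(u, v)` of capacity 5, then :samp:`R` contains :samp:`(u, v)` with + capacity 5 together with the reverse edge :samp:`(v, u)` with capacity 0.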
+ + For each edge :samp:`(u, v)` in :samp:`R`, :samp:`R[u][v]['capacity']` + is equal to the capacity of :samp:`(u, v)` in :samp:`G` if it exists + in :samp:`G` or zero otherwise. If the capacity is infinite, + :samp:`R[u][v]['capacity']` will have a high arbitrary finite value + that does not affect the solution of the problem. This value is stored in + :samp:`R.graph['inf']`. For each edge :samp:`(u, v)` in :samp:`R`, + :samp:`R[u][v]['flow']` represents the flow function of :samp:`(u, v)` and + satisfies :samp:`R[u][v]['flow'] == -R[v][u]['flow']`. + + The flow value, defined as the total flow into :samp:`t`, the sink, is + stored in :samp:`R.graph['flow_value']`. If :samp:`cutoff` is not + specified, reachability to :samp:`t` using only edges :samp:`(u, v)` such + that :samp:`R[u][v]['flow'] < R[u][v]['capacity']` induces a minimum + :samp:`s`-:samp:`t` cut. + + """ + if G.is_multigraph(): + raise nx.NetworkXError("MultiGraph and MultiDiGraph not supported (yet).") + + R = nx.DiGraph() + R.__networkx_cache__ = None # Disable caching + R.add_nodes_from(G) + + inf = float("inf") + # Extract edges with positive capacities. Self loops excluded. + edge_list = [ + (u, v, attr) + for u, v, attr in G.edges(data=True) + if u != v and attr.get(capacity, inf) > 0 + ] + # Simulate infinity with three times the sum of the finite edge capacities + # or any positive value if the sum is zero. This allows the + # infinite-capacity edges to be distinguished for unboundedness detection + # and directly participate in residual capacity calculation. If the maximum + # flow is finite, these edges cannot appear in the minimum cut and thus + # guarantee correctness. Since the residual capacity of an + # infinite-capacity edge is always at least 2/3 of inf, while that of a + # finite-capacity edge is at most 1/3 of inf, if an operation moves more + # than 1/3 of inf units of flow to t, there must be an infinite-capacity + # s-t path in G. + inf = ( + 3 + * sum( + attr[capacity] + for u, v, attr in edge_list + if capacity in attr and attr[capacity] != inf + ) + or 1 + ) + if G.is_directed(): + for u, v, attr in edge_list: + r = min(attr.get(capacity, inf), inf) + if not R.has_edge(u, v): + # Both (u, v) and (v, u) must be present in the residual + # network. + R.add_edge(u, v, capacity=r) + R.add_edge(v, u, capacity=0) + else: + # The edge (u, v) was added when (v, u) was visited. + R[u][v]["capacity"] = r + else: + for u, v, attr in edge_list: + # Add a pair of edges with equal residual capacities. + r = min(attr.get(capacity, inf), inf) + R.add_edge(u, v, capacity=r) + R.add_edge(v, u, capacity=r) + + # Record the value simulating infinity. + R.graph["inf"] = inf + + return R + + +@nx._dispatchable( + graphs="R", + preserve_edge_attrs={"R": {"capacity": float("inf")}}, + preserve_graph_attrs=True, +) +def detect_unboundedness(R, s, t): + """Detect an infinite-capacity s-t path in R.""" + q = deque([s]) + seen = {s} + inf = R.graph["inf"] + while q: + u = q.popleft() + for v, attr in R[u].items(): + if attr["capacity"] == inf and v not in seen: + if v == t: + raise nx.NetworkXUnbounded( + "Infinite capacity path, flow unbounded above."
+ ) + seen.add(v) + q.append(v) + + +@nx._dispatchable(graphs={"G": 0, "R": 1}, preserve_edge_attrs={"R": {"flow": None}}) +def build_flow_dict(G, R): + """Build a flow dictionary from a residual network.""" + flow_dict = {} + for u in G: + flow_dict[u] = dict.fromkeys(G[u], 0) + flow_dict[u].update( + (v, attr["flow"]) for v, attr in R[u].items() if attr["flow"] > 0 + ) + return flow_dict diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py new file mode 100644 index 0000000..92a69cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/graph_hashing.py @@ -0,0 +1,435 @@ +""" +Functions for hashing graphs to strings. +Isomorphic graphs should be assigned identical hashes. +For now, only Weisfeiler-Lehman hashing is implemented. +""" + +import warnings +from collections import Counter, defaultdict +from hashlib import blake2b + +import networkx as nx + +__all__ = ["weisfeiler_lehman_graph_hash", "weisfeiler_lehman_subgraph_hashes"] + + +def _hash_label(label, digest_size): + return blake2b(label.encode("ascii"), digest_size=digest_size).hexdigest() + + +def _init_node_labels(G, edge_attr, node_attr): + if node_attr: + return {u: str(dd[node_attr]) for u, dd in G.nodes(data=True)} + elif edge_attr: + return dict.fromkeys(G, "") + else: + warnings.warn( + "The hashes produced for graphs without node or edge attributes " + "changed in v3.5 due to a bugfix (see documentation).", + UserWarning, + stacklevel=2, + ) + if nx.is_directed(G): + return {u: str(G.in_degree(u)) + "_" + str(G.out_degree(u)) for u in G} + else: + return {u: str(deg) for u, deg in G.degree()} + + +def _neighborhood_aggregate_undirected(G, node, node_labels, edge_attr=None): + """ + Compute new labels for given node in an undirected graph by aggregating + the labels of each node's neighbors. + """ + label_list = [] + for nbr in G.neighbors(node): + prefix = "" if edge_attr is None else str(G[node][nbr][edge_attr]) + label_list.append(prefix + node_labels[nbr]) + return node_labels[node] + "".join(sorted(label_list)) + + +def _neighborhood_aggregate_directed(G, node, node_labels, edge_attr=None): + """ + Compute new labels for given node in a directed graph by aggregating + the labels of each node's neighbors. + """ + successor_labels = [] + for nbr in G.successors(node): + prefix = "s_" + ("" if edge_attr is None else str(G[node][nbr][edge_attr])) + successor_labels.append(prefix + node_labels[nbr]) + + predecessor_labels = [] + for nbr in G.predecessors(node): + prefix = "p_" + ("" if edge_attr is None else str(G[nbr][node][edge_attr])) + predecessor_labels.append(prefix + node_labels[nbr]) + return ( + node_labels[node] + + "".join(sorted(successor_labels)) + + "".join(sorted(predecessor_labels)) + ) + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +def weisfeiler_lehman_graph_hash( + G, edge_attr=None, node_attr=None, iterations=3, digest_size=16 +): + """Return Weisfeiler Lehman (WL) graph hash. + + .. Warning:: Hash values for directed graphs and graphs without edge or + node attributes have changed in v3.5. In previous versions, + directed graphs did not distinguish in- and outgoing edges. Also, + graphs without attributes set initial states such that effectively + one more iteration of WL occurred than indicated by `iterations`.
+ For undirected graphs without node or edge labels, the old + hashes can be obtained by increasing the iteration count by one. + For more details, see `issue #7806 + `_. + + The function iteratively aggregates and hashes neighborhoods of each node. + After each node's neighbors are hashed to obtain updated node labels, + a hashed histogram of resulting labels is returned as the final hash. + + Hashes are identical for isomorphic graphs and there exist strong + guarantees that non-isomorphic graphs will get different hashes. + See [1]_ for details. + + If no node or edge attributes are provided, the degree of each node + is used as its initial label. + Otherwise, node and/or edge labels are used to compute the hash. + + Parameters + ---------- + G : graph + The graph to be hashed. + Can have node and/or edge attributes. Can also have no attributes. + edge_attr : string, optional (default=None) + The key in edge attribute dictionary to be used for hashing. + If None, edge labels are ignored. + node_attr: string, optional (default=None) + The key in node attribute dictionary to be used for hashing. + If None, and no edge_attr given, use the degrees of the nodes as labels. + iterations: int, optional (default=3) + Number of neighbor aggregations to perform. + Should be larger for larger graphs. + digest_size: int, optional (default=16) + Size (in bytes) of blake2b hash digest to use for hashing node labels. + + Returns + ------- + h : string + Hexadecimal string corresponding to hash of `G` (length ``2 * digest_size``). + + Raises + ------ + ValueError + If `iterations` is not a positive number. + + Examples + -------- + Two graphs with edge attributes that are isomorphic, except for + differences in the edge labels. + + >>> G1 = nx.Graph() + >>> G1.add_edges_from( + ... [ + ... (1, 2, {"label": "A"}), + ... (2, 3, {"label": "A"}), + ... (3, 1, {"label": "A"}), + ... (1, 4, {"label": "B"}), + ... ] + ... ) + >>> G2 = nx.Graph() + >>> G2.add_edges_from( + ... [ + ... (5, 6, {"label": "B"}), + ... (6, 7, {"label": "A"}), + ... (7, 5, {"label": "A"}), + ... (7, 8, {"label": "A"}), + ... ] + ... ) + + Omitting the `edge_attr` option results in identical hashes. + + >>> nx.weisfeiler_lehman_graph_hash(G1) + 'c045439172215f49e0bef8c3d26c6b61' + >>> nx.weisfeiler_lehman_graph_hash(G2) + 'c045439172215f49e0bef8c3d26c6b61' + + With edge labels, the graphs are no longer assigned + the same hash digest. + + >>> nx.weisfeiler_lehman_graph_hash(G1, edge_attr="label") + 'c653d85538bcf041d88c011f4f905f10' + >>> nx.weisfeiler_lehman_graph_hash(G2, edge_attr="label") + '3dcd84af1ca855d0eff3c978d88e7ec7' + + Notes + ----- + To return the WL hashes of each subgraph of a graph, use + `weisfeiler_lehman_subgraph_hashes` + + Similarity between hashes does not imply similarity between graphs. + + References + ---------- + .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen, + Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman + Graph Kernels. Journal of Machine Learning Research. 2011.
+ http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf + + See also + -------- + weisfeiler_lehman_subgraph_hashes + """ + + if G.is_directed(): + _neighborhood_aggregate = _neighborhood_aggregate_directed + warnings.warn( + "The hashes produced for directed graphs changed in version v3.5" + " due to a bugfix to track in and out edges separately (see documentation).", + UserWarning, + stacklevel=2, + ) + else: + _neighborhood_aggregate = _neighborhood_aggregate_undirected + + def weisfeiler_lehman_step(G, labels, edge_attr=None): + """ + Apply neighborhood aggregation to each node + in the graph. + Computes a dictionary with labels for each node. + """ + new_labels = {} + for node in G.nodes(): + label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr) + new_labels[node] = _hash_label(label, digest_size) + return new_labels + + if iterations <= 0: + raise ValueError("The WL algorithm requires that `iterations` be positive") + + # set initial node labels + node_labels = _init_node_labels(G, edge_attr, node_attr) + + # If the graph has no attributes, initial labels are the nodes' degrees. + # This is equivalent to doing the first iteration of WL. + if not edge_attr and not node_attr: + iterations -= 1 + + subgraph_hash_counts = [] + for _ in range(iterations): + node_labels = weisfeiler_lehman_step(G, node_labels, edge_attr=edge_attr) + counter = Counter(node_labels.values()) + # sort the counter, extend total counts + subgraph_hash_counts.extend(sorted(counter.items(), key=lambda x: x[0])) + + # hash the final counter + return _hash_label(str(tuple(subgraph_hash_counts)), digest_size) + + +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +def weisfeiler_lehman_subgraph_hashes( + G, + edge_attr=None, + node_attr=None, + iterations=3, + digest_size=16, + include_initial_labels=False, +): + """ + Return a dictionary of subgraph hashes by node. + + .. Warning:: Hash values for directed graphs have changed in version + v3.5. In previous versions, directed graphs did not distinguish in- + and outgoing edges. + Graphs without attributes previously performed an extra iteration of + WL at initialisation, which was not visible in the output of this + function. This hash value is now included in the returned dictionary, + shifting the other calculated hashes one position to the right. To + obtain the same last subgraph hash, increase the number of iterations + by one. + For more details, see `issue #7806 + `_. + + Dictionary keys are nodes in `G`, and values are a list of hashes. + Each hash corresponds to a subgraph rooted at a given node u in `G`. + Lists of subgraph hashes are sorted in increasing order of depth from + their root node, with the hash at index i corresponding to a subgraph + of nodes at most i hops (i edges) away from u. Thus, each list will contain + `iterations` elements - a hash for a subgraph at each depth. If + `include_initial_labels` is set to `True`, each list will additionally + have a hash of the initial node label (or equivalently a + subgraph of depth 0) prepended, totalling ``iterations + 1`` elements. + + The function iteratively aggregates and hashes neighborhoods of each node. + This is achieved for each step by replacing for each node its label from + the previous iteration with its hashed 1-hop neighborhood aggregate. + The new node label is then appended to a list of node labels for each + node.
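+ + For instance, an undirected node currently labeled ``"a"`` whose neighbors + carry the labels ``"c"`` and ``"b"`` receives the hash of the string + ``"abc"`` as its new label: its own label followed by the sorted labels of + its neighbors (when `edge_attr` is given, each neighbor label is first + prefixed with the value of that attribute on the connecting edge).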
+ + To aggregate neighborhoods for a node $u$ at each step, all labels of + nodes adjacent to $u$ are concatenated. If the `edge_attr` parameter is set, + labels for each neighboring node are prefixed with the value of this attribute + along the connecting edge from this neighbor to node $u$. The resulting string + is then hashed to compress this information into a fixed digest size. + + Thus, at the i-th iteration, nodes within i hops influence any given + hashed node label. We can therefore say that at depth $i$ for node $u$ + we have a hash for a subgraph induced by the i-hop neighborhood of $u$. + + The output can be used to create general Weisfeiler-Lehman graph kernels, + or generate features for graphs or nodes - for example to generate 'words' in + a graph as seen in the 'graph2vec' algorithm. + See [1]_ & [2]_ respectively for details. + + Hashes are identical for isomorphic subgraphs and there exist strong + guarantees that non-isomorphic graphs will get different hashes. + See [1]_ for details. + + If no node or edge attributes are provided, the degree of each node + is used as its initial label. + Otherwise, node and/or edge labels are used to compute the hash. + + Parameters + ---------- + G : graph + The graph to be hashed. + Can have node and/or edge attributes. Can also have no attributes. + edge_attr : string, optional (default=None) + The key in edge attribute dictionary to be used for hashing. + If None, edge labels are ignored. + node_attr : string, optional (default=None) + The key in node attribute dictionary to be used for hashing. + If None, and no edge_attr given, use the degrees of the nodes as labels. + If None, and edge_attr is given, each node starts with an identical label. + iterations : int, optional (default=3) + Number of neighbor aggregations to perform. + Should be larger for larger graphs. + digest_size : int, optional (default=16) + Size (in bytes) of blake2b hash digest to use for hashing node labels. + The default size is 16 bytes. + include_initial_labels : bool, optional (default=False) + If True, include the hashed initial node label as the first subgraph + hash for each node. + + Returns + ------- + node_subgraph_hashes : dict + A dictionary with each key given by a node in G, and each value given + by the subgraph hashes in order of depth from the key node. + Hashes are hexadecimal strings (hence ``2 * digest_size`` long). + + + Raises + ------ + ValueError + If `iterations` is not a positive number. + + Examples + -------- + Finding similar nodes in different graphs: + + >>> G1 = nx.Graph() + >>> G1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 5), (4, 6), (5, 7), (6, 7)]) + >>> G2 = nx.Graph() + >>> G2.add_edges_from([(1, 3), (2, 3), (1, 6), (1, 5), (4, 6)]) + >>> g1_hashes = nx.weisfeiler_lehman_subgraph_hashes( + ... G1, iterations=4, digest_size=8 + ... ) + >>> g2_hashes = nx.weisfeiler_lehman_subgraph_hashes( + ... G2, iterations=4, digest_size=8 + ... ) + + Even though G1 and G2 are not isomorphic (they have different numbers of edges), + the hash sequences of depth 3 for node 1 in G1 and node 5 in G2 are similar: + + >>> g1_hashes[1] + ['f6fc42039fba3776', 'a93b64973cfc8897', 'db1b43ae35a1878f', '57872a7d2059c1c0'] + >>> g2_hashes[5] + ['f6fc42039fba3776', 'a93b64973cfc8897', 'db1b43ae35a1878f', '1716d2a4012fa4bc'] + + The first 3 WL subgraph hashes match. From this we can conclude that it's very + likely that the 3-hop neighborhoods around these nodes are isomorphic.
+ + However, the 4-hop neighborhoods of ``G1`` and ``G2`` are not isomorphic since the + 4th hashes in the lists above are not equal. + + These nodes may be candidates to be classified together since their local topology + is similar. + + Notes + ----- + To hash the full graph when subgraph hashes are not needed, use + `weisfeiler_lehman_graph_hash` for efficiency. + + Similarity between hashes does not imply similarity between graphs. + + References + ---------- + .. [1] Shervashidze, Nino, Pascal Schweitzer, Erik Jan Van Leeuwen, + Kurt Mehlhorn, and Karsten M. Borgwardt. Weisfeiler Lehman + Graph Kernels. Journal of Machine Learning Research. 2011. + http://www.jmlr.org/papers/volume12/shervashidze11a/shervashidze11a.pdf + .. [2] Annamalai Narayanan, Mahinthan Chandramohan, Rajasekar Venkatesan, + Lihui Chen, Yang Liu and Shantanu Jaiswal. graph2vec: Learning + Distributed Representations of Graphs. arXiv. 2017 + https://arxiv.org/pdf/1707.05005.pdf + + See also + -------- + weisfeiler_lehman_graph_hash + """ + + if G.is_directed(): + _neighborhood_aggregate = _neighborhood_aggregate_directed + warnings.warn( + "The hashes produced for directed graphs changed in v3.5" + " due to a bugfix (see documentation).", + UserWarning, + stacklevel=2, + ) + else: + _neighborhood_aggregate = _neighborhood_aggregate_undirected + + def weisfeiler_lehman_step(G, labels, node_subgraph_hashes, edge_attr=None): + """ + Apply neighborhood aggregation to each node + in the graph. + Computes a dictionary with labels for each node. + Appends the new hashed label to the dictionary of subgraph hashes + originating from and indexed by each node in G + """ + new_labels = {} + for node in G.nodes(): + label = _neighborhood_aggregate(G, node, labels, edge_attr=edge_attr) + hashed_label = _hash_label(label, digest_size) + new_labels[node] = hashed_label + node_subgraph_hashes[node].append(hashed_label) + return new_labels + + if iterations <= 0: + raise ValueError("The WL algorithm requires that `iterations` be positive") + + node_labels = _init_node_labels(G, edge_attr, node_attr) + + if include_initial_labels: + node_subgraph_hashes = { + k: [_hash_label(v, digest_size)] for k, v in node_labels.items() + } + else: + node_subgraph_hashes = defaultdict(list) + + # If the graph has no attributes, initial labels are the nodes' degrees. + # This is equivalent to doing the first iteration of WL. + if not edge_attr and not node_attr: + iterations -= 1 + for node in G.nodes(): + hashed_label = _hash_label(node_labels[node], digest_size) + node_subgraph_hashes[node].append(hashed_label) + + for _ in range(iterations): + node_labels = weisfeiler_lehman_step( + G, node_labels, node_subgraph_hashes, edge_attr + ) + + return dict(node_subgraph_hashes) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py new file mode 100644 index 0000000..d5d82de --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/graphical.py @@ -0,0 +1,483 @@ +"""Test sequences for graphiness.""" + +import heapq + +import networkx as nx + +__all__ = [ + "is_graphical", + "is_multigraphical", + "is_pseudographical", + "is_digraphical", + "is_valid_degree_sequence_erdos_gallai", + "is_valid_degree_sequence_havel_hakimi", +] + + +@nx._dispatchable(graphs=None) +def is_graphical(sequence, method="eg"): + """Returns True if sequence is a valid degree sequence. + + A degree sequence is valid if some graph can realize it.
+ + Parameters + ---------- + sequence : list or iterable container + A sequence of integer node degrees + + method : "eg" | "hh" (default: 'eg') + The method used to validate the degree sequence. + "eg" corresponds to the Erdős-Gallai algorithm + [EG1960]_, [choudum1986]_, and + "hh" to the Havel-Hakimi algorithm + [havel1955]_, [hakimi1962]_, [CL1996]_. + + Returns + ------- + valid : bool + True if the sequence is a valid degree sequence and False if not. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> sequence = (d for n, d in G.degree()) + >>> nx.is_graphical(sequence) + True + + To test a non-graphical sequence: + >>> sequence_list = [d for n, d in G.degree()] + >>> sequence_list[-1] += 1 + >>> nx.is_graphical(sequence_list) + False + + References + ---------- + .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960. + .. [choudum1986] S.A. Choudum. "A simple proof of the Erdős-Gallai theorem on + graph sequences." Bulletin of the Australian Mathematical Society, 33, + pp 67-70, 1986. https://doi.org/10.1017/S0004972700002872 + .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs" + Casopis Pest. Mat. 80, 477-480, 1955. + .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as + Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962. + .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs", + Chapman and Hall/CRC, 1996. + """ + if method == "eg": + valid = is_valid_degree_sequence_erdos_gallai(list(sequence)) + elif method == "hh": + valid = is_valid_degree_sequence_havel_hakimi(list(sequence)) + else: + msg = "`method` must be 'eg' or 'hh'" + raise nx.NetworkXException(msg) + return valid + + +def _basic_graphical_tests(deg_sequence): + # Sort and perform some simple tests on the sequence + deg_sequence = nx.utils.make_list_of_ints(deg_sequence) + p = len(deg_sequence) + num_degs = [0] * p + dmax, dmin, dsum, n = 0, p, 0, 0 + for d in deg_sequence: + # Reject if degree is negative or larger than the sequence length + if d < 0 or d >= p: + raise nx.NetworkXUnfeasible + # Process only the non-zero integers + elif d > 0: + dmax, dmin, dsum, n = max(dmax, d), min(dmin, d), dsum + d, n + 1 + num_degs[d] += 1 + # Reject sequence if it has odd sum or is oversaturated + if dsum % 2 or dsum > n * (n - 1): + raise nx.NetworkXUnfeasible + return dmax, dmin, dsum, n, num_degs + + +@nx._dispatchable(graphs=None) +def is_valid_degree_sequence_havel_hakimi(deg_sequence): + r"""Returns True if deg_sequence can be realized by a simple graph. + + The validation proceeds using the Havel-Hakimi theorem + [havel1955]_, [hakimi1962]_, [CL1996]_. + Worst-case run time is $O(s)$ where $s$ is the sum of the sequence. + + Parameters + ---------- + deg_sequence : list + A list of integers where each element specifies the degree of a node + in a graph. + + Returns + ------- + valid : bool + True if deg_sequence is graphical and False if not. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)]) + >>> sequence = (d for _, d in G.degree()) + >>> nx.is_valid_degree_sequence_havel_hakimi(sequence) + True + + To test a non-valid sequence: + >>> sequence_list = [d for _, d in G.degree()] + >>> sequence_list[-1] += 1 + >>> nx.is_valid_degree_sequence_havel_hakimi(sequence_list) + False + + Notes + ----- + The ZZ condition says that for the sequence d if + + .. math:: + |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)} + + then d is graphical. This was shown in Theorem 6 in [1]_. 
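+ + For example, the sequence ``[3, 3, 3, 3]`` reduces step by step as + ``[3, 3, 3, 3] -> [2, 2, 2] -> [1, 1] -> [0]``: each step removes one + largest entry d and subtracts 1 from the d next-largest entries, and no + step fails, so the sequence is graphical (it is realized by the complete + graph on four nodes).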
+ + References + ---------- + .. [1] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory + of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992). + .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs" + Casopis Pest. Mat. 80, 477-480, 1955. + .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as + Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962. + .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs", + Chapman and Hall/CRC, 1996. + """ + try: + dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence) + except nx.NetworkXUnfeasible: + return False + # Accept if sequence has no non-zero degrees or passes the ZZ condition + if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1): + return True + + modstubs = [0] * (dmax + 1) + # Successively reduce degree sequence by removing the maximum degree + while n > 0: + # Retrieve the maximum degree in the sequence + while num_degs[dmax] == 0: + dmax -= 1 + # If there are not enough stubs to connect to, then the sequence is + # not graphical + if dmax > n - 1: + return False + + # Remove largest stub in list + num_degs[dmax], n = num_degs[dmax] - 1, n - 1 + # Reduce the next dmax largest stubs + mslen = 0 + k = dmax + for i in range(dmax): + while num_degs[k] == 0: + k -= 1 + num_degs[k], n = num_degs[k] - 1, n - 1 + if k > 1: + modstubs[mslen] = k - 1 + mslen += 1 + # Add back to the list any non-zero stubs that were removed + for i in range(mslen): + stub = modstubs[i] + num_degs[stub], n = num_degs[stub] + 1, n + 1 + return True + + +@nx._dispatchable(graphs=None) +def is_valid_degree_sequence_erdos_gallai(deg_sequence): + r"""Returns True if deg_sequence can be realized by a simple graph. + + The validation is done using the Erdős-Gallai theorem [EG1960]_. + + Parameters + ---------- + deg_sequence : list + A list of integers + + Returns + ------- + valid : bool + True if deg_sequence is graphical and False if not. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)]) + >>> sequence = (d for _, d in G.degree()) + >>> nx.is_valid_degree_sequence_erdos_gallai(sequence) + True + + To test a non-valid sequence: + >>> sequence_list = [d for _, d in G.degree()] + >>> sequence_list[-1] += 1 + >>> nx.is_valid_degree_sequence_erdos_gallai(sequence_list) + False + + Notes + ----- + + This implementation uses an equivalent form of the Erdős-Gallai criterion. + Worst-case run time is $O(n)$ where $n$ is the length of the sequence. + + Specifically, a sequence d is graphical if and only if the + sum of the sequence is even and for all strong indices k in the sequence, + + .. math:: + + \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_j,k) + = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j ) + + A strong index k is any index where d_k >= k and the value n_j is the + number of occurrences of j in d. The maximal strong index is called the + Durfee index. + + This particular rearrangement comes from the proof of Theorem 3 in [2]_. + + The ZZ condition says that for the sequence d if + + .. math:: + |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)} + + then d is graphical. This was shown in Theorem 6 in [2]_. + + References + ---------- + .. [1] A. Tripathi and S. Vijay. "A note on a theorem of Erdős & Gallai", + Discrete Mathematics, 265, pp. 417-420 (2003). + .. [2] I.E. Zverovich and V.E. Zverovich.
"Contributions to the theory + of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992). + .. [EG1960] Erdős and Gallai, Mat. Lapok 11 264, 1960. + """ + try: + dmax, dmin, dsum, n, num_degs = _basic_graphical_tests(deg_sequence) + except nx.NetworkXUnfeasible: + return False + # Accept if sequence has no non-zero degrees or passes the ZZ condition + if n == 0 or 4 * dmin * n >= (dmax + dmin + 1) * (dmax + dmin + 1): + return True + + # Perform the EG checks using the reformulation of Zverovich and Zverovich + k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0 + for dk in range(dmax, dmin - 1, -1): + if dk < k + 1: # Check if already past Durfee index + return True + if num_degs[dk] > 0: + run_size = num_degs[dk] # Process a run of identical-valued degrees + if dk < k + run_size: # Check if end of run is past Durfee index + run_size = dk - k # Adjust back to Durfee index + sum_deg += run_size * dk + for v in range(run_size): + sum_nj += num_degs[k + v] + sum_jnj += (k + v) * num_degs[k + v] + k += run_size + if sum_deg > k * (n - 1) - k * sum_nj + sum_jnj: + return False + return True + + +@nx._dispatchable(graphs=None) +def is_multigraphical(sequence): + """Returns True if some multigraph can realize the sequence. + + Parameters + ---------- + sequence : list + A list of integers + + Returns + ------- + valid : bool + True if deg_sequence is a multigraphic degree sequence and False if not. + + Examples + -------- + >>> G = nx.MultiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)]) + >>> sequence = (d for _, d in G.degree()) + >>> nx.is_multigraphical(sequence) + True + + To test a non-multigraphical sequence: + >>> sequence_list = [d for _, d in G.degree()] + >>> sequence_list[-1] += 1 + >>> nx.is_multigraphical(sequence_list) + False + + Notes + ----- + The worst-case run time is $O(n)$ where $n$ is the length of the sequence. + + References + ---------- + .. [1] S. L. Hakimi. "On the realizability of a set of integers as + degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506 + (1962). + """ + try: + deg_sequence = nx.utils.make_list_of_ints(sequence) + except nx.NetworkXError: + return False + dsum, dmax = 0, 0 + for d in deg_sequence: + if d < 0: + return False + dsum, dmax = dsum + d, max(dmax, d) + if dsum % 2 or dsum < 2 * dmax: + return False + return True + + +@nx._dispatchable(graphs=None) +def is_pseudographical(sequence): + """Returns True if some pseudograph can realize the sequence. + + Every nonnegative integer sequence with an even sum is pseudographical + (see [1]_). + + Parameters + ---------- + sequence : list or iterable container + A sequence of integer node degrees + + Returns + ------- + valid : bool + True if the sequence is a pseudographic degree sequence and False if not. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)]) + >>> sequence = (d for _, d in G.degree()) + >>> nx.is_pseudographical(sequence) + True + + To test a non-pseudographical sequence: + >>> sequence_list = [d for _, d in G.degree()] + >>> sequence_list[-1] += 1 + >>> nx.is_pseudographical(sequence_list) + False + + Notes + ----- + The worst-case run time is $O(n)$ where n is the length of the sequence. + + References + ---------- + .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs + and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12), + pp. 778-782 (1976). 
+ """ + try: + deg_sequence = nx.utils.make_list_of_ints(sequence) + except nx.NetworkXError: + return False + return sum(deg_sequence) % 2 == 0 and min(deg_sequence) >= 0 + + +@nx._dispatchable(graphs=None) +def is_digraphical(in_sequence, out_sequence): + r"""Returns True if some directed graph can realize the in- and out-degree + sequences. + + Parameters + ---------- + in_sequence : list or iterable container + A sequence of integer node in-degrees + + out_sequence : list or iterable container + A sequence of integer node out-degrees + + Returns + ------- + valid : bool + True if in and out-sequences are digraphic False if not. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (4, 2), (5, 1), (5, 4)]) + >>> in_seq = (d for n, d in G.in_degree()) + >>> out_seq = (d for n, d in G.out_degree()) + >>> nx.is_digraphical(in_seq, out_seq) + True + + To test a non-digraphical scenario: + >>> in_seq_list = [d for n, d in G.in_degree()] + >>> in_seq_list[-1] += 1 + >>> nx.is_digraphical(in_seq_list, out_seq) + False + + Notes + ----- + This algorithm is from Kleitman and Wang [1]_. + The worst case runtime is $O(s \times \log n)$ where $s$ and $n$ are the + sum and length of the sequences respectively. + + References + ---------- + .. [1] D.J. Kleitman and D.L. Wang + Algorithms for Constructing Graphs and Digraphs with Given Valences + and Factors, Discrete Mathematics, 6(1), pp. 79-88 (1973) + """ + try: + in_deg_sequence = nx.utils.make_list_of_ints(in_sequence) + out_deg_sequence = nx.utils.make_list_of_ints(out_sequence) + except nx.NetworkXError: + return False + # Process the sequences and form two heaps to store degree pairs with + # either zero or non-zero out degrees + sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence) + maxn = max(nin, nout) + maxin = 0 + if maxn == 0: + return True + stubheap, zeroheap = [], [] + for n in range(maxn): + in_deg, out_deg = 0, 0 + if n < nout: + out_deg = out_deg_sequence[n] + if n < nin: + in_deg = in_deg_sequence[n] + if in_deg < 0 or out_deg < 0: + return False + sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg) + if in_deg > 0: + stubheap.append((-1 * out_deg, -1 * in_deg)) + elif out_deg > 0: + zeroheap.append(-1 * out_deg) + if sumin != sumout: + return False + heapq.heapify(stubheap) + heapq.heapify(zeroheap) + + modstubs = [(0, 0)] * (maxin + 1) + # Successively reduce degree sequence by removing the maximum out degree + while stubheap: + # Take the first value in the sequence with non-zero in degree + (freeout, freein) = heapq.heappop(stubheap) + freein *= -1 + if freein > len(stubheap) + len(zeroheap): + return False + + # Attach out stubs to the nodes with the most in stubs + mslen = 0 + for i in range(freein): + if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]): + stubout = heapq.heappop(zeroheap) + stubin = 0 + else: + (stubout, stubin) = heapq.heappop(stubheap) + if stubout == 0: + return False + # Check if target is now totally connected + if stubout + 1 < 0 or stubin < 0: + modstubs[mslen] = (stubout + 1, stubin) + mslen += 1 + + # Add back the nodes to the heap that still have available stubs + for i in range(mslen): + stub = modstubs[i] + if stub[1] < 0: + heapq.heappush(stubheap, stub) + else: + heapq.heappush(zeroheap, stub[0]) + if freeout < 0: + heapq.heappush(zeroheap, freeout) + return True diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py 
new file mode 100644 index 0000000..d5a0552 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/hierarchy.py @@ -0,0 +1,57 @@ +""" +Flow Hierarchy. +""" + +import networkx as nx + +__all__ = ["flow_hierarchy"] + + +@nx._dispatchable(edge_attrs="weight") +def flow_hierarchy(G, weight=None): + """Returns the flow hierarchy of a directed network. + + Flow hierarchy is defined as the fraction of edges not participating + in cycles in a directed graph [1]_. + + Parameters + ---------- + G : DiGraph or MultiDiGraph + A directed graph + + weight : string, optional (default=None) + Attribute to use for edge weights. If None the weight defaults to 1. + + Returns + ------- + h : float + Flow hierarchy value + + Raises + ------ + NetworkXError + If `G` is not a directed graph or if `G` has no edges. + + Notes + ----- + The algorithm described in [1]_ computes the flow hierarchy through + exponentiation of the adjacency matrix. This function implements an + alternative approach that finds strongly connected components. + An edge is in a cycle if and only if it is in a strongly connected + component, which can be found in $O(m)$ time using Tarjan's algorithm. + + References + ---------- + .. [1] Luo, J.; Magee, C.L. (2011), + Detecting evolving patterns of self-organizing networks by flow + hierarchy measurement, Complexity, Volume 16 Issue 6 53-61. + DOI: 10.1002/cplx.20368 + http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf + """ + # corner case: G has no edges + if nx.is_empty(G): + raise nx.NetworkXError("flow_hierarchy not applicable to empty graphs") + if not G.is_directed(): + raise nx.NetworkXError("G must be a digraph in flow_hierarchy") + scc = nx.strongly_connected_components(G) + return 1 - sum(G.subgraph(c).size(weight) for c in scc) / G.size(weight) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py new file mode 100644 index 0000000..9d3dd30 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/hybrid.py @@ -0,0 +1,196 @@ +""" +Provides functions for finding and testing for locally `(k, l)`-connected +graphs. + +""" + +import copy + +import networkx as nx + +__all__ = ["kl_connected_subgraph", "is_kl_connected"] + + +@nx._dispatchable(returns_graph=True) +def kl_connected_subgraph(G, k, l, low_memory=False, same_as_graph=False): + """Returns the maximum locally `(k, l)`-connected subgraph of `G`. + + A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the + graph there are at least `l` edge-disjoint paths of length at most `k` + joining `u` to `v`. + + Parameters + ---------- + G : NetworkX graph + The graph in which to find a maximum locally `(k, l)`-connected + subgraph. + + k : integer + The maximum length of paths to consider. A higher number means a looser + connectivity requirement. + + l : integer + The number of edge-disjoint paths. A higher number means a stricter + connectivity requirement. + + low_memory : bool + If this is True, this function uses an algorithm that uses slightly + more time but less memory. + + same_as_graph : bool + If True then return a tuple of the form `(H, is_same)`, + where `H` is the maximum locally `(k, l)`-connected subgraph and + `is_same` is a Boolean representing whether `G` is locally `(k, + l)`-connected (and hence, whether `H` is simply a copy of the input + graph `G`). 
+ + Returns + ------- + NetworkX graph or two-tuple + If `same_as_graph` is True, then this function returns a + two-tuple as described above. Otherwise, it returns only the maximum + locally `(k, l)`-connected subgraph. + + See also + -------- + is_kl_connected + + References + ---------- + .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid + Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg, + 2004. 89--104. + + """ + H = copy.deepcopy(G) # subgraph we construct by removing from G + + graphOK = True + deleted_some = True # hack to start off the while loop + while deleted_some: + deleted_some = False + # We use `for edge in list(H.edges()):` instead of + # `for edge in H.edges():` because we edit the graph `H` in + # the loop. Hence using an iterator will result in + # `RuntimeError: dictionary changed size during iteration` + for edge in list(H.edges()): + (u, v) = edge + # Get copy of graph needed for this search + if low_memory: + verts = {u, v} + for i in range(k): + for w in verts.copy(): + verts.update(G[w]) + G2 = G.subgraph(verts).copy() + else: + G2 = copy.deepcopy(G) + ### + path = [u, v] + cnt = 0 + accept = 0 + while path: + cnt += 1 # Found a path + if cnt >= l: + accept = 1 + break + # remove the edges along this path so the next search + # finds an edge-disjoint one + prev = u + for w in path: + if prev != w: + G2.remove_edge(prev, w) + prev = w + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + try: + path = nx.shortest_path(G2, u, v) # ??? should "Cutoff" be k+1? + except nx.NetworkXNoPath: + path = False + # No Other Paths + if accept == 0: + H.remove_edge(u, v) + deleted_some = True + if graphOK: + graphOK = False + # We looked through all edges and removed none of them. + # So, H is the maximal (k,l)-connected subgraph of G + if same_as_graph: + return (H, graphOK) + return H + + +@nx._dispatchable +def is_kl_connected(G, k, l, low_memory=False): + """Returns True if and only if `G` is locally `(k, l)`-connected. + + A graph is locally `(k, l)`-connected if for each edge `(u, v)` in the + graph there are at least `l` edge-disjoint paths of length at most `k` + joining `u` to `v`. + + Parameters + ---------- + G : NetworkX graph + The graph to test for local `(k, l)`-connectedness. + + k : integer + The maximum length of paths to consider. A higher number means a looser + connectivity requirement. + + l : integer + The number of edge-disjoint paths. A higher number means a stricter + connectivity requirement. + + low_memory : bool + If this is True, this function uses an algorithm that uses slightly + more time but less memory. + + Returns + ------- + bool + Whether the graph is locally `(k, l)`-connected. + + See also + -------- + kl_connected_subgraph + + References + ---------- + .. [1] Chung, Fan and Linyuan Lu. "The Small World Phenomenon in Hybrid + Power Law Graphs." *Complex Networks*. Springer Berlin Heidelberg, + 2004. 89--104. + + """ + graphOK = True + for edge in G.edges(): + (u, v) = edge + # Get copy of graph needed for this search + if low_memory: + verts = {u, v} + for i in range(k): + for w in verts.copy(): + verts.update(G.neighbors(w)) + # Copy the subgraph view so edges can be removed below. + G2 = G.subgraph(verts).copy() + else: + G2 = copy.deepcopy(G) + ### + path = [u, v] + cnt = 0 + accept = 0 + while path: + cnt += 1 # Found a path + if cnt >= l: + accept = 1 + break + # remove the edges along this path so the next search + # finds an edge-disjoint one + prev = u + for w in path: + if w != prev: + G2.remove_edge(prev, w) + prev = w + # path = shortest_path(G2, u, v, k) # ??? should "Cutoff" be k+1? + try: + path = nx.shortest_path(G2, u, v) # ???
should "Cutoff" be k+1? + except nx.NetworkXNoPath: + path = False + # No Other Paths + if accept == 0: + graphOK = False + break + # return status + return graphOK diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py new file mode 100644 index 0000000..1ea8abe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isolate.py @@ -0,0 +1,107 @@ +""" +Functions for identifying isolate (degree zero) nodes. +""" + +import networkx as nx + +__all__ = ["is_isolate", "isolates", "number_of_isolates"] + + +@nx._dispatchable +def is_isolate(G, n): + """Determines whether a node is an isolate. + + An *isolate* is a node with no neighbors (that is, with degree + zero). For directed graphs, this means no in-neighbors and no + out-neighbors. + + Parameters + ---------- + G : NetworkX graph + + n : node + A node in `G`. + + Returns + ------- + is_isolate : bool + True if and only if `n` has no neighbors. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edge(1, 2) + >>> G.add_node(3) + >>> nx.is_isolate(G, 2) + False + >>> nx.is_isolate(G, 3) + True + """ + return G.degree(n) == 0 + + +@nx._dispatchable +def isolates(G): + """Iterator over isolates in the graph. + + An *isolate* is a node with no neighbors (that is, with degree + zero). For directed graphs, this means no in-neighbors and no + out-neighbors. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + iterator + An iterator over the isolates of `G`. + + Examples + -------- + To get a list of all isolates of a graph, use the :class:`list` + constructor:: + + >>> G = nx.Graph() + >>> G.add_edge(1, 2) + >>> G.add_node(3) + >>> list(nx.isolates(G)) + [3] + + To remove all isolates in the graph, first create a list of the + isolates, then use :meth:`Graph.remove_nodes_from`:: + + >>> G.remove_nodes_from(list(nx.isolates(G))) + >>> list(G) + [1, 2] + + For digraphs, isolates have zero in-degree and zero out_degre:: + + >>> G = nx.DiGraph([(0, 1), (1, 2)]) + >>> G.add_node(3) + >>> list(nx.isolates(G)) + [3] + + """ + return (n for n, d in G.degree() if d == 0) + + +@nx._dispatchable +def number_of_isolates(G): + """Returns the number of isolates in the graph. + + An *isolate* is a node with no neighbors (that is, with degree + zero). For directed graphs, this means no in-neighbors and no + out-neighbors. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + int + The number of degree zero nodes in the graph `G`. 
+ + """ + return sum(1 for v in isolates(G)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py new file mode 100644 index 0000000..58c2268 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__init__.py @@ -0,0 +1,7 @@ +from networkx.algorithms.isomorphism.isomorph import * +from networkx.algorithms.isomorphism.vf2userfunc import * +from networkx.algorithms.isomorphism.matchhelpers import * +from networkx.algorithms.isomorphism.temporalisomorphvf2 import * +from networkx.algorithms.isomorphism.ismags import * +from networkx.algorithms.isomorphism.tree_isomorphism import * +from networkx.algorithms.isomorphism.vf2pp import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d6f0ec7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-312.pyc new file mode 100644 index 0000000..f815ef4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/ismags.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-312.pyc new file mode 100644 index 0000000..eeb2497 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-312.pyc new file mode 100644 index 0000000..cb8fb10 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/isomorphvf2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-312.pyc new file mode 100644 index 0000000..7f4fd19 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/matchhelpers.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-312.pyc new file mode 100644 index 0000000..795b2f2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/temporalisomorphvf2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-312.pyc new file mode 100644 index 
0000000..eee53ec Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/tree_isomorphism.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-312.pyc new file mode 100644 index 0000000..2af9ef0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2pp.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-312.pyc new file mode 100644 index 0000000..6c7903a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/__pycache__/vf2userfunc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py new file mode 100644 index 0000000..0387dbe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/ismags.py @@ -0,0 +1,1174 @@ +""" +ISMAGS Algorithm +================ + +Provides a Python implementation of the ISMAGS algorithm. [1]_ + +It is capable of finding (subgraph) isomorphisms between two graphs, taking the +symmetry of the subgraph into account. In most cases the VF2 algorithm is +faster (at least on small graphs) than this implementation, but in some cases +there is an exponential number of isomorphisms that are symmetrically +equivalent. In that case, the ISMAGS algorithm will provide only one solution +per symmetry group. + +>>> petersen = nx.petersen_graph() +>>> ismags = nx.isomorphism.ISMAGS(petersen, petersen) +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=False)) +>>> len(isomorphisms) +120 +>>> isomorphisms = list(ismags.isomorphisms_iter(symmetry=True)) +>>> answer = [{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}] +>>> answer == isomorphisms +True + +In addition, this implementation also provides an interface to find the +largest common induced subgraph [2]_ between any two graphs, again taking +symmetry into account. Given `graph` and `subgraph` the algorithm will remove +nodes from the `subgraph` until `subgraph` is isomorphic to a subgraph of +`graph`. Since only the symmetry of `subgraph` is taken into account it is +worth thinking about how you provide your graphs: + +>>> graph1 = nx.path_graph(4) +>>> graph2 = nx.star_graph(3) +>>> ismags = nx.isomorphism.ISMAGS(graph1, graph2) +>>> ismags.is_isomorphic() +False +>>> largest_common_subgraph = list(ismags.largest_common_subgraph()) +>>> answer = [{1: 0, 0: 1, 2: 2}, {2: 0, 1: 1, 3: 2}] +>>> answer == largest_common_subgraph +True +>>> ismags2 = nx.isomorphism.ISMAGS(graph2, graph1) +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph()) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... ] +>>> answer == largest_common_subgraph +True + +However, when not taking symmetry into account, it doesn't matter: + +>>> largest_common_subgraph = list(ismags.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 2: 1, 0: 2}, +... {2: 0, 1: 1, 3: 2}, +... 
{2: 0, 3: 1, 1: 2}, +... {1: 0, 0: 1, 2: 3}, +... {1: 0, 2: 1, 0: 3}, +... {2: 0, 1: 1, 3: 3}, +... {2: 0, 3: 1, 1: 3}, +... {1: 0, 0: 2, 2: 3}, +... {1: 0, 2: 2, 0: 3}, +... {2: 0, 1: 2, 3: 3}, +... {2: 0, 3: 2, 1: 3}, +... ] +>>> answer == largest_common_subgraph +True +>>> largest_common_subgraph = list(ismags2.largest_common_subgraph(symmetry=False)) +>>> answer = [ +... {1: 0, 0: 1, 2: 2}, +... {1: 0, 0: 1, 3: 2}, +... {2: 0, 0: 1, 1: 2}, +... {2: 0, 0: 1, 3: 2}, +... {3: 0, 0: 1, 1: 2}, +... {3: 0, 0: 1, 2: 2}, +... {1: 1, 0: 2, 2: 3}, +... {1: 1, 0: 2, 3: 3}, +... {2: 1, 0: 2, 1: 3}, +... {2: 1, 0: 2, 3: 3}, +... {3: 1, 0: 2, 1: 3}, +... {3: 1, 0: 2, 2: 3}, +... ] +>>> answer == largest_common_subgraph +True + +Notes +----- +- The current implementation works for undirected graphs only. The algorithm + in general should work for directed graphs as well though. +- Node keys for both provided graphs need to be fully orderable as well as + hashable. +- Node and edge equality is assumed to be transitive: if A is equal to B, and + B is equal to C, then A is equal to C. + +References +---------- +.. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle, + M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General + Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph + Enumeration", PLoS One 9(5): e97896, 2014. + https://doi.org/10.1371/journal.pone.0097896 +.. [2] https://en.wikipedia.org/wiki/Maximum_common_induced_subgraph +""" + +__all__ = ["ISMAGS"] + +import itertools +from collections import Counter, defaultdict +from functools import reduce, wraps + + +def are_all_equal(iterable): + """ + Returns ``True`` if and only if all elements in `iterable` are equal, and + ``False`` otherwise. + + Parameters + ---------- + iterable: collections.abc.Iterable + The container whose elements will be checked. + + Returns + ------- + bool + ``True`` iff all elements in `iterable` compare equal, ``False`` + otherwise. + """ + try: + shape = iterable.shape + except AttributeError: + pass + else: + if len(shape) > 1: + message = "The function does not work on multidimensional arrays." + raise NotImplementedError(message) from None + + iterator = iter(iterable) + first = next(iterator, None) + return all(item == first for item in iterator) + + +def make_partitions(items, test): + """ + Partitions items into sets based on the outcome of ``test(item1, item2)``. + Pairs of items for which `test` returns `True` end up in the same set. + + Parameters + ---------- + items : collections.abc.Iterable[collections.abc.Hashable] + Items to partition + test : collections.abc.Callable[collections.abc.Hashable, collections.abc.Hashable] + A function that will be called with 2 arguments, taken from items. + Should return `True` if those 2 items need to end up in the same + partition, and `False` otherwise. + + Returns + ------- + list[set] + A list of sets, with each set containing part of the items in `items`, + such that ``all(test(*pair) for pair in itertools.combinations(set, 2)) + == True`` + + Notes + ----- + The function `test` is assumed to be transitive: if ``test(a, b)`` and + ``test(b, c)`` return ``True``, then ``test(a, c)`` must also be ``True``.
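+ + Examples + -------- + A small illustration (the parity test used here is only an example, not + something used elsewhere in this module): partitioning integers by parity + groups the odd and the even items together. + + >>> make_partitions([1, 2, 3, 4], lambda a, b: a % 2 == b % 2) == [{1, 3}, {2, 4}] + True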
+ """ + partitions = [] + for item in items: + for partition in partitions: + p_item = next(iter(partition)) + if test(item, p_item): + partition.add(item) + break + else: # No break + partitions.append({item}) + return partitions + + +def partition_to_color(partitions): + """ + Creates a dictionary that maps each item in each partition to the index of + the partition to which it belongs. + + Parameters + ---------- + partitions: collections.abc.Sequence[collections.abc.Iterable] + As returned by :func:`make_partitions`. + + Returns + ------- + dict + """ + colors = {} + for color, keys in enumerate(partitions): + for key in keys: + colors[key] = color + return colors + + +def intersect(collection_of_sets): + """ + Given an collection of sets, returns the intersection of those sets. + + Parameters + ---------- + collection_of_sets: collections.abc.Collection[set] + A collection of sets. + + Returns + ------- + set + An intersection of all sets in `collection_of_sets`. Will have the same + type as the item initially taken from `collection_of_sets`. + """ + collection_of_sets = list(collection_of_sets) + first = collection_of_sets.pop() + out = reduce(set.intersection, collection_of_sets, set(first)) + return type(first)(out) + + +class ISMAGS: + """ + Implements the ISMAGS subgraph matching algorithm. [1]_ ISMAGS stands for + "Index-based Subgraph Matching Algorithm with General Symmetries". As the + name implies, it is symmetry aware and will only generate non-symmetric + isomorphisms. + + Notes + ----- + The implementation imposes additional conditions compared to the VF2 + algorithm on the graphs provided and the comparison functions + (:attr:`node_equality` and :attr:`edge_equality`): + + - Node keys in both graphs must be orderable as well as hashable. + - Equality must be transitive: if A is equal to B, and B is equal to C, + then A must be equal to C. + + Attributes + ---------- + graph: networkx.Graph + subgraph: networkx.Graph + node_equality: collections.abc.Callable + The function called to see if two nodes should be considered equal. + It's signature looks like this: + ``f(graph1: networkx.Graph, node1, graph2: networkx.Graph, node2) -> bool``. + `node1` is a node in `graph1`, and `node2` a node in `graph2`. + Constructed from the argument `node_match`. + edge_equality: collections.abc.Callable + The function called to see if two edges should be considered equal. + It's signature looks like this: + ``f(graph1: networkx.Graph, edge1, graph2: networkx.Graph, edge2) -> bool``. + `edge1` is an edge in `graph1`, and `edge2` an edge in `graph2`. + Constructed from the argument `edge_match`. + + References + ---------- + .. [1] M. Houbraken, S. Demeyer, T. Michoel, P. Audenaert, D. Colle, + M. Pickavet, "The Index-Based Subgraph Matching Algorithm with General + Symmetries (ISMAGS): Exploiting Symmetry for Faster Subgraph + Enumeration", PLoS One 9(5): e97896, 2014. + https://doi.org/10.1371/journal.pone.0097896 + """ + + def __init__(self, graph, subgraph, node_match=None, edge_match=None, cache=None): + """ + Parameters + ---------- + graph: networkx.Graph + subgraph: networkx.Graph + node_match: collections.abc.Callable or None + Function used to determine whether two nodes are equivalent. Its + signature should look like ``f(n1: dict, n2: dict) -> bool``, with + `n1` and `n2` node property dicts. See also + :func:`~networkx.algorithms.isomorphism.categorical_node_match` and + friends. + If `None`, all nodes are considered equal. 
+ edge_match: collections.abc.Callable or None + Function used to determine whether two edges are equivalent. Its + signature should look like ``f(e1: dict, e2: dict) -> bool``, with + `e1` and `e2` edge property dicts. See also + :func:`~networkx.algorithms.isomorphism.categorical_edge_match` and + friends. + If `None`, all edges are considered equal. + cache: collections.abc.Mapping + A cache used for caching graph symmetries. + """ + # TODO: graph and subgraph setter methods that invalidate the caches. + # TODO: allow for precomputed partitions and colors + self.graph = graph + self.subgraph = subgraph + self._symmetry_cache = cache + # Naming conventions are taken from the original paper. For your + # sanity: + # sg: subgraph + # g: graph + # e: edge(s) + # n: node(s) + # So: sgn means "subgraph nodes". + self._sgn_partitions_ = None + self._sge_partitions_ = None + + self._sgn_colors_ = None + self._sge_colors_ = None + + self._gn_partitions_ = None + self._ge_partitions_ = None + + self._gn_colors_ = None + self._ge_colors_ = None + + self._node_compat_ = None + self._edge_compat_ = None + + if node_match is None: + self.node_equality = self._node_match_maker(lambda n1, n2: True) + self._sgn_partitions_ = [set(self.subgraph.nodes)] + self._gn_partitions_ = [set(self.graph.nodes)] + self._node_compat_ = {0: 0} + else: + self.node_equality = self._node_match_maker(node_match) + if edge_match is None: + self.edge_equality = self._edge_match_maker(lambda e1, e2: True) + self._sge_partitions_ = [set(self.subgraph.edges)] + self._ge_partitions_ = [set(self.graph.edges)] + self._edge_compat_ = {0: 0} + else: + self.edge_equality = self._edge_match_maker(edge_match) + + @property + def _sgn_partitions(self): + if self._sgn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.subgraph, node1, self.subgraph, node2) + + self._sgn_partitions_ = make_partitions(self.subgraph.nodes, nodematch) + return self._sgn_partitions_ + + @property + def _sge_partitions(self): + if self._sge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.subgraph, edge1, self.subgraph, edge2) + + self._sge_partitions_ = make_partitions(self.subgraph.edges, edgematch) + return self._sge_partitions_ + + @property + def _gn_partitions(self): + if self._gn_partitions_ is None: + + def nodematch(node1, node2): + return self.node_equality(self.graph, node1, self.graph, node2) + + self._gn_partitions_ = make_partitions(self.graph.nodes, nodematch) + return self._gn_partitions_ + + @property + def _ge_partitions(self): + if self._ge_partitions_ is None: + + def edgematch(edge1, edge2): + return self.edge_equality(self.graph, edge1, self.graph, edge2) + + self._ge_partitions_ = make_partitions(self.graph.edges, edgematch) + return self._ge_partitions_ + + @property + def _sgn_colors(self): + if self._sgn_colors_ is None: + self._sgn_colors_ = partition_to_color(self._sgn_partitions) + return self._sgn_colors_ + + @property + def _sge_colors(self): + if self._sge_colors_ is None: + self._sge_colors_ = partition_to_color(self._sge_partitions) + return self._sge_colors_ + + @property + def _gn_colors(self): + if self._gn_colors_ is None: + self._gn_colors_ = partition_to_color(self._gn_partitions) + return self._gn_colors_ + + @property + def _ge_colors(self): + if self._ge_colors_ is None: + self._ge_colors_ = partition_to_color(self._ge_partitions) + return self._ge_colors_ + + @property + def _node_compatibility(self): + if self._node_compat_ is not None: + 
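+            # Reuse the mapping when it was precomputed in __init__ (the
+            # trivial {0: 0} when node_match is None) or built by an
+            # earlier call.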
return self._node_compat_ + self._node_compat_ = {} + for sgn_part_color, gn_part_color in itertools.product( + range(len(self._sgn_partitions)), range(len(self._gn_partitions)) + ): + sgn = next(iter(self._sgn_partitions[sgn_part_color])) + gn = next(iter(self._gn_partitions[gn_part_color])) + if self.node_equality(self.subgraph, sgn, self.graph, gn): + self._node_compat_[sgn_part_color] = gn_part_color + return self._node_compat_ + + @property + def _edge_compatibility(self): + if self._edge_compat_ is not None: + return self._edge_compat_ + self._edge_compat_ = {} + for sge_part_color, ge_part_color in itertools.product( + range(len(self._sge_partitions)), range(len(self._ge_partitions)) + ): + sge = next(iter(self._sge_partitions[sge_part_color])) + ge = next(iter(self._ge_partitions[ge_part_color])) + if self.edge_equality(self.subgraph, sge, self.graph, ge): + self._edge_compat_[sge_part_color] = ge_part_color + return self._edge_compat_ + + @staticmethod + def _node_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, node1, graph2, node2): + return cmp(graph1.nodes[node1], graph2.nodes[node2]) + + return comparer + + @staticmethod + def _edge_match_maker(cmp): + @wraps(cmp) + def comparer(graph1, edge1, graph2, edge2): + return cmp(graph1.edges[edge1], graph2.edges[edge2]) + + return comparer + + def find_isomorphisms(self, symmetry=True): + """Find all subgraph isomorphisms between subgraph and graph + + Finds isomorphisms where :attr:`subgraph` <= :attr:`graph`. + + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + isomorphisms may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + elif len(self.graph) < len(self.subgraph): + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + la_candidates = self._get_lookahead_candidates() + for sgn in self.subgraph: + extra_candidates = la_candidates[sgn] + if extra_candidates: + candidates[sgn] = candidates[sgn] | {frozenset(extra_candidates)} + + if any(candidates.values()): + start_sgn = min(candidates, key=lambda n: min(candidates[n], key=len)) + candidates[start_sgn] = (intersect(candidates[start_sgn]),) + yield from self._map_nodes(start_sgn, candidates, constraints) + else: + return + + @staticmethod + def _find_neighbor_color_count(graph, node, node_color, edge_color): + """ + For `node` in `graph`, count the number of edges of a specific color + it has to nodes of a specific color. + """ + counts = Counter() + neighbors = graph[node] + for neighbor in neighbors: + n_color = node_color[neighbor] + if (node, neighbor) in edge_color: + e_color = edge_color[node, neighbor] + else: + e_color = edge_color[neighbor, node] + counts[e_color, n_color] += 1 + return counts + + def _get_lookahead_candidates(self): + """ + Returns a mapping of {subgraph node: collection of graph nodes} for + which the graph nodes are feasible candidates for the subgraph node, as + determined by looking ahead one edge. 
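+
+        A graph node remains a candidate for a subgraph node only if, for
+        every (edge colour, node colour) combination, it has at least as many
+        incident edges of that kind. A minimal sketch of the dominance test
+        used below (the colour keys here are purely illustrative):
+
+        >>> from collections import Counter
+        >>> sg_count = Counter({(0, 0): 2})  # subgraph node: 2 such edges
+        >>> g_count = Counter({(0, 0): 3})  # graph node: 3 such edges
+        >>> all(sg_count[x] <= g_count[x] for x in sg_count)
+        True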
+ """ + g_counts = {} + for gn in self.graph: + g_counts[gn] = self._find_neighbor_color_count( + self.graph, gn, self._gn_colors, self._ge_colors + ) + candidates = defaultdict(set) + for sgn in self.subgraph: + sg_count = self._find_neighbor_color_count( + self.subgraph, sgn, self._sgn_colors, self._sge_colors + ) + new_sg_count = Counter() + for (sge_color, sgn_color), count in sg_count.items(): + try: + ge_color = self._edge_compatibility[sge_color] + gn_color = self._node_compatibility[sgn_color] + except KeyError: + pass + else: + new_sg_count[ge_color, gn_color] = count + + for gn, g_count in g_counts.items(): + if all(new_sg_count[x] <= g_count[x] for x in new_sg_count): + # Valid candidate + candidates[sgn].add(gn) + return candidates + + def largest_common_subgraph(self, symmetry=True): + """ + Find the largest common induced subgraphs between :attr:`subgraph` and + :attr:`graph`. + + Parameters + ---------- + symmetry: bool + Whether symmetry should be taken into account. If False, found + largest common subgraphs may be symmetrically equivalent. + + Yields + ------ + dict + The found isomorphism mappings of {graph_node: subgraph_node}. + """ + # The networkx VF2 algorithm is slightly funny in when it yields an + # empty dict and when not. + if not self.subgraph: + yield {} + return + elif not self.graph: + return + + if symmetry: + _, cosets = self.analyze_symmetry( + self.subgraph, self._sgn_partitions, self._sge_colors + ) + constraints = self._make_constraints(cosets) + else: + constraints = [] + + candidates = self._find_nodecolor_candidates() + + if any(candidates.values()): + yield from self._largest_common_subgraph(candidates, constraints) + else: + return + + def analyze_symmetry(self, graph, node_partitions, edge_colors): + """ + Find a minimal set of permutations and corresponding co-sets that + describe the symmetry of `graph`, given the node and edge equalities + given by `node_partitions` and `edge_colors`, respectively. + + Parameters + ---------- + graph : networkx.Graph + The graph whose symmetry should be analyzed. + node_partitions : list of sets + A list of sets containing node keys. Node keys in the same set + are considered equivalent. Every node key in `graph` should be in + exactly one of the sets. If all nodes are equivalent, this should + be ``[set(graph.nodes)]``. + edge_colors : dict mapping edges to their colors + A dict mapping every edge in `graph` to its corresponding color. + Edges with the same color are considered equivalent. If all edges + are equivalent, this should be ``{e: 0 for e in graph.edges}``. + + + Returns + ------- + set[frozenset] + The found permutations. This is a set of frozensets of pairs of node + keys which can be exchanged without changing :attr:`subgraph`. + dict[collections.abc.Hashable, set[collections.abc.Hashable]] + The found co-sets. The co-sets is a dictionary of + ``{node key: set of node keys}``. + Every key-value pair describes which ``values`` can be interchanged + without changing nodes less than ``key``. 
+ """ + if self._symmetry_cache is not None: + key = hash( + ( + tuple(graph.nodes), + tuple(graph.edges), + tuple(map(tuple, node_partitions)), + tuple(edge_colors.items()), + ) + ) + if key in self._symmetry_cache: + return self._symmetry_cache[key] + node_partitions = list( + self._refine_node_partitions(graph, node_partitions, edge_colors) + ) + assert len(node_partitions) == 1 + node_partitions = node_partitions[0] + permutations, cosets = self._process_ordered_pair_partitions( + graph, node_partitions, node_partitions, edge_colors + ) + if self._symmetry_cache is not None: + self._symmetry_cache[key] = permutations, cosets + return permutations, cosets + + def is_isomorphic(self, symmetry=False): + """ + Returns True if :attr:`graph` is isomorphic to :attr:`subgraph` and + False otherwise. + + Returns + ------- + bool + """ + return len(self.subgraph) == len(self.graph) and self.subgraph_is_isomorphic( + symmetry + ) + + def subgraph_is_isomorphic(self, symmetry=False): + """ + Returns True if a subgraph of :attr:`graph` is isomorphic to + :attr:`subgraph` and False otherwise. + + Returns + ------- + bool + """ + # symmetry=False, since we only need to know whether there is any + # example; figuring out all symmetry elements probably costs more time + # than it gains. + isom = next(self.subgraph_isomorphisms_iter(symmetry=symmetry), None) + return isom is not None + + def isomorphisms_iter(self, symmetry=True): + """ + Does the same as :meth:`find_isomorphisms` if :attr:`graph` and + :attr:`subgraph` have the same number of nodes. + """ + if len(self.graph) == len(self.subgraph): + yield from self.subgraph_isomorphisms_iter(symmetry=symmetry) + + def subgraph_isomorphisms_iter(self, symmetry=True): + """Alternative name for :meth:`find_isomorphisms`.""" + return self.find_isomorphisms(symmetry) + + def _find_nodecolor_candidates(self): + """ + Per node in subgraph find all nodes in graph that have the same color. + """ + candidates = defaultdict(set) + for sgn in self.subgraph.nodes: + sgn_color = self._sgn_colors[sgn] + if sgn_color in self._node_compatibility: + gn_color = self._node_compatibility[sgn_color] + candidates[sgn].add(frozenset(self._gn_partitions[gn_color])) + else: + candidates[sgn].add(frozenset()) + candidates = dict(candidates) + for sgn, options in candidates.items(): + candidates[sgn] = frozenset(options) + return candidates + + @staticmethod + def _make_constraints(cosets): + """ + Turn cosets into constraints. + """ + constraints = [] + for node_i, node_ts in cosets.items(): + for node_t in node_ts: + if node_i != node_t: + # Node i must be smaller than node t. + constraints.append((node_i, node_t)) + return constraints + + @staticmethod + def _find_node_edge_color(graph, node_colors, edge_colors): + """ + For every node in graph, come up with a color that combines 1) the + color of the node, and 2) the number of edges of a color to each type + of node. 
+ """ + counts = defaultdict(lambda: defaultdict(int)) + for node1, node2 in graph.edges: + if (node1, node2) in edge_colors: + # FIXME directed graphs + ecolor = edge_colors[node1, node2] + else: + ecolor = edge_colors[node2, node1] + # Count per node how many edges it has of what color to nodes of + # what color + counts[node1][ecolor, node_colors[node2]] += 1 + counts[node2][ecolor, node_colors[node1]] += 1 + + node_edge_colors = {} + for node in graph.nodes: + node_edge_colors[node] = node_colors[node], set(counts[node].items()) + + return node_edge_colors + + @staticmethod + def _get_permutations_by_length(items): + """ + Get all permutations of items, but only permute items with the same + length. + + >>> found = list(ISMAGS._get_permutations_by_length([[1], [2], [3, 4], [4, 5]])) + >>> answer = [ + ... (([1], [2]), ([3, 4], [4, 5])), + ... (([1], [2]), ([4, 5], [3, 4])), + ... (([2], [1]), ([3, 4], [4, 5])), + ... (([2], [1]), ([4, 5], [3, 4])), + ... ] + >>> found == answer + True + """ + by_len = defaultdict(list) + for item in items: + by_len[len(item)].append(item) + + yield from itertools.product( + *(itertools.permutations(by_len[l]) for l in sorted(by_len)) + ) + + @classmethod + def _refine_node_partitions(cls, graph, node_partitions, edge_colors, branch=False): + """ + Given a partition of nodes in graph, make the partitions smaller such + that all nodes in a partition have 1) the same color, and 2) the same + number of edges to specific other partitions. + """ + + def equal_color(node1, node2): + return node_edge_colors[node1] == node_edge_colors[node2] + + node_partitions = list(node_partitions) + node_colors = partition_to_color(node_partitions) + node_edge_colors = cls._find_node_edge_color(graph, node_colors, edge_colors) + if all( + are_all_equal(node_edge_colors[node] for node in partition) + for partition in node_partitions + ): + yield node_partitions + return + + new_partitions = [] + output = [new_partitions] + for partition in node_partitions: + if not are_all_equal(node_edge_colors[node] for node in partition): + refined = make_partitions(partition, equal_color) + if ( + branch + and len(refined) != 1 + and len({len(r) for r in refined}) != len([len(r) for r in refined]) + ): + # This is where it breaks. There are multiple new cells + # in refined with the same length, and their order + # matters. + # So option 1) Hit it with a big hammer and simply make all + # orderings. + permutations = cls._get_permutations_by_length(refined) + new_output = [] + for n_p in output: + for permutation in permutations: + new_output.append(n_p + list(permutation[0])) + output = new_output + else: + for n_p in output: + n_p.extend(sorted(refined, key=len)) + else: + for n_p in output: + n_p.append(partition) + for n_p in output: + yield from cls._refine_node_partitions(graph, n_p, edge_colors, branch) + + def _edges_of_same_color(self, sgn1, sgn2): + """ + Returns all edges in :attr:`graph` that have the same colour as the + edge between sgn1 and sgn2 in :attr:`subgraph`. + """ + if (sgn1, sgn2) in self._sge_colors: + # FIXME directed graphs + sge_color = self._sge_colors[sgn1, sgn2] + else: + sge_color = self._sge_colors[sgn2, sgn1] + if sge_color in self._edge_compatibility: + ge_color = self._edge_compatibility[sge_color] + g_edges = self._ge_partitions[ge_color] + else: + g_edges = [] + return g_edges + + def _map_nodes(self, sgn, candidates, constraints, mapping=None, to_be_mapped=None): + """ + Find all subgraph isomorphisms honoring constraints. 
+ """ + if mapping is None: + mapping = {} + else: + mapping = mapping.copy() + if to_be_mapped is None: + to_be_mapped = set(self.subgraph.nodes) + + # Note, we modify candidates here. Doesn't seem to affect results, but + # remember this. + # candidates = candidates.copy() + sgn_candidates = intersect(candidates[sgn]) + candidates[sgn] = frozenset([sgn_candidates]) + for gn in sgn_candidates: + # We're going to try to map sgn to gn. + if gn in mapping.values() or sgn not in to_be_mapped: + # gn is already mapped to something + continue # pragma: no cover + + # REDUCTION and COMBINATION + mapping[sgn] = gn + # BASECASE + if to_be_mapped == set(mapping.keys()): + yield {v: k for k, v in mapping.items()} + continue + left_to_map = to_be_mapped - set(mapping.keys()) + + new_candidates = candidates.copy() + sgn_nbrs = set(self.subgraph[sgn]) + not_gn_nbrs = set(self.graph.nodes) - set(self.graph[gn]) + for sgn2 in left_to_map: + if sgn2 not in sgn_nbrs: + gn2_options = not_gn_nbrs + else: + # Get all edges to gn of the right color: + g_edges = self._edges_of_same_color(sgn, sgn2) + # FIXME directed graphs + # And all nodes involved in those which are connected to gn + gn2_options = {n for e in g_edges for n in e if gn in e} + # Node color compatibility should be taken care of by the + # initial candidate lists made by find_subgraphs + + # Add gn2_options to the right collection. Since new_candidates + # is a dict of frozensets of frozensets of node indices it's + # a bit clunky. We can't do .add, and + also doesn't work. We + # could do |, but I deem union to be clearer. + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) + + if (sgn, sgn2) in constraints: + gn2_options = {gn2 for gn2 in self.graph if gn2 > gn} + elif (sgn2, sgn) in constraints: + gn2_options = {gn2 for gn2 in self.graph if gn2 < gn} + else: + continue # pragma: no cover + new_candidates[sgn2] = new_candidates[sgn2].union( + [frozenset(gn2_options)] + ) + + # The next node is the one that is unmapped and has fewest + # candidates + next_sgn = min(left_to_map, key=lambda n: min(new_candidates[n], key=len)) + yield from self._map_nodes( + next_sgn, + new_candidates, + constraints, + mapping=mapping, + to_be_mapped=to_be_mapped, + ) + # Unmap sgn-gn. Strictly not necessary since it'd get overwritten + # when making a new mapping for sgn. + # del mapping[sgn] + + def _largest_common_subgraph(self, candidates, constraints, to_be_mapped=None): + """ + Find all largest common subgraphs honoring constraints. + """ + if to_be_mapped is None: + to_be_mapped = {frozenset(self.subgraph.nodes)} + + # The LCS problem is basically a repeated subgraph isomorphism problem + # with smaller and smaller subgraphs. We store the nodes that are + # "part of" the subgraph in to_be_mapped, and we make it a little + # smaller every iteration. + + current_size = len(next(iter(to_be_mapped), [])) + + found_iso = False + if current_size <= len(self.graph): + # There's no point in trying to find isomorphisms of + # graph >= subgraph if subgraph has more nodes than graph. + + # Try the isomorphism first with the nodes with lowest ID. So sort + # them. Those are more likely to be part of the final + # correspondence. This makes finding the first answer(s) faster. In + # theory. 
+ for nodes in sorted(to_be_mapped, key=sorted): + # Find the isomorphism between subgraph[to_be_mapped] <= graph + next_sgn = min(nodes, key=lambda n: min(candidates[n], key=len)) + isomorphs = self._map_nodes( + next_sgn, candidates, constraints, to_be_mapped=nodes + ) + + # This is effectively `yield from isomorphs`, except that we look + # whether an item was yielded. + try: + item = next(isomorphs) + except StopIteration: + pass + else: + yield item + yield from isomorphs + found_iso = True + + # BASECASE + if found_iso or current_size == 1: + # Shrinking has no point because either 1) we end up with a smaller + # common subgraph (and we want the largest), or 2) there'll be no + # more subgraph. + return + + left_to_be_mapped = set() + for nodes in to_be_mapped: + for sgn in nodes: + # We're going to remove sgn from to_be_mapped, but subject to + # symmetry constraints. We know that for every constraint we + # have those subgraph nodes are equal. So whenever we would + # remove the lower part of a constraint, remove the higher + # instead. This is all dealth with by _remove_node. And because + # left_to_be_mapped is a set, we don't do double work. + + # And finally, make the subgraph one node smaller. + # REDUCTION + new_nodes = self._remove_node(sgn, nodes, constraints) + left_to_be_mapped.add(new_nodes) + # COMBINATION + yield from self._largest_common_subgraph( + candidates, constraints, to_be_mapped=left_to_be_mapped + ) + + @staticmethod + def _remove_node(node, nodes, constraints): + """ + Returns a new set where node has been removed from nodes, subject to + symmetry constraints. We know, that for every constraint we have + those subgraph nodes are equal. So whenever we would remove the + lower part of a constraint, remove the higher instead. + """ + while True: + for low, high in constraints: + if low == node and high in nodes: + node = high + break + else: # no break, couldn't find node in constraints + break + return frozenset(nodes - {node}) + + @staticmethod + def _find_permutations(top_partitions, bottom_partitions): + """ + Return the pairs of top/bottom partitions where the partitions are + different. Ensures that all partitions in both top and bottom + partitions have size 1. + """ + # Find permutations + permutations = set() + for top, bot in zip(top_partitions, bottom_partitions): + # top and bot have only one element + if len(top) != 1 or len(bot) != 1: + raise IndexError( + "Not all nodes are coupled. This is" + f" impossible: {top_partitions}, {bottom_partitions}" + ) + if top != bot: + permutations.add(frozenset((next(iter(top)), next(iter(bot))))) + return permutations + + @staticmethod + def _update_orbits(orbits, permutations): + """ + Update orbits based on permutations. Orbits is modified in place. + For every pair of items in permutations their respective orbits are + merged. + """ + for permutation in permutations: + node, node2 = permutation + # Find the orbits that contain node and node2, and replace the + # orbit containing node with the union + first = second = None + for idx, orbit in enumerate(orbits): + if first is not None and second is not None: + break + if node in orbit: + first = idx + if node2 in orbit: + second = idx + if first != second: + orbits[first].update(orbits[second]) + del orbits[second] + + def _couple_nodes( + self, + top_partitions, + bottom_partitions, + pair_idx, + t_node, + b_node, + graph, + edge_colors, + ): + """ + Generate new partitions from top and bottom_partitions where t_node is + coupled to b_node. 
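+        Coupling places `t_node` and `b_node` in singleton partitions of
+        their own, after which both sides are refined again.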
pair_idx is the index of the partitions where t_ and + b_node can be found. + """ + t_partition = top_partitions[pair_idx] + b_partition = bottom_partitions[pair_idx] + assert t_node in t_partition and b_node in b_partition + # Couple node to node2. This means they get their own partition + new_top_partitions = [top.copy() for top in top_partitions] + new_bottom_partitions = [bot.copy() for bot in bottom_partitions] + new_t_groups = {t_node}, t_partition - {t_node} + new_b_groups = {b_node}, b_partition - {b_node} + # Replace the old partitions with the coupled ones + del new_top_partitions[pair_idx] + del new_bottom_partitions[pair_idx] + new_top_partitions[pair_idx:pair_idx] = new_t_groups + new_bottom_partitions[pair_idx:pair_idx] = new_b_groups + + new_top_partitions = self._refine_node_partitions( + graph, new_top_partitions, edge_colors + ) + new_bottom_partitions = self._refine_node_partitions( + graph, new_bottom_partitions, edge_colors, branch=True + ) + new_top_partitions = list(new_top_partitions) + assert len(new_top_partitions) == 1 + new_top_partitions = new_top_partitions[0] + for bot in new_bottom_partitions: + yield list(new_top_partitions), bot + + def _process_ordered_pair_partitions( + self, + graph, + top_partitions, + bottom_partitions, + edge_colors, + orbits=None, + cosets=None, + ): + """ + Processes ordered pair partitions as per the reference paper. Finds and + returns all permutations and cosets that leave the graph unchanged. + """ + if orbits is None: + orbits = [{node} for node in graph.nodes] + else: + # Note that we don't copy orbits when we are given one. This means + # we leak information between the recursive branches. This is + # intentional! + orbits = orbits + if cosets is None: + cosets = {} + else: + cosets = cosets.copy() + + if not all( + len(t_p) == len(b_p) for t_p, b_p in zip(top_partitions, bottom_partitions) + ): + # This used to be an assertion, but it gets tripped in rare cases: + # 5 - 4 \ / 12 - 13 + # 0 - 3 + # 9 - 8 / \ 16 - 17 + # Assume 0 and 3 are coupled and no longer equivalent. At that point + # {4, 8} and {12, 16} are no longer equivalent, and neither are + # {5, 9} and {13, 17}. Coupling 4 and refinement results in 5 and 9 + # getting their own partitions, *but not 13 and 17*. Further + # iterations will attempt to couple 5 to {13, 17}, which cannot + # result in more symmetries? 
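+            # Top and bottom can no longer be matched one-to-one once the
+            # partition sizes disagree, so this branch contributes nothing.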
+ return [], cosets + + # BASECASE + if all(len(top) == 1 for top in top_partitions): + # All nodes are mapped + permutations = self._find_permutations(top_partitions, bottom_partitions) + self._update_orbits(orbits, permutations) + if permutations: + return [permutations], cosets + else: + return [], cosets + + permutations = [] + unmapped_nodes = { + (node, idx) + for idx, t_partition in enumerate(top_partitions) + for node in t_partition + if len(t_partition) > 1 + } + node, pair_idx = min(unmapped_nodes) + b_partition = bottom_partitions[pair_idx] + + for node2 in sorted(b_partition): + if len(b_partition) == 1: + # Can never result in symmetry + continue + if node != node2 and any( + node in orbit and node2 in orbit for orbit in orbits + ): + # Orbit prune branch + continue + # REDUCTION + # Couple node to node2 + partitions = self._couple_nodes( + top_partitions, + bottom_partitions, + pair_idx, + node, + node2, + graph, + edge_colors, + ) + for opp in partitions: + new_top_partitions, new_bottom_partitions = opp + + new_perms, new_cosets = self._process_ordered_pair_partitions( + graph, + new_top_partitions, + new_bottom_partitions, + edge_colors, + orbits, + cosets, + ) + # COMBINATION + permutations += new_perms + cosets.update(new_cosets) + + mapped = { + k + for top, bottom in zip(top_partitions, bottom_partitions) + for k in top + if len(top) == 1 and top == bottom + } + ks = {k for k in graph.nodes if k < node} + # Have all nodes with ID < node been mapped? + find_coset = ks <= mapped and node not in cosets + if find_coset: + # Find the orbit that contains node + for orbit in orbits: + if node in orbit: + cosets[node] = orbit.copy() + return permutations, cosets diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py new file mode 100644 index 0000000..f49594a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorph.py @@ -0,0 +1,336 @@ +""" +Graph isomorphism functions. +""" + +import itertools +from collections import Counter + +import networkx as nx +from networkx.exception import NetworkXError + +__all__ = [ + "could_be_isomorphic", + "fast_could_be_isomorphic", + "faster_could_be_isomorphic", + "is_isomorphic", +] + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def could_be_isomorphic(G1, G2, *, properties="dtc"): + """Returns False if graphs are definitely not isomorphic. + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs `G1` and `G2` must be the same type. + + properties : str, default="dct" + Determines which properties of the graph are checked. Each character + indicates a particular property as follows: + + - if ``"d"`` in `properties`: degree of each node + - if ``"t"`` in `properties`: number of triangles for each node + - if ``"c"`` in `properties`: number of maximal cliques for each node + + Unrecognized characters are ignored. The default is ``"dtc"``, which + compares the sequence of ``(degree, num_triangles, num_cliques)`` properties + between `G1` and `G2`. Generally, ``properties="dt"`` would be faster, and + ``properties="d"`` faster still. See Notes for additional details on + property selection. + + Returns + ------- + bool + A Boolean value representing whether `G1` could be isomorphic with `G2` + according to the specified `properties`. + + Notes + ----- + The triangle sequence contains the number of triangles each node is part of. 
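+
+    For instance (an illustrative pair), triangle counts separate a 6-cycle
+    from two disjoint triangles even though their degree sequences agree:
+
+    >>> G1 = nx.cycle_graph(6)
+    >>> G2 = nx.union(nx.complete_graph(3), nx.complete_graph(3), rename=("a", "b"))
+    >>> nx.could_be_isomorphic(G1, G2, properties="d")
+    True
+    >>> nx.could_be_isomorphic(G1, G2, properties="dt")
+    False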
+ The clique sequence contains for each node the number of maximal cliques + involving that node. + + Some properties are faster to compute than others. And there are other + properties we could include and don't. But of the three properties listed here, + comparing the degree distributions is the fastest. The "triangles" property + is slower (and also a stricter version of "could") and the "maximal cliques" + property is slower still, but usually faster than doing a full isomorphism + check. + """ + + # Check global properties + if G1.order() != G2.order(): + return False + + properties_to_check = set(properties) + G1_props, G2_props = [], [] + + def _properties_consistent(): + # Ravel the properties into a table with # nodes rows and # properties columns + G1_ptable = [tuple(p[n] for p in G1_props) for n in G1] + G2_ptable = [tuple(p[n] for p in G2_props) for n in G2] + + return sorted(G1_ptable) == sorted(G2_ptable) + + # The property table is built and checked as each individual property is + # added. The reason for this is the building/checking the property table + # is in general much faster than computing the properties, making it + # worthwhile to check multiple times to enable early termination when + # a subset of properties don't match + + # Degree sequence + if "d" in properties_to_check: + G1_props.append(G1.degree()) + G2_props.append(G2.degree()) + if not _properties_consistent(): + return False + # Sequence of triangles per node + if "t" in properties_to_check: + G1_props.append(nx.triangles(G1)) + G2_props.append(nx.triangles(G2)) + if not _properties_consistent(): + return False + # Sequence of maximal cliques per node + if "c" in properties_to_check: + G1_props.append(Counter(itertools.chain.from_iterable(nx.find_cliques(G1)))) + G2_props.append(Counter(itertools.chain.from_iterable(nx.find_cliques(G2)))) + if not _properties_consistent(): + return False + + # All checked conditions passed + return True + + +def graph_could_be_isomorphic(G1, G2): + """ + .. deprecated:: 3.5 + + `graph_could_be_isomorphic` is a deprecated alias for `could_be_isomorphic`. + Use `could_be_isomorphic` instead. + """ + import warnings + + warnings.warn( + "graph_could_be_isomorphic is deprecated, use `could_be_isomorphic` instead.", + category=DeprecationWarning, + stacklevel=2, + ) + return could_be_isomorphic(G1, G2) + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def fast_could_be_isomorphic(G1, G2): + """Returns False if graphs are definitely not isomorphic. + + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs G1 and G2 must be the same type. + + Notes + ----- + Checks for matching degree and triangle sequences. The triangle + sequence contains the number of triangles each node is part of. + """ + # Check global properties + if G1.order() != G2.order(): + return False + + # Check local properties + d1 = G1.degree() + t1 = nx.triangles(G1) + props1 = [[d, t1[v]] for v, d in d1] + props1.sort() + + d2 = G2.degree() + t2 = nx.triangles(G2) + props2 = [[d, t2[v]] for v, d in d2] + props2.sort() + + if props1 != props2: + return False + + # OK... + return True + + +def fast_graph_could_be_isomorphic(G1, G2): + """ + .. deprecated:: 3.5 + + `fast_graph_could_be_isomorphic` is a deprecated alias for + `fast_could_be_isomorphic`. Use `fast_could_be_isomorphic` instead. 
+ """ + import warnings + + warnings.warn( + "fast_graph_could_be_isomorphic is deprecated, use fast_could_be_isomorphic instead", + category=DeprecationWarning, + stacklevel=2, + ) + return fast_could_be_isomorphic(G1, G2) + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def faster_could_be_isomorphic(G1, G2): + """Returns False if graphs are definitely not isomorphic. + + True does NOT guarantee isomorphism. + + Parameters + ---------- + G1, G2 : graphs + The two graphs G1 and G2 must be the same type. + + Notes + ----- + Checks for matching degree sequences. + """ + # Check global properties + if G1.order() != G2.order(): + return False + + # Check local properties + d1 = sorted(d for n, d in G1.degree()) + d2 = sorted(d for n, d in G2.degree()) + + if d1 != d2: + return False + + # OK... + return True + + +def faster_graph_could_be_isomorphic(G1, G2): + """ + .. deprecated:: 3.5 + + `faster_graph_could_be_isomorphic` is a deprecated alias for + `faster_could_be_isomorphic`. Use `faster_could_be_isomorphic` instead. + """ + import warnings + + warnings.warn( + "faster_graph_could_be_isomorphic is deprecated, use faster_could_be_isomorphic instead", + category=DeprecationWarning, + stacklevel=2, + ) + return faster_could_be_isomorphic(G1, G2) + + +@nx._dispatchable( + graphs={"G1": 0, "G2": 1}, + preserve_edge_attrs="edge_match", + preserve_node_attrs="node_match", +) +def is_isomorphic(G1, G2, node_match=None, edge_match=None): + """Returns True if the graphs G1 and G2 are isomorphic and False otherwise. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 should + be considered equal during the isomorphism test. + If node_match is not specified then node attributes are not considered. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute dictionaries + for n1 and n2 as inputs. + + edge_match : callable + A function that returns True if the edge attribute dictionary + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during the isomorphism test. If edge_match is + not specified then edge attributes are not considered. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. + + Notes + ----- + Uses the vf2 algorithm [1]_. 
+ + Examples + -------- + >>> import networkx.algorithms.isomorphism as iso + + For digraphs G1 and G2, using 'weight' edge attribute (default: 1) + + >>> G1 = nx.DiGraph() + >>> G2 = nx.DiGraph() + >>> nx.add_path(G1, [1, 2, 3, 4], weight=1) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=2) + >>> em = iso.numerical_edge_match("weight", 1) + >>> nx.is_isomorphic(G1, G2) # no weights considered + True + >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights + False + + For multidigraphs G1 and G2, using 'fill' node attribute (default: '') + + >>> G1 = nx.MultiDiGraph() + >>> G2 = nx.MultiDiGraph() + >>> G1.add_nodes_from([1, 2, 3], fill="red") + >>> G2.add_nodes_from([10, 20, 30, 40], fill="red") + >>> nx.add_path(G1, [1, 2, 3, 4], weight=3, linewidth=2.5) + >>> nx.add_path(G2, [10, 20, 30, 40], weight=3) + >>> nm = iso.categorical_node_match("fill", "red") + >>> nx.is_isomorphic(G1, G2, node_match=nm) + True + + For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7) + + >>> G1.add_edge(1, 2, weight=7) + 1 + >>> G2.add_edge(10, 20) + 1 + >>> em = iso.numerical_multiedge_match("weight", 7, rtol=1e-6) + >>> nx.is_isomorphic(G1, G2, edge_match=em) + True + + For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes + with default values 7 and 2.5. Also using 'fill' node attribute with + default value 'red'. + + >>> em = iso.numerical_multiedge_match(["weight", "linewidth"], [7, 2.5]) + >>> nm = iso.categorical_node_match("fill", "red") + >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm) + True + + See Also + -------- + numerical_node_match, numerical_edge_match, numerical_multiedge_match + categorical_node_match, categorical_edge_match, categorical_multiedge_match + + References + ---------- + .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, + "An Improved Algorithm for Matching Large Graphs", + 3rd IAPR-TC15 Workshop on Graph-based Representations in + Pattern Recognition, Cuen, pp. 149-159, 2001. + https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs + """ + if G1.is_directed() and G2.is_directed(): + GM = nx.algorithms.isomorphism.DiGraphMatcher + elif (not G1.is_directed()) and (not G2.is_directed()): + GM = nx.algorithms.isomorphism.GraphMatcher + else: + raise NetworkXError("Graphs G1 and G2 are not of the same type.") + + gm = GM(G1, G2, node_match=node_match, edge_match=edge_match) + + return gm.is_isomorphic() diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py new file mode 100644 index 0000000..587503a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/isomorphvf2.py @@ -0,0 +1,1262 @@ +""" +************* +VF2 Algorithm +************* + +An implementation of VF2 algorithm for graph isomorphism testing. + +The simplest interface to use this module is to call the +:func:`is_isomorphic ` +function. + +Introduction +------------ + +The GraphMatcher and DiGraphMatcher are responsible for matching +graphs or directed graphs in a predetermined manner. This +usually means a check for an isomorphism, though other checks +are also possible. For example, a subgraph of one graph +can be checked for isomorphism to a second graph. + +Matching is done via syntactic feasibility. It is also possible +to check for semantic feasibility. Feasibility, then, is defined +as the logical AND of the two functions. 
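+
+As the next paragraph explains, a semantic check is added by subclassing.
+A minimal sketch (the ``"label"`` node attribute is purely illustrative):
+
+>>> from networkx.algorithms import isomorphism
+>>> class LabelMatcher(isomorphism.GraphMatcher):
+...     def semantic_feasibility(self, G1_node, G2_node):
+...         # Pair nodes only when their "label" attributes agree.
+...         return self.G1.nodes[G1_node].get("label") == self.G2.nodes[G2_node].get("label")
+>>> G1 = nx.Graph([(0, 1)])
+>>> G2 = nx.Graph([(0, 1)])
+>>> nx.set_node_attributes(G1, {0: "a", 1: "b"}, "label")
+>>> nx.set_node_attributes(G2, {0: "b", 1: "a"}, "label")
+>>> LabelMatcher(G1, G2).is_isomorphic()
+True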
+ +To include a semantic check, the (Di)GraphMatcher class should be +subclassed, and the +:meth:`semantic_feasibility ` +function should be redefined. By default, the semantic feasibility function always +returns ``True``. The effect of this is that semantics are not +considered in the matching of G1 and G2. + +Examples +-------- + +Suppose G1 and G2 are isomorphic graphs. Verification is as follows: + +>>> from networkx.algorithms import isomorphism +>>> G1 = nx.path_graph(4) +>>> G2 = nx.path_graph(4) +>>> GM = isomorphism.GraphMatcher(G1, G2) +>>> GM.is_isomorphic() +True + +GM.mapping stores the isomorphism mapping from G1 to G2. + +>>> GM.mapping +{0: 0, 1: 1, 2: 2, 3: 3} + + +Suppose G1 and G2 are isomorphic directed graphs. +Verification is as follows: + +>>> G1 = nx.path_graph(4, create_using=nx.DiGraph) +>>> G2 = nx.path_graph(4, create_using=nx.DiGraph) +>>> DiGM = isomorphism.DiGraphMatcher(G1, G2) +>>> DiGM.is_isomorphic() +True + +DiGM.mapping stores the isomorphism mapping from G1 to G2. + +>>> DiGM.mapping +{0: 0, 1: 1, 2: 2, 3: 3} + + + +Subgraph Isomorphism +-------------------- +Graph theory literature can be ambiguous about the meaning of the +above statement, and we seek to clarify it now. + +In the VF2 literature, a mapping ``M`` is said to be a graph-subgraph +isomorphism iff ``M`` is an isomorphism between ``G2`` and a subgraph of ``G1``. +Thus, to say that ``G1`` and ``G2`` are graph-subgraph isomorphic is to say +that a subgraph of ``G1`` is isomorphic to ``G2``. + +Other literature uses the phrase 'subgraph isomorphic' as in '``G1`` does +not have a subgraph isomorphic to ``G2``'. Another use is as an in adverb +for isomorphic. Thus, to say that ``G1`` and ``G2`` are subgraph isomorphic +is to say that a subgraph of ``G1`` is isomorphic to ``G2``. + +Finally, the term 'subgraph' can have multiple meanings. In this +context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced +subgraph isomorphisms are not directly supported, but one should be +able to perform the check by making use of +:func:`line_graph `. For +subgraphs which are not induced, the term 'monomorphism' is preferred +over 'isomorphism'. + +Let ``G = (N, E)`` be a graph with a set of nodes ``N`` and set of edges ``E``. + +If ``G' = (N', E')`` is a subgraph, then: + ``N'`` is a subset of ``N`` and + ``E'`` is a subset of ``E``. + +If ``G' = (N', E')`` is a node-induced subgraph, then: + ``N'`` is a subset of ``N`` and + ``E'`` is the subset of edges in ``E`` relating nodes in ``N'``. + +If ``G' = (N', E')`` is an edge-induced subgraph, then: + ``N'`` is the subset of nodes in ``N`` related by edges in ``E'`` and + ``E'`` is a subset of ``E``. + +If ``G' = (N', E')`` is a monomorphism, then: + ``N'`` is a subset of ``N`` and + ``E'`` is a subset of the set of edges in ``E`` relating nodes in ``N'``. + +Note that if ``G'`` is a node-induced subgraph of ``G``, then it is always a +subgraph monomorphism of ``G``, but the opposite is not always true, as a +monomorphism can have fewer edges. + +References +---------- +[1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento, + "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs", + IEEE Transactions on Pattern Analysis and Machine Intelligence, + vol. 26, no. 10, pp. 1367-1372, Oct., 2004. + http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf + +[2] L. P. Cordella, P. Foggia, C. Sansone, M. 
Vento, "An Improved + Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop + on Graph-based Representations in Pattern Recognition, Cuen, + pp. 149-159, 2001. + https://www.researchgate.net/publication/200034365_An_Improved_Algorithm_for_Matching_Large_Graphs + +See Also +-------- +:meth:`semantic_feasibility ` +:meth:`syntactic_feasibility ` + +Notes +----- + +The implementation handles both directed and undirected graphs as well +as multigraphs. + +In general, the subgraph isomorphism problem is NP-complete whereas the +graph isomorphism problem is most likely not NP-complete (although no +polynomial-time algorithm is known to exist). + +""" + +# This work was originally coded by Christopher Ellison +# as part of the Computational Mechanics Python (CMPy) project. +# James P. Crutchfield, principal investigator. +# Complexity Sciences Center and Physics Department, UC Davis. + +import sys + +import networkx as nx + +__all__ = ["GraphMatcher", "DiGraphMatcher"] + + +class GraphMatcher: + """Implementation of VF2 algorithm for matching undirected graphs. + + Suitable for Graph and MultiGraph instances. + """ + + def __init__(self, G1, G2): + """Initialize GraphMatcher. + + Parameters + ---------- + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism. + + Examples + -------- + To create a GraphMatcher which checks for syntactic feasibility: + + >>> from networkx.algorithms import isomorphism + >>> G1 = nx.path_graph(4) + >>> G2 = nx.path_graph(4) + >>> GM = isomorphism.GraphMatcher(G1, G2) + """ + if G1.is_directed() != G2.is_directed(): + raise nx.NetworkXError("G1 and G2 must have the same directedness") + + is_directed_matcher = self._is_directed_matcher() + if not is_directed_matcher and (G1.is_directed() or G2.is_directed()): + raise nx.NetworkXError( + "(Multi-)GraphMatcher() not defined for directed graphs. " + "Use (Multi-)DiGraphMatcher() instead." + ) + + if is_directed_matcher and not (G1.is_directed() and G2.is_directed()): + raise nx.NetworkXError( + "(Multi-)DiGraphMatcher() not defined for undirected graphs. " + "Use (Multi-)GraphMatcher() instead." + ) + + self.G1 = G1 + self.G2 = G2 + self.G1_nodes = set(G1.nodes()) + self.G2_nodes = set(G2.nodes()) + self.G2_node_order = {n: i for i, n in enumerate(G2)} + + # Set recursion limit. + self.old_recursion_limit = sys.getrecursionlimit() + expected_max_recursion_level = len(self.G2) + if self.old_recursion_limit < 1.5 * expected_max_recursion_level: + # Give some breathing room. + sys.setrecursionlimit(int(1.5 * expected_max_recursion_level)) + + # Declare that we will be searching for a graph-graph isomorphism. + self.test = "graph" + + # Initialize state + self.initialize() + + def _is_directed_matcher(self): + return False + + def reset_recursion_limit(self): + """Restores the recursion limit.""" + # TODO: + # Currently, we use recursion and set the recursion level higher. + # It would be nice to restore the level, but because the + # (Di)GraphMatcher classes make use of cyclic references, garbage + # collection will never happen when we define __del__() to + # restore the recursion level. The result is a memory leak. + # So for now, we do not automatically restore the recursion level, + # and instead provide a method to do this manually. Eventually, + # we should turn this into a non-recursive implementation. 
+ sys.setrecursionlimit(self.old_recursion_limit) + + def candidate_pairs_iter(self): + """Iterator over candidate pairs of nodes in G1 and G2.""" + + # All computations are done using the current state! + + G1_nodes = self.G1_nodes + G2_nodes = self.G2_nodes + min_key = self.G2_node_order.__getitem__ + + # First we compute the inout-terminal sets. + T1_inout = [node for node in self.inout_1 if node not in self.core_1] + T2_inout = [node for node in self.inout_2 if node not in self.core_2] + + # If T1_inout and T2_inout are both nonempty. + # P(s) = T1_inout x {min T2_inout} + if T1_inout and T2_inout: + node_2 = min(T2_inout, key=min_key) + for node_1 in T1_inout: + yield node_1, node_2 + + else: + # If T1_inout and T2_inout were both empty.... + # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} + # if not (T1_inout or T2_inout): # as suggested by [2], incorrect + if 1: # as inferred from [1], correct + # First we determine the candidate node for G2 + other_node = min(G2_nodes - set(self.core_2), key=min_key) + for node in self.G1: + if node not in self.core_1: + yield node, other_node + + # For all other cases, we don't have any candidate pairs. + + def initialize(self): + """Reinitializes the state of the algorithm. + + This method should be redefined if using something other than GMState. + If only subclassing GraphMatcher, a redefinition is not necessary. + + """ + + # core_1[n] contains the index of the node paired with n, which is m, + # provided n is in the mapping. + # core_2[m] contains the index of the node paired with m, which is n, + # provided m is in the mapping. + self.core_1 = {} + self.core_2 = {} + + # See the paper for definitions of M_x and T_x^{y} + + # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout} + # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout} + # + # The value stored is the depth of the SSR tree when the node became + # part of the corresponding set. + self.inout_1 = {} + self.inout_2 = {} + # Practically, these sets simply store the nodes in the subgraph. + + self.state = GMState(self) + + # Provide a convenient way to access the isomorphism mapping. + self.mapping = self.core_1.copy() + + def is_isomorphic(self): + """Returns True if G1 and G2 are isomorphic graphs.""" + + # Let's do two very quick checks! + # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)? + # For now, I just copy the code. + + # Check global properties + if self.G1.order() != self.G2.order(): + return False + + # Check local properties + d1 = sorted(d for n, d in self.G1.degree()) + d2 = sorted(d for n, d in self.G2.degree()) + if d1 != d2: + return False + + try: + x = next(self.isomorphisms_iter()) + return True + except StopIteration: + return False + + def isomorphisms_iter(self): + """Generator over isomorphisms between G1 and G2.""" + # Declare that we are looking for a graph-graph isomorphism. + self.test = "graph" + self.initialize() + yield from self.match() + + def match(self): + """Extends the isomorphism mapping. + + This function is called recursively to determine if a complete + isomorphism can be found between G1 and G2. It cleans up the class + variables after each recursive call. If an isomorphism is found, + we yield the mapping. + + """ + if len(self.core_1) == len(self.G2): + # Save the final mapping, otherwise garbage collection deletes it. + self.mapping = self.core_1.copy() + # The mapping is complete. 
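+            # self.mapping is a copy, so subsequent backtracking cannot
+            # mutate the dict handed to the caller.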
+ yield self.mapping + else: + for G1_node, G2_node in self.candidate_pairs_iter(): + if self.syntactic_feasibility(G1_node, G2_node): + if self.semantic_feasibility(G1_node, G2_node): + # Recursive call, adding the feasible state. + newstate = self.state.__class__(self, G1_node, G2_node) + yield from self.match() + + # restore data structures + newstate.restore() + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if adding (G1_node, G2_node) is semantically feasible. + + The semantic feasibility function should return True if it is + acceptable to add the candidate pair (G1_node, G2_node) to the current + partial isomorphism mapping. The logic should focus on semantic + information contained in the edge data or a formalized node class. + + By acceptable, we mean that the subsequent mapping can still become a + complete isomorphism mapping. Thus, if adding the candidate pair + definitely makes it so that the subsequent mapping cannot become a + complete isomorphism mapping, then this function must return False. + + The default semantic feasibility function always returns True. The + effect is that semantics are not considered in the matching of G1 + and G2. + + The semantic checks might differ based on the what type of test is + being performed. A keyword description of the test is stored in + self.test. Here is a quick description of the currently implemented + tests:: + + test='graph' + Indicates that the graph matcher is looking for a graph-graph + isomorphism. + + test='subgraph' + Indicates that the graph matcher is looking for a subgraph-graph + isomorphism such that a subgraph of G1 is isomorphic to G2. + + test='mono' + Indicates that the graph matcher is looking for a subgraph-graph + monomorphism such that a subgraph of G1 is monomorphic to G2. + + Any subclass which redefines semantic_feasibility() must maintain + the above form to keep the match() method functional. Implementations + should consider multigraphs. + """ + return True + + def subgraph_is_isomorphic(self): + """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``. + + Examples + -------- + When creating the `GraphMatcher`, the order of the arguments is important + + >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")]) + >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)]) + + Check whether a subgraph of G is isomorphic to H: + + >>> isomatcher = nx.isomorphism.GraphMatcher(G, H) + >>> isomatcher.subgraph_is_isomorphic() + False + + Check whether a subgraph of H is isomorphic to G: + + >>> isomatcher = nx.isomorphism.GraphMatcher(H, G) + >>> isomatcher.subgraph_is_isomorphic() + True + """ + try: + x = next(self.subgraph_isomorphisms_iter()) + return True + except StopIteration: + return False + + def subgraph_is_monomorphic(self): + """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``. + + Examples + -------- + When creating the `GraphMatcher`, the order of the arguments is important. 
+ + >>> G = nx.Graph([("A", "B"), ("B", "C")]) + >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)]) + + Check whether a subgraph of G is monomorphic to H: + + >>> isomatcher = nx.isomorphism.GraphMatcher(G, H) + >>> isomatcher.subgraph_is_monomorphic() + False + + Check whether a subgraph of H is monomorphic to G: + + >>> isomatcher = nx.isomorphism.GraphMatcher(H, G) + >>> isomatcher.subgraph_is_monomorphic() + True + """ + try: + x = next(self.subgraph_monomorphisms_iter()) + return True + except StopIteration: + return False + + def subgraph_isomorphisms_iter(self): + """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``. + + Examples + -------- + When creating the `GraphMatcher`, the order of the arguments is important + + >>> G = nx.Graph([("A", "B"), ("B", "C"), ("A", "C")]) + >>> H = nx.Graph([(0, 1), (1, 2), (0, 2), (1, 3), (0, 4)]) + + Yield isomorphic mappings between ``H`` and subgraphs of ``G``: + + >>> isomatcher = nx.isomorphism.GraphMatcher(G, H) + >>> list(isomatcher.subgraph_isomorphisms_iter()) + [] + + Yield isomorphic mappings between ``G`` and subgraphs of ``H``: + + >>> isomatcher = nx.isomorphism.GraphMatcher(H, G) + >>> next(isomatcher.subgraph_isomorphisms_iter()) + {0: 'A', 1: 'B', 2: 'C'} + + """ + # Declare that we are looking for graph-subgraph isomorphism. + self.test = "subgraph" + self.initialize() + yield from self.match() + + def subgraph_monomorphisms_iter(self): + """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``. + + Examples + -------- + When creating the `GraphMatcher`, the order of the arguments is important. + + >>> G = nx.Graph([("A", "B"), ("B", "C")]) + >>> H = nx.Graph([(0, 1), (1, 2), (0, 2)]) + + Yield monomorphic mappings between ``H`` and subgraphs of ``G``: + + >>> isomatcher = nx.isomorphism.GraphMatcher(G, H) + >>> list(isomatcher.subgraph_monomorphisms_iter()) + [] + + Yield monomorphic mappings between ``G`` and subgraphs of ``H``: + + >>> isomatcher = nx.isomorphism.GraphMatcher(H, G) + >>> next(isomatcher.subgraph_monomorphisms_iter()) + {0: 'A', 1: 'B', 2: 'C'} + """ + # Declare that we are looking for graph-subgraph monomorphism. + self.test = "mono" + self.initialize() + yield from self.match() + + def syntactic_feasibility(self, G1_node, G2_node): + """Returns True if adding (G1_node, G2_node) is syntactically feasible. + + This function returns True if it is adding the candidate pair + to the current partial isomorphism/monomorphism mapping is allowable. + The addition is allowable if the inclusion of the candidate pair does + not make it impossible for an isomorphism/monomorphism to be found. + """ + + # The VF2 algorithm was designed to work with graphs having, at most, + # one edge connecting any two nodes. This is not the case when + # dealing with an MultiGraphs. + # + # Basically, when we test the look-ahead rules R_neighbor, we will + # make sure that the number of edges are checked. We also add + # a R_self check to verify that the number of selfloops is acceptable. + # + # Users might be comparing Graph instances with MultiGraph instances. + # So the generic GraphMatcher class must work with MultiGraphs. + # Care must be taken since the value in the innermost dictionary is a + # singlet for Graph instances. For MultiGraphs, the value in the + # innermost dictionary is a list. + + ### + # Test at each step to get a return value as soon as possible. + ### + + # Look ahead 0 + + # R_self + + # The number of selfloops for G1_node must equal the number of + # self-loops for G2_node. 
Without this check, we would fail on
+        # R_neighbor at the next recursion level. But it is good to prune the
+        # search tree now.
+
+        if self.test == "mono":
+            if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+        else:
+            if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges(
+                G2_node, G2_node
+            ):
+                return False
+
+        # R_neighbor
+
+        # For each neighbor n' of n in the partial mapping, the corresponding
+        # node m' is a neighbor of m, and vice versa. Also, the number of
+        # edges must be equal.
+        if self.test != "mono":
+            for neighbor in self.G1[G1_node]:
+                if neighbor in self.core_1:
+                    if self.core_1[neighbor] not in self.G2[G2_node]:
+                        return False
+                    elif self.G1.number_of_edges(
+                        neighbor, G1_node
+                    ) != self.G2.number_of_edges(self.core_1[neighbor], G2_node):
+                        return False
+
+        for neighbor in self.G2[G2_node]:
+            if neighbor in self.core_2:
+                if self.core_2[neighbor] not in self.G1[G1_node]:
+                    return False
+                elif self.test == "mono":
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) < self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+                else:
+                    if self.G1.number_of_edges(
+                        self.core_2[neighbor], G1_node
+                    ) != self.G2.number_of_edges(neighbor, G2_node):
+                        return False
+
+        if self.test != "mono":
+            # Look ahead 1
+
+            # R_terminout
+            # The number of neighbors of n that are in T_1^{inout} is equal
+            # to the number of neighbors of m that are in T_2^{inout}, and
+            # vice versa.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if (neighbor in self.inout_1) and (neighbor not in self.core_1):
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if (neighbor in self.inout_2) and (neighbor not in self.core_2):
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+            # Look ahead 2
+
+            # R_new
+
+            # The number of neighbors of n that are neither in core_1 nor
+            # T_1^{inout} is equal to the number of neighbors of m that are
+            # neither in core_2 nor T_2^{inout}.
+            num1 = 0
+            for neighbor in self.G1[G1_node]:
+                if neighbor not in self.inout_1:
+                    num1 += 1
+            num2 = 0
+            for neighbor in self.G2[G2_node]:
+                if neighbor not in self.inout_2:
+                    num2 += 1
+            if self.test == "graph":
+                if num1 != num2:
+                    return False
+            else:  # self.test == 'subgraph'
+                if not (num1 >= num2):
+                    return False
+
+        # Otherwise, this node pair is syntactically feasible!
+        return True
+
+
+class DiGraphMatcher(GraphMatcher):
+    """Implementation of VF2 algorithm for matching directed graphs.
+
+    Suitable for DiGraph and MultiDiGraph instances.
+    """
+
+    def __init__(self, G1, G2):
+        """Initialize DiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a DiGraphMatcher which checks for syntactic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+        >>> DiGM = isomorphism.DiGraphMatcher(G1, G2)
+        """
+        super().__init__(G1, G2)
+
+    def _is_directed_matcher(self):
+        return True
+
+    def candidate_pairs_iter(self):
+        """Iterator over candidate pairs of nodes in G1 and G2."""
+
+        # All computations are done using the current state!
+
+        G1_nodes = self.G1_nodes
+        G2_nodes = self.G2_nodes
+        min_key = self.G2_node_order.__getitem__
+
+        # First we compute the out-terminal sets.
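+        # (T1_out/T2_out below are the nodes reachable from the current
+        # partial mapping along an outgoing edge but not yet mapped
+        # themselves; candidate pairs are drawn from these sets first.)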
+        T1_out = [node for node in self.out_1 if node not in self.core_1]
+        T2_out = [node for node in self.out_2 if node not in self.core_2]
+
+        # If T1_out and T2_out are both nonempty.
+        # P(s) = T1_out x {min T2_out}
+        if T1_out and T2_out:
+            node_2 = min(T2_out, key=min_key)
+            for node_1 in T1_out:
+                yield node_1, node_2
+
+        # If T1_out and T2_out were both empty...
+        # We compute the in-terminal sets.
+
+        # elif not (T1_out or T2_out):  # as suggested by [2], incorrect
+        else:  # as suggested by [1], correct
+            T1_in = [node for node in self.in_1 if node not in self.core_1]
+            T2_in = [node for node in self.in_2 if node not in self.core_2]
+
+            # If T1_in and T2_in are both nonempty.
+            # P(s) = T1_in x {min T2_in}
+            if T1_in and T2_in:
+                node_2 = min(T2_in, key=min_key)
+                for node_1 in T1_in:
+                    yield node_1, node_2
+
+            # If all terminal sets are empty...
+            # P(s) = (N_1 - M_1) x {min (N_2 - M_2)}
+
+            # elif not (T1_in or T2_in):  # as suggested by [2], incorrect
+            else:  # as inferred from [1], correct
+                node_2 = min(G2_nodes - set(self.core_2), key=min_key)
+                for node_1 in G1_nodes:
+                    if node_1 not in self.core_1:
+                        yield node_1, node_2
+
+        # For all other cases, we don't have any candidate pairs.
+
+    def initialize(self):
+        """Reinitializes the state of the algorithm.
+
+        This method should be redefined if using something other than DiGMState.
+        If only subclassing GraphMatcher, a redefinition is not necessary.
+        """
+
+        # core_1[n] contains the index of the node paired with n, which is m,
+        # provided n is in the mapping.
+        # core_2[m] contains the index of the node paired with m, which is n,
+        # provided m is in the mapping.
+        self.core_1 = {}
+        self.core_2 = {}
+
+        # See the paper for definitions of M_x and T_x^{y}
+
+        # in_1[n] is non-zero if n is in M_1 or in T_1^{in}
+        # out_1[n] is non-zero if n is in M_1 or in T_1^{out}
+        #
+        # in_2[m] is non-zero if m is in M_2 or in T_2^{in}
+        # out_2[m] is non-zero if m is in M_2 or in T_2^{out}
+        #
+        # The value stored is the depth of the search tree when the node became
+        # part of the corresponding set.
+        self.in_1 = {}
+        self.in_2 = {}
+        self.out_1 = {}
+        self.out_2 = {}
+
+        self.state = DiGMState(self)
+
+        # Provide a convenient way to access the isomorphism mapping.
+        self.mapping = self.core_1.copy()
+
+    def syntactic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is syntactically feasible.
+
+        This function returns True if adding the candidate pair to the
+        current partial isomorphism/monomorphism mapping is allowable. The
+        addition is allowable if the inclusion of the candidate pair does
+        not make it impossible for an isomorphism/monomorphism to be found.
+        """
+
+        # The VF2 algorithm was designed to work with graphs having, at most,
+        # one edge connecting any two nodes. This is not the case when
+        # dealing with MultiDiGraphs.
+        #
+        # Basically, when we test the look-ahead rules R_pred and R_succ, we
+        # make sure that the number of edges is checked. We also add an
+        # R_self check to verify that the number of selfloops is acceptable.
+
+        # Users might be comparing DiGraph instances with MultiDiGraph
+        # instances. So the generic DiGraphMatcher class must work with
+        # MultiDiGraphs. Care must be taken since the value in the innermost
+        # dictionary is a single edge-data dict for DiGraph instances,
+        # whereas for MultiDiGraphs it is a dict mapping edge keys to
+        # edge-data dicts.
+
+        ###
+        # Test at each step to get a return value as soon as possible.
+ ### + + # Look ahead 0 + + # R_self + + # The number of selfloops for G1_node must equal the number of + # self-loops for G2_node. Without this check, we would fail on R_pred + # at the next recursion level. This should prune the tree even further. + if self.test == "mono": + if self.G1.number_of_edges(G1_node, G1_node) < self.G2.number_of_edges( + G2_node, G2_node + ): + return False + else: + if self.G1.number_of_edges(G1_node, G1_node) != self.G2.number_of_edges( + G2_node, G2_node + ): + return False + + # R_pred + + # For each predecessor n' of n in the partial mapping, the + # corresponding node m' is a predecessor of m, and vice versa. Also, + # the number of edges must be equal + if self.test != "mono": + for predecessor in self.G1.pred[G1_node]: + if predecessor in self.core_1: + if self.core_1[predecessor] not in self.G2.pred[G2_node]: + return False + elif self.G1.number_of_edges( + predecessor, G1_node + ) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): + return False + + for predecessor in self.G2.pred[G2_node]: + if predecessor in self.core_2: + if self.core_2[predecessor] not in self.G1.pred[G1_node]: + return False + elif self.test == "mono": + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) < self.G2.number_of_edges(predecessor, G2_node): + return False + else: + if self.G1.number_of_edges( + self.core_2[predecessor], G1_node + ) != self.G2.number_of_edges(predecessor, G2_node): + return False + + # R_succ + + # For each successor n' of n in the partial mapping, the corresponding + # node m' is a successor of m, and vice versa. Also, the number of + # edges must be equal. + if self.test != "mono": + for successor in self.G1[G1_node]: + if successor in self.core_1: + if self.core_1[successor] not in self.G2[G2_node]: + return False + elif self.G1.number_of_edges( + G1_node, successor + ) != self.G2.number_of_edges(G2_node, self.core_1[successor]): + return False + + for successor in self.G2[G2_node]: + if successor in self.core_2: + if self.core_2[successor] not in self.G1[G1_node]: + return False + elif self.test == "mono": + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) < self.G2.number_of_edges(G2_node, successor): + return False + else: + if self.G1.number_of_edges( + G1_node, self.core_2[successor] + ) != self.G2.number_of_edges(G2_node, successor): + return False + + if self.test != "mono": + # Look ahead 1 + + # R_termin + # The number of predecessors of n that are in T_1^{in} is equal to the + # number of predecessors of m that are in T_2^{in}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.in_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.in_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are in T_1^{in} is equal to the + # number of successors of m that are in T_2^{in}. 
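+            # (As with each look-ahead rule in this method, equality of the
+            # counts is required for the 'graph' test, while the 'subgraph'
+            # test only needs the count in G1 to dominate the count in G2;
+            # the 'mono' test skips these look-aheads entirely.)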
+ num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.in_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.in_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # R_termout + + # The number of predecessors of n that are in T_1^{out} is equal to the + # number of predecessors of m that are in T_2^{out}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor in self.out_1) and (predecessor not in self.core_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor in self.out_2) and (predecessor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are in T_1^{out} is equal to the + # number of successors of m that are in T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor in self.out_1) and (successor not in self.core_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor in self.out_2) and (successor not in self.core_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Look ahead 2 + + # R_new + + # The number of predecessors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for predecessor in self.G1.pred[G1_node]: + if (predecessor not in self.in_1) and (predecessor not in self.out_1): + num1 += 1 + num2 = 0 + for predecessor in self.G2.pred[G2_node]: + if (predecessor not in self.in_2) and (predecessor not in self.out_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # The number of successors of n that are neither in the core_1 nor + # T_1^{in} nor T_1^{out} is equal to the number of successors of m + # that are neither in core_2 nor T_2^{in} nor T_2^{out}. + num1 = 0 + for successor in self.G1[G1_node]: + if (successor not in self.in_1) and (successor not in self.out_1): + num1 += 1 + num2 = 0 + for successor in self.G2[G2_node]: + if (successor not in self.in_2) and (successor not in self.out_2): + num2 += 1 + if self.test == "graph": + if num1 != num2: + return False + else: # self.test == 'subgraph' + if not (num1 >= num2): + return False + + # Otherwise, this node pair is syntactically feasible! + return True + + def subgraph_is_isomorphic(self): + """Returns `True` if a subgraph of ``G1`` is isomorphic to ``G2``. 
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("B", "A"), ("B", "C"), ("C", "B")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Check whether a subgraph of G is isomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_isomorphic()
+        False
+
+        Check whether a subgraph of H is isomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_isomorphic()
+        True
+        """
+        return super().subgraph_is_isomorphic()
+
+    def subgraph_is_monomorphic(self):
+        """Returns `True` if a subgraph of ``G1`` is monomorphic to ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Check whether a subgraph of G is monomorphic to H:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> isomatcher.subgraph_is_monomorphic()
+        False
+
+        Check whether a subgraph of H is monomorphic to G:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> isomatcher.subgraph_is_monomorphic()
+        True
+        """
+        return super().subgraph_is_monomorphic()
+
+    def subgraph_isomorphisms_iter(self):
+        """Generator over isomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("B", "C"), ("C", "B"), ("C", "D"), ("D", "C")])
+        >>> H = nx.DiGraph(nx.path_graph(5))
+
+        Yield isomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_isomorphisms_iter())
+        []
+
+        Yield isomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_isomorphisms_iter())
+        {0: 'B', 1: 'C', 2: 'D'}
+        """
+        return super().subgraph_isomorphisms_iter()
+
+    def subgraph_monomorphisms_iter(self):
+        """Generator over monomorphisms between a subgraph of ``G1`` and ``G2``.
+
+        Examples
+        --------
+        When creating the `DiGraphMatcher`, the order of the arguments is important.
+
+        >>> G = nx.DiGraph([("A", "B"), ("C", "B"), ("D", "C")])
+        >>> H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 2)])
+
+        Yield monomorphic mappings between ``H`` and subgraphs of ``G``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(G, H)
+        >>> list(isomatcher.subgraph_monomorphisms_iter())
+        []
+
+        Yield monomorphic mappings between ``G`` and subgraphs of ``H``:
+
+        >>> isomatcher = nx.isomorphism.DiGraphMatcher(H, G)
+        >>> next(isomatcher.subgraph_monomorphisms_iter())
+        {3: 'A', 2: 'B', 1: 'C', 0: 'D'}
+        """
+        return super().subgraph_monomorphisms_iter()
+
+
+class GMState:
+    """Internal representation of state for the GraphMatcher class.
+
+    This class is used internally by the GraphMatcher class. It is used
+    only to store state specific data. There will be at most G2.order() of
+    these objects in memory at a time, due to the depth-first search
+    strategy employed by the VF2 algorithm.
+    """
+
+    def __init__(self, GM, G1_node=None, G2_node=None):
+        """Initializes GMState object.
+
+        Pass in the GraphMatcher to which this GMState belongs and the
+        new node pair that will be added to the GraphMatcher's current
+        isomorphism mapping.
+        """
+        self.GM = GM
+
+        # Initialize the last stored node pair.
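+        # (``None`` here marks the root state created by ``initialize()``;
+        # when a real candidate pair is supplied, both attributes are
+        # overwritten below.)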
+ self.G1_node = None + self.G2_node = None + self.depth = len(GM.core_1) + + if G1_node is None or G2_node is None: + # Then we reset the class variables + GM.core_1 = {} + GM.core_2 = {} + GM.inout_1 = {} + GM.inout_2 = {} + + # Watch out! G1_node == 0 should evaluate to True. + if G1_node is not None and G2_node is not None: + # Add the node pair to the isomorphism mapping. + GM.core_1[G1_node] = G2_node + GM.core_2[G2_node] = G1_node + + # Store the node that was added last. + self.G1_node = G1_node + self.G2_node = G2_node + + # Now we must update the other two vectors. + # We will add only if it is not in there already! + self.depth = len(GM.core_1) + + # First we add the new nodes... + if G1_node not in GM.inout_1: + GM.inout_1[G1_node] = self.depth + if G2_node not in GM.inout_2: + GM.inout_2[G2_node] = self.depth + + # Now we add every other node... + + # Updates for T_1^{inout} + new_nodes = set() + for node in GM.core_1: + new_nodes.update( + [neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1] + ) + for node in new_nodes: + if node not in GM.inout_1: + GM.inout_1[node] = self.depth + + # Updates for T_2^{inout} + new_nodes = set() + for node in GM.core_2: + new_nodes.update( + [neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2] + ) + for node in new_nodes: + if node not in GM.inout_2: + GM.inout_2[node] = self.depth + + def restore(self): + """Deletes the GMState object and restores the class variables.""" + # First we remove the node that was added from the core vectors. + # Watch out! G1_node == 0 should evaluate to True. + if self.G1_node is not None and self.G2_node is not None: + del self.GM.core_1[self.G1_node] + del self.GM.core_2[self.G2_node] + + # Now we revert the other two vectors. + # Thus, we delete all entries which have this depth level. + for vector in (self.GM.inout_1, self.GM.inout_2): + for node in list(vector.keys()): + if vector[node] == self.depth: + del vector[node] + + +class DiGMState: + """Internal representation of state for the DiGraphMatcher class. + + This class is used internally by the DiGraphMatcher class. It is used + only to store state specific data. There will be at most G2.order() of + these objects in memory at a time, due to the depth-first search + strategy employed by the VF2 algorithm. + + """ + + def __init__(self, GM, G1_node=None, G2_node=None): + """Initializes DiGMState object. + + Pass in the DiGraphMatcher to which this DiGMState belongs and the + new node pair that will be added to the GraphMatcher's current + isomorphism mapping. + """ + self.GM = GM + + # Initialize the last stored node pair. + self.G1_node = None + self.G2_node = None + self.depth = len(GM.core_1) + + if G1_node is None or G2_node is None: + # Then we reset the class variables + GM.core_1 = {} + GM.core_2 = {} + GM.in_1 = {} + GM.in_2 = {} + GM.out_1 = {} + GM.out_2 = {} + + # Watch out! G1_node == 0 should evaluate to True. + if G1_node is not None and G2_node is not None: + # Add the node pair to the isomorphism mapping. + GM.core_1[G1_node] = G2_node + GM.core_2[G2_node] = G1_node + + # Store the node that was added last. + self.G1_node = G1_node + self.G2_node = G2_node + + # Now we must update the other four vectors. + # We will add only if it is not in there already! + self.depth = len(GM.core_1) + + # First we add the new nodes... 
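+            # (Sketch of the bookkeeping: every entry written below is
+            # stamped with the current search depth, so restore() can later
+            # delete exactly the entries created at this level and unwind
+            # the search one state at a time.)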
+            for vector in (GM.in_1, GM.out_1):
+                if G1_node not in vector:
+                    vector[G1_node] = self.depth
+            for vector in (GM.in_2, GM.out_2):
+                if G2_node not in vector:
+                    vector[G2_node] = self.depth
+
+            # Now we add every other node...
+
+            # Updates for T_1^{in}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G1.predecessors(node)
+                        if predecessor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_1:
+                    GM.in_1[node] = self.depth
+
+            # Updates for T_2^{in}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        predecessor
+                        for predecessor in GM.G2.predecessors(node)
+                        if predecessor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.in_2:
+                    GM.in_2[node] = self.depth
+
+            # Updates for T_1^{out}
+            new_nodes = set()
+            for node in GM.core_1:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G1.successors(node)
+                        if successor not in GM.core_1
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_1:
+                    GM.out_1[node] = self.depth
+
+            # Updates for T_2^{out}
+            new_nodes = set()
+            for node in GM.core_2:
+                new_nodes.update(
+                    [
+                        successor
+                        for successor in GM.G2.successors(node)
+                        if successor not in GM.core_2
+                    ]
+                )
+            for node in new_nodes:
+                if node not in GM.out_2:
+                    GM.out_2[node] = self.depth
+
+    def restore(self):
+        """Deletes the DiGMState object and restores the class variables."""
+
+        # First we remove the node that was added from the core vectors.
+        # Watch out! G1_node == 0 should evaluate to True.
+        if self.G1_node is not None and self.G2_node is not None:
+            del self.GM.core_1[self.G1_node]
+            del self.GM.core_2[self.G2_node]
+
+        # Now we revert the other four vectors.
+        # Thus, we delete all entries which have this depth level.
+        for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2):
+            for node in list(vector.keys()):
+                if vector[node] == self.depth:
+                    del vector[node]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
new file mode 100644
index 0000000..b48820d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/matchhelpers.py
@@ -0,0 +1,352 @@
+"""Functions which help end users define custom node_match and
+edge_match functions to use during isomorphism checks.
+"""
+
+import math
+import types
+from itertools import permutations
+
+__all__ = [
+    "categorical_node_match",
+    "categorical_edge_match",
+    "categorical_multiedge_match",
+    "numerical_node_match",
+    "numerical_edge_match",
+    "numerical_multiedge_match",
+    "generic_node_match",
+    "generic_edge_match",
+    "generic_multiedge_match",
+]
+
+
+def copyfunc(f, name=None):
+    """Returns a copy of a function, optionally with a new name."""
+    return types.FunctionType(
+        f.__code__, f.__globals__, name or f.__name__, f.__defaults__, f.__closure__
+    )
+
+
+def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
+    """Returns True if x and y are sufficiently close, elementwise.
+
+    Parameters
+    ----------
+    x, y : iterable of float
+        The sequences of values to compare elementwise.
+    rtol : float
+        The relative error tolerance.
+    atol : float
+        The absolute error tolerance.
+
+    """
+    # assume finite weights, see numpy.allclose() for reference
+    return all(math.isclose(xi, yi, rel_tol=rtol, abs_tol=atol) for xi, yi in zip(x, y))
+
+
+categorical_doc = """
+Returns a comparison function for a categorical node attribute.
+ +The value(s) of the attr(s) must be hashable and comparable via the == +operator since they are placed into a set([]) object. If the sets from +G1 and G2 are the same, then the constructed function returns True. + +Parameters +---------- +attr : string | list + The categorical node attribute to compare, or a list of categorical + node attributes to compare. +default : value | list + The default value for the categorical node attribute, or a list of + default values for the categorical node attributes. + +Returns +------- +match : function + The customized, categorical `node_match` function. + +Examples +-------- +>>> import networkx.algorithms.isomorphism as iso +>>> nm = iso.categorical_node_match("size", 1) +>>> nm = iso.categorical_node_match(["color", "size"], ["red", 2]) + +""" + + +def categorical_node_match(attr, default): + if isinstance(attr, str): + + def match(data1, data2): + return data1.get(attr, default) == data2.get(attr, default) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(data1, data2): + return all(data1.get(attr, d) == data2.get(attr, d) for attr, d in attrs) + + return match + + +categorical_edge_match = copyfunc(categorical_node_match, "categorical_edge_match") + + +def categorical_multiedge_match(attr, default): + if isinstance(attr, str): + + def match(datasets1, datasets2): + values1 = {data.get(attr, default) for data in datasets1.values()} + values2 = {data.get(attr, default) for data in datasets2.values()} + return values1 == values2 + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(datasets1, datasets2): + values1 = set() + for data1 in datasets1.values(): + x = tuple(data1.get(attr, d) for attr, d in attrs) + values1.add(x) + values2 = set() + for data2 in datasets2.values(): + x = tuple(data2.get(attr, d) for attr, d in attrs) + values2.add(x) + return values1 == values2 + + return match + + +# Docstrings for categorical functions. +categorical_node_match.__doc__ = categorical_doc +categorical_edge_match.__doc__ = categorical_doc.replace("node", "edge") +tmpdoc = categorical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("categorical_edge_match", "categorical_multiedge_match") +categorical_multiedge_match.__doc__ = tmpdoc + + +numerical_doc = """ +Returns a comparison function for a numerical node attribute. + +The value(s) of the attr(s) must be numerical and sortable. If the +sorted list of values from G1 and G2 are the same within some +tolerance, then the constructed function returns True. + +Parameters +---------- +attr : string | list + The numerical node attribute to compare, or a list of numerical + node attributes to compare. +default : value | list + The default value for the numerical node attribute, or a list of + default values for the numerical node attributes. +rtol : float + The relative error tolerance. +atol : float + The absolute error tolerance. + +Returns +------- +match : function + The customized, numerical `node_match` function. 
+ +Examples +-------- +>>> import networkx.algorithms.isomorphism as iso +>>> nm = iso.numerical_node_match("weight", 1.0) +>>> nm = iso.numerical_node_match(["weight", "linewidth"], [0.25, 0.5]) + +""" + + +def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): + if isinstance(attr, str): + + def match(data1, data2): + return math.isclose( + data1.get(attr, default), + data2.get(attr, default), + rel_tol=rtol, + abs_tol=atol, + ) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(data1, data2): + values1 = [data1.get(attr, d) for attr, d in attrs] + values2 = [data2.get(attr, d) for attr, d in attrs] + return allclose(values1, values2, rtol=rtol, atol=atol) + + return match + + +numerical_edge_match = copyfunc(numerical_node_match, "numerical_edge_match") + + +def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): + if isinstance(attr, str): + + def match(datasets1, datasets2): + values1 = sorted(data.get(attr, default) for data in datasets1.values()) + values2 = sorted(data.get(attr, default) for data in datasets2.values()) + return allclose(values1, values2, rtol=rtol, atol=atol) + + else: + attrs = list(zip(attr, default)) # Python 3 + + def match(datasets1, datasets2): + values1 = [] + for data1 in datasets1.values(): + x = tuple(data1.get(attr, d) for attr, d in attrs) + values1.append(x) + values2 = [] + for data2 in datasets2.values(): + x = tuple(data2.get(attr, d) for attr, d in attrs) + values2.append(x) + values1.sort() + values2.sort() + for xi, yi in zip(values1, values2): + if not allclose(xi, yi, rtol=rtol, atol=atol): + return False + else: + return True + + return match + + +# Docstrings for numerical functions. +numerical_node_match.__doc__ = numerical_doc +numerical_edge_match.__doc__ = numerical_doc.replace("node", "edge") +tmpdoc = numerical_doc.replace("node", "edge") +tmpdoc = tmpdoc.replace("numerical_edge_match", "numerical_multiedge_match") +numerical_multiedge_match.__doc__ = tmpdoc + + +generic_doc = """ +Returns a comparison function for a generic attribute. + +The value(s) of the attr(s) are compared using the specified +operators. If all the attributes are equal, then the constructed +function returns True. + +Parameters +---------- +attr : string | list + The node attribute to compare, or a list of node attributes + to compare. +default : value | list + The default value for the node attribute, or a list of + default values for the node attributes. +op : callable | list + The operator to use when comparing attribute values, or a list + of operators to use when comparing values for each attribute. + +Returns +------- +match : function + The customized, generic `node_match` function. 
+
+Examples
+--------
+>>> from operator import eq
+>>> from math import isclose
+>>> from networkx.algorithms.isomorphism import generic_node_match
+>>> nm = generic_node_match("weight", 1.0, isclose)
+>>> nm = generic_node_match("color", "red", eq)
+>>> nm = generic_node_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+"""
+
+
+def generic_node_match(attr, default, op):
+    if isinstance(attr, str):
+
+        def match(data1, data2):
+            return op(data1.get(attr, default), data2.get(attr, default))
+
+    else:
+        attrs = list(zip(attr, default, op))  # Python 3
+
+        def match(data1, data2):
+            for attr, d, operator in attrs:
+                if not operator(data1.get(attr, d), data2.get(attr, d)):
+                    return False
+            else:
+                return True
+
+    return match
+
+
+generic_edge_match = copyfunc(generic_node_match, "generic_edge_match")
+
+
+def generic_multiedge_match(attr, default, op):
+    """Returns a comparison function for a generic attribute.
+
+    The value(s) of the attr(s) are compared using the specified
+    operators. If all the attributes are equal, then the constructed
+    function returns True. Potentially, the constructed edge_match
+    function can be slow since it must verify that no isomorphism
+    exists between the multiedges before it returns False.
+
+    Parameters
+    ----------
+    attr : string | list
+        The edge attribute to compare, or a list of edge attributes
+        to compare.
+    default : value | list
+        The default value for the edge attribute, or a list of
+        default values for the edge attributes.
+    op : callable | list
+        The operator to use when comparing attribute values, or a list
+        of operators to use when comparing values for each attribute.
+
+    Returns
+    -------
+    match : function
+        The customized, generic `edge_match` function.
+
+    Examples
+    --------
+    >>> from operator import eq
+    >>> from math import isclose
+    >>> from networkx.algorithms.isomorphism import generic_multiedge_match
+    >>> nm = generic_multiedge_match("weight", 1.0, isclose)
+    >>> nm = generic_multiedge_match("color", "red", eq)
+    >>> nm = generic_multiedge_match(["weight", "color"], [1.0, "red"], [isclose, eq])
+
+    """
+
+    # This is slow, but generic.
+    # We must test every possible isomorphism between the edges.
+    if isinstance(attr, str):
+        attr = [attr]
+        default = [default]
+        op = [op]
+    attrs = list(zip(attr, default))  # Python 3
+
+    def match(datasets1, datasets2):
+        values1 = []
+        for data1 in datasets1.values():
+            x = tuple(data1.get(attr, d) for attr, d in attrs)
+            values1.append(x)
+        values2 = []
+        for data2 in datasets2.values():
+            x = tuple(data2.get(attr, d) for attr, d in attrs)
+            values2.append(x)
+        for vals2 in permutations(values2):
+            for xi, yi in zip(values1, vals2):
+                if not all(map(lambda x, y, z: z(x, y), xi, yi, op)):
+                    # This is not an isomorphism, go to next permutation.
+                    break
+            else:
+                # Then we found an isomorphism.
+                return True
+        else:
+            # Then there are no isomorphisms between the multiedges.
+            return False
+
+    return match
+
+
+# Docstrings for generic functions.
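+# (Usage sketch, illustrative only: the helpers in this module are meant to
+# be passed to the matcher classes as node_match/edge_match, e.g.
+#
+#     nm = generic_node_match("weight", 1.0, math.isclose)
+#     gm = nx.isomorphism.GraphMatcher(G1, G2, node_match=nm)
+#     gm.is_isomorphic()
+#
+# where G1 and G2 are graphs supplied by the caller and networkx is
+# imported as nx.)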
+generic_node_match.__doc__ = generic_doc
+generic_edge_match.__doc__ = generic_doc.replace("node", "edge")
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
new file mode 100644
index 0000000..62cacc7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/temporalisomorphvf2.py
@@ -0,0 +1,308 @@
+"""
+*****************************
+Time-respecting VF2 Algorithm
+*****************************
+
+An extension of the VF2 algorithm for time-respecting graph isomorphism
+testing in temporal graphs.
+
+A temporal graph is one in which edges contain a datetime attribute,
+denoting when interaction occurred between the incident nodes. A
+time-respecting subgraph of a temporal graph is a subgraph such that
+all interactions incident to a node occurred within a time threshold,
+delta, of each other. A directed time-respecting subgraph has the
+added constraint that incoming interactions to a node must precede
+outgoing interactions from the same node - this enforces a sense of
+directed flow.
+
+Introduction
+------------
+
+The TimeRespectingGraphMatcher and TimeRespectingDiGraphMatcher
+extend the GraphMatcher and DiGraphMatcher classes, respectively,
+to include temporal constraints on matches. This is achieved through
+a semantic check, via the semantic_feasibility() function.
+
+As well as including G1 (the graph in which to seek embeddings) and
+G2 (the subgraph structure of interest), the name of the temporal
+attribute on the edges and the time threshold, delta, must be supplied
+as arguments to the matching constructors.
+
+A delta of zero is the strictest temporal constraint on the match -
+only embeddings in which all interactions occur at the same time will
+be returned. A delta of one day will allow embeddings in which
+adjacent interactions occur up to a day apart.
+
+Examples
+--------
+
+Examples will be provided when the datetime type has been incorporated.
+
+
+Temporal Subgraph Isomorphism
+-----------------------------
+
+A brief discussion of the somewhat diverse current literature will be
+included here.
+
+References
+----------
+
+[1] Redmond, U. and Cunningham, P. Temporal subgraph isomorphism. In:
+The 2013 IEEE/ACM International Conference on Advances in Social
+Networks Analysis and Mining (ASONAM). Niagara Falls, Canada; 2013:
+pages 1451–1452.
+
+For a discussion of the literature on temporal networks:
+
+[2] P. Holme and J. Saramaki. Temporal networks. Physics Reports,
+519(3):97–125, 2012.
+
+Notes
+-----
+
+Handles directed and undirected graphs and graphs with parallel edges.
+
+"""
+
+import networkx as nx
+
+from .isomorphvf2 import DiGraphMatcher, GraphMatcher
+
+__all__ = ["TimeRespectingGraphMatcher", "TimeRespectingDiGraphMatcher"]
+
+
+class TimeRespectingGraphMatcher(GraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingGraphMatcher.
+
+        G1 and G2 should be nx.Graph or nx.MultiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> G2 = nx.Graph(nx.path_graph(4, create_using=nx.Graph()))
+
+        >>> GM = isomorphism.TimeRespectingGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... 
)
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def one_hop(self, Gx, Gx_node, neighbors):
+        """
+        Edges one hop out from a node in the mapping should be
+        time-respecting with respect to each other.
+        """
+        dates = []
+        for n in neighbors:
+            if not Gx.is_multigraph():  # Graph G[u][v] returns the data dictionary.
+                dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+            else:  # MultiGraph G[u][v] returns a dict of key -> data dictionary.
+                for edge in Gx[Gx_node][n].values():  # All edges between the node pair.
+                    dates.append(edge[self.temporal_attribute_name])
+        if any(x is None for x in dates):
+            raise ValueError("Datetime not supplied for at least one edge.")
+        return not dates or max(dates) - min(dates) <= self.delta
+
+    def two_hop(self, Gx, core_x, Gx_node, neighbors):
+        """
+        Paths of length 2 from Gx_node should be time-respecting.
+        """
+        return all(
+            self.one_hop(Gx, v, [n for n in Gx[v] if n in core_x] + [Gx_node])
+            for v in neighbors
+        )
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain these checks to keep the match() method functional.
+        Implementations should consider multigraphs.
+        """
+        neighbors = [n for n in self.G1[G1_node] if n in self.core_1]
+        if not self.one_hop(self.G1, G1_node, neighbors):  # Fail fast on first node.
+            return False
+        if not self.two_hop(self.G1, self.core_1, G1_node, neighbors):
+            return False
+        # Otherwise, this node is semantically feasible!
+        return True
+
+
+class TimeRespectingDiGraphMatcher(DiGraphMatcher):
+    def __init__(self, G1, G2, temporal_attribute_name, delta):
+        """Initialize TimeRespectingDiGraphMatcher.
+
+        G1 and G2 should be nx.DiGraph or nx.MultiDiGraph instances.
+
+        Examples
+        --------
+        To create a TimeRespectingDiGraphMatcher which checks for
+        syntactic and semantic feasibility:
+
+        >>> from networkx.algorithms import isomorphism
+        >>> from datetime import timedelta
+        >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph()))
+
+        >>> GM = isomorphism.TimeRespectingDiGraphMatcher(
+        ...     G1, G2, "date", timedelta(days=1)
+        ... )
+        """
+        self.temporal_attribute_name = temporal_attribute_name
+        self.delta = delta
+        super().__init__(G1, G2)
+
+    def get_pred_dates(self, Gx, Gx_node, core_x, pred):
+        """
+        Get the dates of edges from predecessors.
+        """
+        pred_dates = []
+        if not Gx.is_multigraph():  # DiGraph G[u][v] returns the data dictionary.
+            for n in pred:
+                pred_dates.append(Gx[n][Gx_node][self.temporal_attribute_name])
+        else:  # MultiDiGraph G[u][v] returns a dict of key -> data dictionary.
+            for n in pred:
+                for edge in Gx[n][Gx_node].values():  # All edges between the node pair.
+                    pred_dates.append(edge[self.temporal_attribute_name])
+        return pred_dates
+
+    def get_succ_dates(self, Gx, Gx_node, core_x, succ):
+        """
+        Get the dates of edges to successors.
+        """
+        succ_dates = []
+        if not Gx.is_multigraph():  # DiGraph G[u][v] returns the data dictionary.
+            for n in succ:
+                succ_dates.append(Gx[Gx_node][n][self.temporal_attribute_name])
+        else:  # MultiDiGraph G[u][v] returns a dict of key -> data dictionary.
+            for n in succ:
+                for edge in Gx[Gx_node][n].values():  # All edges between the node pair.
+                    succ_dates.append(edge[self.temporal_attribute_name])
+        return succ_dates
+
+    def one_hop(self, Gx, Gx_node, core_x, pred, succ):
+        """
+        Checks that the ego node's incident edges are time-respecting.
+        """
+        pred_dates = self.get_pred_dates(Gx, Gx_node, core_x, pred)
+        succ_dates = self.get_succ_dates(Gx, Gx_node, core_x, succ)
+        return self.test_one(pred_dates, succ_dates) and self.test_two(
+            pred_dates, succ_dates
+        )
+
+    def two_hop_pred(self, Gx, Gx_node, core_x, pred):
+        """
+        Applies the one-hop check to each mapped predecessor of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                p,
+                core_x,
+                self.preds(Gx, core_x, p),
+                self.succs(Gx, core_x, p, Gx_node),
+            )
+            for p in pred
+        )
+
+    def two_hop_succ(self, Gx, Gx_node, core_x, succ):
+        """
+        Applies the one-hop check to each mapped successor of the ego node.
+        """
+        return all(
+            self.one_hop(
+                Gx,
+                s,
+                core_x,
+                self.preds(Gx, core_x, s, Gx_node),
+                self.succs(Gx, core_x, s),
+            )
+            for s in succ
+        )
+
+    def preds(self, Gx, core_x, v, Gx_node=None):
+        pred = [n for n in Gx.predecessors(v) if n in core_x]
+        if Gx_node is not None:
+            pred.append(Gx_node)
+        return pred
+
+    def succs(self, Gx, core_x, v, Gx_node=None):
+        succ = [n for n in Gx.successors(v) if n in core_x]
+        if Gx_node is not None:
+            succ.append(Gx_node)
+        return succ
+
+    def test_one(self, pred_dates, succ_dates):
+        """
+        Edges one hop out from Gx_node in the mapping should be
+        time-respecting with respect to each other, regardless of
+        direction.
+        """
+        time_respecting = True
+        dates = pred_dates + succ_dates
+
+        if any(x is None for x in dates):
+            raise ValueError("Date or datetime not supplied for at least one edge.")
+
+        dates.sort()  # Small to large.
+        if 0 < len(dates) and not (dates[-1] - dates[0] <= self.delta):
+            time_respecting = False
+        return time_respecting
+
+    def test_two(self, pred_dates, succ_dates):
+        """
+        Incoming edges into Gx_node should precede the outgoing edges
+        from it, so that flow through the node is time-respecting.
+        """
+        time_respecting = True
+        pred_dates.sort()
+        succ_dates.sort()
+        # The earliest outgoing edge precedes the latest incoming edge,
+        # violating the condition that flow must enter a node before it
+        # leaves.
+        if (
+            0 < len(succ_dates)
+            and 0 < len(pred_dates)
+            and succ_dates[0] < pred_dates[-1]
+        ):
+            time_respecting = False
+        return time_respecting
+
+    def semantic_feasibility(self, G1_node, G2_node):
+        """Returns True if adding (G1_node, G2_node) is semantically
+        feasible.
+
+        Any subclass which redefines semantic_feasibility() must
+        maintain these checks to keep the match() method functional.
+        Implementations should consider multigraphs.
+        """
+        pred, succ = (
+            [n for n in self.G1.predecessors(G1_node) if n in self.core_1],
+            [n for n in self.G1.successors(G1_node) if n in self.core_1],
+        )
+        if not self.one_hop(
+            self.G1, G1_node, self.core_1, pred, succ
+        ):  # Fail fast on first node.
+            return False
+        if not self.two_hop_pred(self.G1, G1_node, self.core_1, pred):
+            return False
+        if not self.two_hop_succ(self.G1, G1_node, self.core_1, succ):
+            return False
+        # Otherwise, this node is semantically feasible!
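+        # (Illustrative note, names assumed: with edges annotated like
+        # G.add_edge(u, v, date=some_datetime), a matcher built as
+        # TimeRespectingDiGraphMatcher(G1, G2, "date", timedelta(days=1))
+        # reaches this point only when every temporal check above passed
+        # for the candidate pair.)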
+ return True diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..5e6989c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-312.pyc new file mode 100644 index 0000000..3bca9da Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_ismags.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-312.pyc new file mode 100644 index 0000000..abae016 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphism.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-312.pyc new file mode 100644 index 0000000..2a1ec58 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_isomorphvf2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-312.pyc new file mode 100644 index 0000000..5956427 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_match_helpers.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-312.pyc new file mode 100644 index 0000000..79545e2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_temporalisomorphvf2.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-312.pyc new file mode 100644 index 0000000..91d771f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_tree_isomorphism.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-312.pyc new file mode 100644 index 0000000..3c4e9c9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-312.pyc new file mode 100644 index 0000000..23fb45d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2pp_helpers.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-312.pyc new file mode 100644 index 0000000..0a53986 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/__pycache__/test_vf2userfunc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 new file mode 100644 index 0000000..dac54f0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.A99 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 new file mode 100644 index 0000000..6c6af68 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/iso_r01_s80.B99 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 new file mode 100644 index 0000000..60c3a3c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.A99 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 new file mode 100644 index 0000000..0236872 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/si2_b06_m200.B99 differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py new file mode 100644 index 0000000..46e5cea --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_ismags.py @@ -0,0 +1,351 @@ +""" +Tests for ISMAGS isomorphism algorithm. +""" + +import pytest + +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +def _matches_to_sets(matches): + """ + Helper function to facilitate comparing collections of dictionaries in + which order does not matter. 
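+
+    For example, ``_matches_to_sets([{1: "a"}, {2: "b"}])`` and
+    ``_matches_to_sets([{2: "b"}, {1: "a"}])`` compare equal.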
+ """ + return {frozenset(m.items()) for m in matches} + + +class TestSelfIsomorphism: + data = [ + ( + [ + (0, {"name": "a"}), + (1, {"name": "a"}), + (2, {"name": "b"}), + (3, {"name": "b"}), + (4, {"name": "a"}), + (5, {"name": "a"}), + ], + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)], + ), + (range(1, 5), [(1, 2), (2, 4), (4, 3), (3, 1)]), + ( + [], + [ + (0, 1), + (1, 2), + (2, 3), + (3, 4), + (4, 5), + (5, 0), + (0, 6), + (6, 7), + (2, 8), + (8, 9), + (4, 10), + (10, 11), + ], + ), + ([], [(0, 1), (1, 2), (1, 4), (2, 3), (3, 5), (3, 6)]), + ( + # 5 - 4 \ / 12 - 13 + # 0 - 3 + # 9 - 8 / \ 16 - 17 + # Assume 0 and 3 are coupled and no longer equivalent. + # Coupling node 4 to 8 means that 5 and 9 + # are no longer equivalent, pushing them in their own partitions. + # However, {5, 9} was considered equivalent to {13, 17}, which is *not* + # taken into account in the second refinement, tripping a (former) + # assertion failure. Note that this is actually the minimal failing + # example. + [], + [ + (0, 3), + (0, 4), + (4, 5), + (0, 8), + (8, 9), + (3, 12), + (12, 13), + (3, 16), + (16, 17), + ], + ), + ] + + def test_self_isomorphism(self): + """ + For some small, symmetric graphs, make sure that 1) they are isomorphic + to themselves, and 2) that only the identity mapping is found. + """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + def test_edgecase_self_isomorphism(self): + """ + This edgecase is one of the cases in which it is hard to find all + symmetry elements. + """ + graph = nx.Graph() + nx.add_path(graph, range(5)) + graph.add_edges_from([(2, 5), (5, 6)]) + + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + graph = nx.relabel_nodes(graph, {0: 0, 1: 1, 2: 2, 3: 3, 4: 6, 5: 4, 6: 5}) + ismags = iso.ISMAGS(graph, graph) + ismags_answer = list(ismags.find_isomorphisms(True)) + assert ismags_answer == [{n: n for n in graph.nodes}] + + def test_directed_self_isomorphism(self): + """ + For some small, directed, symmetric graphs, make sure that 1) they are + isomorphic to themselves, and 2) that only the identity mapping is + found. 
+ """ + for node_data, edge_data in self.data: + graph = nx.Graph() + graph.add_nodes_from(node_data) + graph.add_edges_from(edge_data) + + ismags = iso.ISMAGS( + graph, graph, node_match=iso.categorical_node_match("name", None) + ) + assert ismags.is_isomorphic() + assert ismags.subgraph_is_isomorphic() + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in graph.nodes} + ] + + +class TestSubgraphIsomorphism: + def test_isomorphism(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(4)) + + g2 = nx.Graph() + nx.add_cycle(g2, range(4)) + g2.add_edges_from(list(zip(g2, range(4, 8)))) + ismags = iso.ISMAGS(g2, g1) + assert list(ismags.subgraph_isomorphisms_iter(symmetry=True)) == [ + {n: n for n in g1.nodes} + ] + + def test_isomorphism2(self): + g1 = nx.Graph() + nx.add_path(g1, range(3)) + + g2 = g1.copy() + g2.add_edge(1, 3) + + ismags = iso.ISMAGS(g2, g1) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [ + {0: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 3: 2}, + {2: 0, 1: 1, 3: 2}, + ] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [ + {0: 2, 1: 1, 2: 0}, + {0: 2, 1: 1, 3: 0}, + {2: 2, 1: 1, 3: 0}, + ] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_nodes(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.nodes[1]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, node_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{0: 2, 1: 1, 2: 0}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + def test_labeled_edges(self): + g1 = nx.Graph() + nx.add_cycle(g1, range(3)) + g1.edges[1, 2]["attr"] = True + + g2 = g1.copy() + g2.add_edge(1, 3) + ismags = iso.ISMAGS(g2, g1, edge_match=lambda x, y: x == y) + matches = ismags.subgraph_isomorphisms_iter(symmetry=True) + expected_symmetric = [{0: 0, 1: 1, 2: 2}] + assert _matches_to_sets(matches) == _matches_to_sets(expected_symmetric) + + matches = ismags.subgraph_isomorphisms_iter(symmetry=False) + expected_asymmetric = [{1: 2, 0: 0, 2: 1}] + assert _matches_to_sets(matches) == _matches_to_sets( + expected_symmetric + expected_asymmetric + ) + + +class TestWikipediaExample: + # Nodes 'a', 'b', 'c' and 'd' form a column. + # Nodes 'g', 'h', 'i' and 'j' form a column. + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] + + # Nodes 1,2,3,4 form the clockwise corners of a large square. 
+ # Nodes 5,6,7,8 form the clockwise corners of a small square + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] + + def test_graph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + gm = iso.ISMAGS(g1, g2) + assert gm.is_isomorphic() + + +class TestLargestCommonSubgraph: + def test_mcis(self): + # Example graphs from DOI: 10.1002/spe.588 + graph1 = nx.Graph() + graph1.add_edges_from([(1, 2), (2, 3), (2, 4), (3, 4), (4, 5)]) + graph1.nodes[1]["color"] = 0 + + graph2 = nx.Graph() + graph2.add_edges_from( + [(1, 2), (2, 3), (2, 4), (3, 4), (3, 5), (5, 6), (5, 7), (6, 7)] + ) + graph2.nodes[1]["color"] = 1 + graph2.nodes[6]["color"] = 2 + graph2.nodes[7]["color"] = 2 + + ismags = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {2: 4, 3: 2, 4: 3, 5: 5}] + ) + assert expected == found_mcis + + ismags = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags.subgraph_isomorphisms_iter(True)) == [] + assert list(ismags.subgraph_isomorphisms_iter(False)) == [] + found_mcis = _matches_to_sets(ismags.largest_common_subgraph()) + # Same answer, but reversed. + expected = _matches_to_sets( + [{2: 2, 3: 4, 4: 3, 5: 5}, {4: 2, 2: 3, 3: 4, 5: 5}] + ) + assert expected == found_mcis + + def test_symmetry_mcis(self): + graph1 = nx.Graph() + nx.add_path(graph1, range(4)) + + graph2 = nx.Graph() + nx.add_path(graph2, range(3)) + graph2.add_edge(1, 3) + + # Only the symmetry of graph2 is taken into account here. + ismags1 = iso.ISMAGS( + graph1, graph2, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags1.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags1.largest_common_subgraph()) + expected = _matches_to_sets([{0: 0, 1: 1, 2: 2}, {1: 0, 3: 2, 2: 1}]) + assert expected == found_mcis + + # Only the symmetry of graph1 is taken into account here. 
+ ismags2 = iso.ISMAGS( + graph2, graph1, node_match=iso.categorical_node_match("color", None) + ) + assert list(ismags2.subgraph_isomorphisms_iter(True)) == [] + found_mcis = _matches_to_sets(ismags2.largest_common_subgraph()) + expected = _matches_to_sets( + [ + {3: 2, 0: 0, 1: 1}, + {2: 0, 0: 2, 1: 1}, + {3: 0, 0: 2, 1: 1}, + {3: 0, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + {2: 0, 3: 2, 1: 1}, + ] + ) + + assert expected == found_mcis + + found_mcis1 = _matches_to_sets(ismags1.largest_common_subgraph(False)) + found_mcis2 = ismags2.largest_common_subgraph(False) + found_mcis2 = [{v: k for k, v in d.items()} for d in found_mcis2] + found_mcis2 = _matches_to_sets(found_mcis2) + + expected = _matches_to_sets( + [ + {3: 2, 1: 3, 2: 1}, + {2: 0, 0: 2, 1: 1}, + {1: 2, 3: 3, 2: 1}, + {3: 0, 1: 3, 2: 1}, + {0: 2, 2: 3, 1: 1}, + {3: 0, 1: 2, 2: 1}, + {2: 0, 0: 3, 1: 1}, + {0: 0, 2: 3, 1: 1}, + {1: 0, 3: 3, 2: 1}, + {1: 0, 3: 2, 2: 1}, + {0: 3, 1: 1, 2: 2}, + {0: 0, 1: 1, 2: 2}, + ] + ) + assert expected == found_mcis1 + assert expected == found_mcis2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py new file mode 100644 index 0000000..806c337 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphism.py @@ -0,0 +1,109 @@ +from functools import partial + +import pytest + +import networkx as nx +from networkx.algorithms import isomorphism as iso + +# Convenience functions for testing that the behavior of `could_be_isomorphic` +# with the "properties" kwarg is equivalent to the corresponding function (i.e. +# nx.fast_could_be_isomorphic or nx.faster_could_be_isomorphic) +fast_cbi = partial(nx.could_be_isomorphic, properties="dt") +faster_cbi = partial(nx.could_be_isomorphic, properties="d") + + +def test_graph_could_be_isomorphic_variants_deprecated(): + G1 = nx.Graph([(1, 2), (1, 3), (1, 5), (2, 3)]) + G2 = nx.Graph([(10, 20), (20, 30), (10, 30), (10, 50)]) + with pytest.deprecated_call(): # graph_could_be_isomorphic + result = nx.isomorphism.isomorph.graph_could_be_isomorphic(G1, G2) + assert nx.could_be_isomorphic(G1, G2) == result + with pytest.deprecated_call(): # fast_graph_could_be_isomorphic + result = nx.isomorphism.isomorph.fast_graph_could_be_isomorphic(G1, G2) + assert nx.fast_could_be_isomorphic(G1, G2) == result + with pytest.deprecated_call(): + result = nx.isomorphism.isomorph.faster_graph_could_be_isomorphic(G1, G2) + assert nx.faster_could_be_isomorphic(G1, G2) == result + + +@pytest.mark.parametrize("atlas_ids", [(699, 706), (864, 870)]) +def test_could_be_isomorphic_combined_properties(atlas_ids): + """There are two pairs of graphs from the graph atlas that have the same + combined degree-triangle distribution, but a different maximal clique + distribution. 
See gh-7852."""
+    G, H = (nx.graph_atlas(idx) for idx in atlas_ids)
+
+    assert not nx.is_isomorphic(G, H)
+
+    # Degree only
+    assert nx.faster_could_be_isomorphic(G, H)
+    assert nx.could_be_isomorphic(G, H, properties="d")
+    # Degrees & triangles
+    assert nx.fast_could_be_isomorphic(G, H)
+    assert nx.could_be_isomorphic(G, H, properties="dt")
+    # Full properties table (degrees, triangles, cliques)
+    assert not nx.could_be_isomorphic(G, H)
+    assert not nx.could_be_isomorphic(G, H, properties="dtc")
+    # For these two cases, the clique distribution alone is enough to verify
+    # the graphs can't be isomorphic
+    assert not nx.could_be_isomorphic(G, H, properties="c")
+
+
+def test_could_be_isomorphic_individual_vs_combined_dt():
+    """A test case where G and H have identical degree and triangle
+    distributions individually, but differ once the combined degree-triangle
+    distribution is compared."""
+    G = nx.Graph([(0, 1), (0, 2), (0, 3), (0, 4), (1, 2), (3, 4), (4, 5), (4, 6)])
+    H = G.copy()
+    # Modify graphs to produce different clique distributions
+    G.add_edge(0, 7)
+    H.add_edge(4, 7)
+    assert nx.could_be_isomorphic(G, H, properties="d")
+    assert nx.could_be_isomorphic(G, H, properties="t")
+    assert not nx.could_be_isomorphic(G, H, properties="dt")
+    assert not nx.could_be_isomorphic(G, H, properties="c")
+
+
+class TestIsomorph:
+    @classmethod
+    def setup_class(cls):
+        cls.G1 = nx.Graph()
+        cls.G2 = nx.Graph()
+        cls.G3 = nx.Graph()
+        cls.G4 = nx.Graph()
+        cls.G5 = nx.Graph()
+        cls.G6 = nx.Graph()
+        cls.G1.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 3]])
+        cls.G2.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50]])
+        cls.G3.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 5]])
+        cls.G4.add_edges_from([[1, 2], [1, 3], [1, 5], [2, 4]])
+        cls.G5.add_edges_from([[1, 2], [1, 3]])
+        cls.G6.add_edges_from([[10, 20], [20, 30], [10, 30], [10, 50], [20, 50]])
+
+    def test_could_be_isomorphic(self):
+        assert iso.could_be_isomorphic(self.G1, self.G2)
+        assert iso.could_be_isomorphic(self.G1, self.G3)
+        assert not iso.could_be_isomorphic(self.G1, self.G4)
+        assert iso.could_be_isomorphic(self.G3, self.G2)
+        assert not iso.could_be_isomorphic(self.G1, self.G6)
+
+    @pytest.mark.parametrize("fn", (iso.fast_could_be_isomorphic, fast_cbi))
+    def test_fast_could_be_isomorphic(self, fn):
+        assert fn(self.G3, self.G2)
+        assert not fn(self.G3, self.G5)
+        assert not fn(self.G1, self.G6)
+
+    @pytest.mark.parametrize("fn", (iso.faster_could_be_isomorphic, faster_cbi))
+    def test_faster_could_be_isomorphic(self, fn):
+        assert fn(self.G3, self.G2)
+        assert not fn(self.G3, self.G5)
+        assert not fn(self.G1, self.G6)
+
+    def test_is_isomorphic(self):
+        assert iso.is_isomorphic(self.G1, self.G2)
+        assert not iso.is_isomorphic(self.G1, self.G4)
+        assert iso.is_isomorphic(self.G1.to_directed(), self.G2.to_directed())
+        assert not iso.is_isomorphic(self.G1.to_directed(), self.G4.to_directed())
+        with pytest.raises(
+            nx.NetworkXError, match="Graphs G1 and G2 are not of the same type."
+        ):
+            iso.is_isomorphic(self.G1.to_directed(), self.G1)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
new file mode 100644
index 0000000..9a351ed
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py
@@ -0,0 +1,439 @@
+"""
+Tests for VF2 isomorphism algorithm.
+""" + +import importlib.resources +import random +import struct + +import pytest + +import networkx as nx +from networkx.algorithms import isomorphism as iso + + +class TestWikipediaExample: + # Source: https://en.wikipedia.org/wiki/Graph_isomorphism + + # Nodes 'a', 'b', 'c' and 'd' form a column. + # Nodes 'g', 'h', 'i' and 'j' form a column. + g1edges = [ + ["a", "g"], + ["a", "h"], + ["a", "i"], + ["b", "g"], + ["b", "h"], + ["b", "j"], + ["c", "g"], + ["c", "i"], + ["c", "j"], + ["d", "h"], + ["d", "i"], + ["d", "j"], + ] + + # Nodes 1,2,3,4 form the clockwise corners of a large square. + # Nodes 5,6,7,8 form the clockwise corners of a small square + g2edges = [ + [1, 2], + [2, 3], + [3, 4], + [4, 1], + [5, 6], + [6, 7], + [7, 8], + [8, 5], + [1, 5], + [2, 6], + [3, 7], + [4, 8], + ] + + def test_graph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + gm = iso.GraphMatcher(g1, g2) + assert gm.is_isomorphic() + # Just testing some cases + assert gm.subgraph_is_monomorphic() + + mapping = sorted(gm.mapping.items()) + + # this mapping is only one of the possibilities + # so this test needs to be reconsidered + # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), + # ('g', 2), ('h', 5), ('i', 4), ('j', 7)] + # assert_equal(mapping, isomap) + + def test_subgraph(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from(self.g2edges) + g3 = g2.subgraph([1, 2, 3, 4]) + gm = iso.GraphMatcher(g1, g3) + assert gm.subgraph_is_isomorphic() + + def test_subgraph_mono(self): + g1 = nx.Graph() + g2 = nx.Graph() + g1.add_edges_from(self.g1edges) + g2.add_edges_from([[1, 2], [2, 3], [3, 4]]) + gm = iso.GraphMatcher(g1, g2) + assert gm.subgraph_is_monomorphic() + + +class TestVF2GraphDB: + # https://web.archive.org/web/20090303210205/http://amalfi.dis.unina.it/graph/db/ + + @staticmethod + def create_graph(filename): + """Creates a Graph instance from the filename.""" + + # The file is assumed to be in the format from the VF2 graph database. + # Each file is composed of 16-bit numbers (unsigned short int). + # So we will want to read 2 bytes at a time. 
+
+        # We can read the number as follows:
+        #   number = struct.unpack('<H', f.read(2))
+        # This says, expect the data in little-endian encoding
+        # as an unsigned short int and unpack 2 bytes from the file.
+            if dT.out_degree(v) > 0:
+                # get all the pairs of labels and nodes of children and sort by labels
+                # reverse=True to preserve DFS order, see gh-7945
+                s = sorted(((label[u], u) for u in dT.successors(v)), reverse=True)
+
+                # invert to give a list of two tuples
+                # the sorted labels, and the corresponding children
+                ordered_labels[v], ordered_children[v] = list(zip(*s))
+
+        # now collect and sort the sorted ordered_labels
+        # for all nodes in L[i], carrying along the node
+        forlabel = sorted((ordered_labels[v], v) for v in L[i])
+
+        # now assign labels to these nodes, according to the sorted order
+        # starting from 0, where identical ordered_labels get the same label
+        current = 0
+        for i, (ol, v) in enumerate(forlabel):
+            # advance to next label if not 0, and different from previous
+            if (i != 0) and (ol != forlabel[i - 1][0]):
+                current += 1
+            label[v] = current
+
+    # they are isomorphic if the labels of newroot1 and newroot2 are 0
+    isomorphism = []
+    if label[newroot1] == 0 and label[newroot2] == 0:
+        # now let's get the isomorphism by walking the ordered_children
+        stack = [(newroot1, newroot2)]
+        while stack:
+            curr_v, curr_w = stack.pop()
+            isomorphism.append((curr_v, curr_w))
+            stack.extend(zip(ordered_children[curr_v], ordered_children[curr_w]))
+
+    # get the mapping back in terms of the old names
+    # return in sorted order for neatness
+    isomorphism = [(namemap[u], namemap[v]) for (u, v) in isomorphism]
+
+    return isomorphism
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs={"t1": 0, "t2": 1})
+def tree_isomorphism(t1, t2):
+    """
+    Return an isomorphic mapping between two trees `t1` and `t2`.
+
+    If `t1` and `t2` are not isomorphic, an empty list is returned.
+    Note that two trees may have more than one isomorphism, and this routine just
+    returns one valid mapping.
+
+    Parameters
+    ----------
+    t1 : undirected NetworkX graph
+        One of the trees being compared
+
+    t2 : undirected NetworkX graph
+        The other tree being compared
+
+    Returns
+    -------
+    isomorphism : list
+        A list of pairs in which the left element is a node in `t1`
+        and the right element is a node in `t2`. The pairs are in
+        arbitrary order. If the nodes in one tree are mapped to the names in
+        the other, then the trees will be identical. Note that an isomorphism
+        will not necessarily be unique.
+
+        If `t1` and `t2` are not isomorphic, then it returns the empty list.
+
+    Raises
+    ------
+    NetworkXError
+        If either `t1` or `t2` is not a tree
+
+    Notes
+    -----
+    This runs in ``O(n*log(n))`` time for trees with ``n`` nodes.
+    """
+    if not nx.is_tree(t1):
+        raise nx.NetworkXError("t1 is not a tree")
+    if not nx.is_tree(t2):
+        raise nx.NetworkXError("t2 is not a tree")
+
+    # To be isomorphic, t1 and t2 must have the same number of nodes and sorted
+    # degree sequences
+    if not nx.faster_could_be_isomorphic(t1, t2):
+        return []
+
+    # A tree can have either 1 or 2 centers.
+    # If the number doesn't match then t1 and t2 are not isomorphic.
+    center1 = nx.center(t1)
+    center2 = nx.center(t2)
+
+    if len(center1) != len(center2):
+        return []
+
+    # If there is only 1 center in each, then use it.
+    if len(center1) == 1:
+        return rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If both have 2 centers, then try the first center of t1
+    # with the first center of t2.
+    attempts = rooted_tree_isomorphism(t1, center1[0], t2, center2[0])
+
+    # If that worked we're done.
+    if len(attempts) > 0:
+        return attempts
+
+    # Otherwise, try center1[0] with center2[1], and see if that works
+    return rooted_tree_isomorphism(t1, center1[0], t2, center2[1])
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
new file mode 100644
index 0000000..f8d18ef
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2pp.py
@@ -0,0 +1,1102 @@
+"""
+***************
+VF2++ Algorithm
+***************
+
+An implementation of the VF2++ algorithm [1]_ for Graph Isomorphism testing.
+
+The simplest interface to use this module is to call:
+
+`vf2pp_is_isomorphic`: to check whether two graphs are isomorphic.
+`vf2pp_isomorphism`: to obtain the node mapping between two graphs,
+in case they are isomorphic.
+`vf2pp_all_isomorphisms`: to generate all possible mappings between two graphs,
+if isomorphic.
+
+Introduction
+------------
+The VF2++ algorithm follows a similar logic to that of VF2, while also
+introducing new easy-to-check cutting rules and determining the optimal access
+order of nodes. It is also implemented in a non-recursive manner, which saves
+both time and space when compared to its recursive counterpart.
+
+The optimal node ordering is obtained after taking into consideration both the
+degree and the label rarity of each node.
+This way we place the nodes that are more likely to match first in the order,
+thus examining the most promising branches in the beginning.
+The rules also consider node labels, making it easier to prune unfruitful
+branches early in the process.
+
+Examples
+--------
+
+Suppose G1 and G2 are isomorphic graphs. Verification is as follows:
+
+Without node labels:
+
+>>> import networkx as nx
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label=None)
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label=None)
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+With node labels:
+
+>>> G1 = nx.path_graph(4)
+>>> G2 = nx.path_graph(4)
+>>> mapped = {1: 1, 2: 2, 3: 3, 0: 0}
+>>> nx.set_node_attributes(
+...     G1, dict(zip(G1, ["blue", "red", "green", "yellow"])), "label"
+... )
+>>> nx.set_node_attributes(
+...     G2,
+...     dict(zip([mapped[u] for u in G1], ["blue", "red", "green", "yellow"])),
+...     "label",
+... )
+>>> nx.vf2pp_is_isomorphic(G1, G2, node_label="label")
+True
+>>> nx.vf2pp_isomorphism(G1, G2, node_label="label")
+{1: 1, 2: 2, 0: 0, 3: 3}
+
+References
+----------
+.. [1] Jüttner, Alpár & Madarasi, Péter. (2018). "VF2++—An improved subgraph
+   isomorphism algorithm". Discrete Applied Mathematics. 242.
+   https://doi.org/10.1016/j.dam.2018.02.018
+
+"""
+
+import collections
+
+import networkx as nx
+
+__all__ = ["vf2pp_isomorphism", "vf2pp_is_isomorphic", "vf2pp_all_isomorphisms"]
+
+_GraphParameters = collections.namedtuple(
+    "_GraphParameters",
+    [
+        "G1",
+        "G2",
+        "G1_labels",
+        "G2_labels",
+        "nodes_of_G1Labels",
+        "nodes_of_G2Labels",
+        "G2_nodes_of_degree",
+    ],
+)
+
+_StateParameters = collections.namedtuple(
+    "_StateParameters",
+    [
+        "mapping",
+        "reverse_mapping",
+        "T1",
+        "T1_in",
+        "T1_tilde",
+        "T1_tilde_in",
+        "T2",
+        "T2_in",
+        "T2_tilde",
+        "T2_tilde_in",
+    ],
+)
+
+
+@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"})
+def vf2pp_isomorphism(G1, G2, node_label=None, default_label=None):
+    """Return an isomorphic mapping between `G1` and `G2` if it exists.
+ + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Returns + ------- + dict or None + Node mapping if the two graphs are isomorphic. None otherwise. + """ + try: + mapping = next(vf2pp_all_isomorphisms(G1, G2, node_label, default_label)) + return mapping + except StopIteration: + return None + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +def vf2pp_is_isomorphic(G1, G2, node_label=None, default_label=None): + """Examines whether G1 and G2 are isomorphic. + + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Returns + ------- + bool + True if the two graphs are isomorphic, False otherwise. + """ + if vf2pp_isomorphism(G1, G2, node_label, default_label) is not None: + return True + return False + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}, node_attrs={"node_label": "default_label"}) +def vf2pp_all_isomorphisms(G1, G2, node_label=None, default_label=None): + """Yields all the possible mappings between G1 and G2. + + Parameters + ---------- + G1, G2 : NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism. + + node_label : str, optional + The name of the node attribute to be used when comparing nodes. + The default is `None`, meaning node attributes are not considered + in the comparison. Any node that doesn't have the `node_label` + attribute uses `default_label` instead. + + default_label : scalar + Default value to use when a node doesn't have an attribute + named `node_label`. Default is `None`. + + Yields + ------ + dict + Isomorphic mapping between the nodes in `G1` and `G2`. 
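+
+    Examples
+    --------
+    A minimal doctest sketch (using only the public entry point described in
+    the module docstring): a path graph on three nodes has exactly two
+    automorphisms, so two mappings are yielded.
+
+    >>> G1, G2 = nx.path_graph(3), nx.path_graph(3)
+    >>> len(list(nx.vf2pp_all_isomorphisms(G1, G2, node_label=None)))
+    2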
+ """ + if G1.number_of_nodes() == 0 or G2.number_of_nodes() == 0: + return False + + # Create the degree dicts based on graph type + if G1.is_directed(): + G1_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G1.in_degree, G1.out_degree) + } + G2_degree = { + n: (in_degree, out_degree) + for (n, in_degree), (_, out_degree) in zip(G2.in_degree, G2.out_degree) + } + else: + G1_degree = dict(G1.degree) + G2_degree = dict(G2.degree) + + if not G1.is_directed(): + find_candidates = _find_candidates + restore_Tinout = _restore_Tinout + else: + find_candidates = _find_candidates_Di + restore_Tinout = _restore_Tinout_Di + + # Check that both graphs have the same number of nodes and degree sequence + if G1.order() != G2.order(): + return False + if sorted(G1_degree.values()) != sorted(G2_degree.values()): + return False + + # Initialize parameters and cache necessary information about degree and labels + graph_params, state_params = _initialize_parameters( + G1, G2, G2_degree, node_label, default_label + ) + + # Check if G1 and G2 have the same labels, and that number of nodes per label + # is equal between the two graphs + if not _precheck_label_properties(graph_params): + return False + + # Calculate the optimal node ordering + node_order = _matching_order(graph_params) + + # Initialize the stack + stack = [] + candidates = iter( + find_candidates(node_order[0], graph_params, state_params, G1_degree) + ) + stack.append((node_order[0], candidates)) + + mapping = state_params.mapping + reverse_mapping = state_params.reverse_mapping + + # Index of the node from the order, currently being examined + matching_node = 1 + + while stack: + current_node, candidate_nodes = stack[-1] + + try: + candidate = next(candidate_nodes) + except StopIteration: + # If no remaining candidates, return to a previous state, and follow another branch + stack.pop() + matching_node -= 1 + if stack: + # Pop the previously added u-v pair, and look for a different candidate _v for u + popped_node1, _ = stack[-1] + popped_node2 = mapping[popped_node1] + mapping.pop(popped_node1) + reverse_mapping.pop(popped_node2) + restore_Tinout(popped_node1, popped_node2, graph_params, state_params) + continue + + if _feasibility(current_node, candidate, graph_params, state_params): + # Terminate if mapping is extended to its full + if len(mapping) == G2.number_of_nodes() - 1: + cp_mapping = mapping.copy() + cp_mapping[current_node] = candidate + yield cp_mapping + continue + + # Feasibility rules pass, so extend the mapping and update the parameters + mapping[current_node] = candidate + reverse_mapping[candidate] = current_node + _update_Tinout(current_node, candidate, graph_params, state_params) + # Append the next node and its candidates to the stack + candidates = iter( + find_candidates( + node_order[matching_node], graph_params, state_params, G1_degree + ) + ) + stack.append((node_order[matching_node], candidates)) + matching_node += 1 + + +def _precheck_label_properties(graph_params): + G1, G2, G1_labels, G2_labels, nodes_of_G1Labels, nodes_of_G2Labels, _ = graph_params + if any( + label not in nodes_of_G1Labels or len(nodes_of_G1Labels[label]) != len(nodes) + for label, nodes in nodes_of_G2Labels.items() + ): + return False + return True + + +def _initialize_parameters(G1, G2, G2_degree, node_label=None, default_label=-1): + """Initializes all the necessary parameters for VF2++ + + Parameters + ---------- + G1,G2: NetworkX Graph or MultiGraph instances. 
+ The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + Returns + ------- + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2 + G1_labels,G2_labels: dict + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. + It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes + that are not in the mapping, but are neighbors of nodes that are. + + T1_out, T2_out: set + Ti_out contains all the nodes from Gi, that are neither in the mapping + nor in Ti + """ + G1_labels = dict(G1.nodes(data=node_label, default=default_label)) + G2_labels = dict(G2.nodes(data=node_label, default=default_label)) + + graph_params = _GraphParameters( + G1, + G2, + G1_labels, + G2_labels, + nx.utils.groups(G1_labels), + nx.utils.groups(G2_labels), + nx.utils.groups(G2_degree), + ) + + T1, T1_in = set(), set() + T2, T2_in = set(), set() + if G1.is_directed(): + T1_tilde, T1_tilde_in = ( + set(G1.nodes()), + set(), + ) # todo: do we need Ti_tilde_in? What nodes does it have? + T2_tilde, T2_tilde_in = set(G2.nodes()), set() + else: + T1_tilde, T1_tilde_in = set(G1.nodes()), set() + T2_tilde, T2_tilde_in = set(G2.nodes()), set() + + state_params = _StateParameters( + {}, + {}, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) + + return graph_params, state_params + + +def _matching_order(graph_params): + """The node ordering as introduced in VF2++. + + Notes + ----- + Taking into account the structure of the Graph and the node labeling, the + nodes are placed in an order such that, most of the unfruitful/infeasible + branches of the search space can be pruned on high levels, significantly + decreasing the number of visited states. The premise is that, the algorithm + will be able to recognize inconsistencies early, proceeding to go deep into + the search tree only if it's needed. + + Parameters + ---------- + graph_params: namedtuple + Contains: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism. + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively. + + Returns + ------- + node_order: list + The ordering of the nodes. 
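+
+    Examples
+    --------
+    A small sanity-check sketch, relying on the private helper
+    ``_initialize_parameters`` defined in this module: the ordering visits
+    every node of G1 exactly once.
+
+    >>> G = nx.path_graph(4)
+    >>> gp, _ = _initialize_parameters(G, G, dict(G.degree))
+    >>> sorted(_matching_order(gp)) == sorted(G)
+    True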
+ """ + G1, G2, G1_labels, _, _, nodes_of_G2Labels, _ = graph_params + if not G1 and not G2: + return {} + + if G1.is_directed(): + G1 = G1.to_undirected(as_view=True) + + V1_unordered = set(G1.nodes()) + label_rarity = {label: len(nodes) for label, nodes in nodes_of_G2Labels.items()} + used_degrees = dict.fromkeys(G1, 0) + node_order = [] + + while V1_unordered: + max_rarity = min(label_rarity[G1_labels[x]] for x in V1_unordered) + rarest_nodes = [ + n for n in V1_unordered if label_rarity[G1_labels[n]] == max_rarity + ] + max_node = max(rarest_nodes, key=G1.degree) + + for dlevel_nodes in nx.bfs_layers(G1, max_node): + nodes_to_add = dlevel_nodes.copy() + while nodes_to_add: + max_used_degree = max(used_degrees[n] for n in nodes_to_add) + max_used_degree_nodes = [ + n for n in nodes_to_add if used_degrees[n] == max_used_degree + ] + max_degree = max(G1.degree[n] for n in max_used_degree_nodes) + max_degree_nodes = [ + n for n in max_used_degree_nodes if G1.degree[n] == max_degree + ] + next_node = min( + max_degree_nodes, key=lambda x: label_rarity[G1_labels[x]] + ) + + node_order.append(next_node) + for node in G1.neighbors(next_node): + used_degrees[node] += 1 + + nodes_to_add.remove(next_node) + label_rarity[G1_labels[next_node]] -= 1 + V1_unordered.discard(next_node) + + return node_order + + +def _find_candidates( + u, graph_params, state_params, G1_degree +): # todo: make the 4th argument the degree of u + """Given node u of G1, finds the candidates of u from G2. + + Parameters + ---------- + u: Graph node + The node from G1 for which to find the candidates from G2. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes + of G1. It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes + that are not in the mapping, but are neighbors of nodes that are. + + T1_tilde, T2_tilde: set + Ti_tilde contains all the nodes from Gi, that are neither in the + mapping nor in Ti + + Returns + ------- + candidates: set + The nodes from G2 which are candidates for u. 
+ """ + G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params + mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params + + covered_nbrs = [nbr for nbr in G1[u] if nbr in mapping] + if not covered_nbrs: + candidates = set(nodes_of_G2Labels[G1_labels[u]]) + candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + candidates.intersection_update(T2_tilde) + candidates.difference_update(reverse_mapping) + if G1.is_multigraph(): + candidates.difference_update( + { + node + for node in candidates + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return candidates + + nbr1 = covered_nbrs[0] + common_nodes = set(G2[mapping[nbr1]]) + + for nbr1 in covered_nbrs[1:]: + common_nodes.intersection_update(G2[mapping[nbr1]]) + + common_nodes.difference_update(reverse_mapping) + common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]]) + if G1.is_multigraph(): + common_nodes.difference_update( + { + node + for node in common_nodes + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return common_nodes + + +def _find_candidates_Di(u, graph_params, state_params, G1_degree): + G1, G2, G1_labels, _, _, nodes_of_G2Labels, G2_nodes_of_degree = graph_params + mapping, reverse_mapping, _, _, _, _, _, _, T2_tilde, _ = state_params + + covered_successors = [succ for succ in G1[u] if succ in mapping] + covered_predecessors = [pred for pred in G1.pred[u] if pred in mapping] + + if not (covered_successors or covered_predecessors): + candidates = set(nodes_of_G2Labels[G1_labels[u]]) + candidates.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + candidates.intersection_update(T2_tilde) + candidates.difference_update(reverse_mapping) + if G1.is_multigraph(): + candidates.difference_update( + { + node + for node in candidates + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return candidates + + if covered_successors: + succ1 = covered_successors[0] + common_nodes = set(G2.pred[mapping[succ1]]) + + for succ1 in covered_successors[1:]: + common_nodes.intersection_update(G2.pred[mapping[succ1]]) + else: + pred1 = covered_predecessors.pop() + common_nodes = set(G2[mapping[pred1]]) + + for pred1 in covered_predecessors: + common_nodes.intersection_update(G2[mapping[pred1]]) + + common_nodes.difference_update(reverse_mapping) + common_nodes.intersection_update(G2_nodes_of_degree[G1_degree[u]]) + common_nodes.intersection_update(nodes_of_G2Labels[G1_labels[u]]) + if G1.is_multigraph(): + common_nodes.difference_update( + { + node + for node in common_nodes + if G1.number_of_edges(u, u) != G2.number_of_edges(node, node) + } + ) + return common_nodes + + +def _feasibility(node1, node2, graph_params, state_params): + """Given a candidate pair of nodes u and v from G1 and G2 respectively, + checks if it's feasible to extend the mapping, i.e. if u and v can be matched. + + Notes + ----- + This function performs all the necessary checking by applying both consistency + and cutting rules. + + Parameters + ---------- + node1, node2: Graph node + The candidate pair of nodes being checked for matching + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. 
The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes
+            of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes
+            that are not in the mapping, but are neighbors of nodes that are.
+
+        T1_out, T2_out: set
+            Ti_out contains all the nodes from Gi, that are neither in the mapping
+            nor in Ti
+
+    Returns
+    -------
+    True if all checks are successful, False otherwise.
+    """
+    G1 = graph_params.G1
+
+    if _cut_PT(node1, node2, graph_params, state_params):
+        return False
+
+    if G1.is_multigraph():
+        if not _consistent_PT(node1, node2, graph_params, state_params):
+            return False
+
+    return True
+
+
+def _cut_PT(u, v, graph_params, state_params):
+    """Implements the cutting rules for the ISO problem.
+
+    Parameters
+    ----------
+    u, v: Graph node
+        The two candidate nodes being examined.
+
+    graph_params: namedtuple
+        Contains all the Graph-related parameters:
+
+        G1,G2: NetworkX Graph or MultiGraph instances.
+            The two graphs to check for isomorphism or monomorphism
+
+        G1_labels,G2_labels: dict
+            The label of every node in G1 and G2 respectively
+
+    state_params: namedtuple
+        Contains all the State-related parameters:
+
+        mapping: dict
+            The mapping as extended so far. Maps nodes of G1 to nodes of G2
+
+        reverse_mapping: dict
+            The reverse mapping as extended so far. Maps nodes from G2 to nodes
+            of G1. It's basically "mapping" reversed
+
+        T1, T2: set
+            Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes
+            that are not in the mapping, but are neighbors of nodes that are.
+
+        T1_tilde, T2_tilde: set
+            Ti_tilde contains all the nodes from Gi, that are neither in the
+            mapping nor in Ti
+
+    Returns
+    -------
+    True if we should prune this branch, i.e. the node pair failed the cutting checks. False otherwise.
+    """
+    G1, G2, G1_labels, G2_labels, _, _, _ = graph_params
+    (
+        _,
+        _,
+        T1,
+        T1_in,
+        T1_tilde,
+        _,
+        T2,
+        T2_in,
+        T2_tilde,
+        _,
+    ) = state_params
+
+    u_labels_predecessors, v_labels_predecessors = {}, {}
+    if G1.is_directed():
+        u_labels_predecessors = nx.utils.groups(
+            {n1: G1_labels[n1] for n1 in G1.pred[u]}
+        )
+        v_labels_predecessors = nx.utils.groups(
+            {n2: G2_labels[n2] for n2 in G2.pred[v]}
+        )
+
+        if set(u_labels_predecessors.keys()) != set(v_labels_predecessors.keys()):
+            return True
+
+    u_labels_successors = nx.utils.groups({n1: G1_labels[n1] for n1 in G1[u]})
+    v_labels_successors = nx.utils.groups({n2: G2_labels[n2] for n2 in G2[v]})
+
+    # if the neighbors of u do not have the same labels as those of v, NOT feasible.
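+    # For example: if u's neighbors carry labels {"red", "blue"} while v's
+    # carry {"red", "green"}, no label-preserving bijection between the two
+    # neighborhoods can exist, so the pair is cut.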
+ if set(u_labels_successors.keys()) != set(v_labels_successors.keys()): + return True + + for label, G1_nbh in u_labels_successors.items(): + G2_nbh = v_labels_successors[label] + + if G1.is_multigraph(): + # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2 + u_nbrs_edges = sorted(G1.number_of_edges(u, x) for x in G1_nbh) + v_nbrs_edges = sorted(G2.number_of_edges(v, x) for x in G2_nbh) + if any( + u_nbr_edges != v_nbr_edges + for u_nbr_edges, v_nbr_edges in zip(u_nbrs_edges, v_nbrs_edges) + ): + return True + + if len(T1.intersection(G1_nbh)) != len(T2.intersection(G2_nbh)): + return True + if len(T1_tilde.intersection(G1_nbh)) != len(T2_tilde.intersection(G2_nbh)): + return True + if G1.is_directed() and len(T1_in.intersection(G1_nbh)) != len( + T2_in.intersection(G2_nbh) + ): + return True + + if not G1.is_directed(): + return False + + for label, G1_pred in u_labels_predecessors.items(): + G2_pred = v_labels_predecessors[label] + + if G1.is_multigraph(): + # Check for every neighbor in the neighborhood, if u-nbr1 has same edges as v-nbr2 + u_pred_edges = sorted(G1.number_of_edges(u, x) for x in G1_pred) + v_pred_edges = sorted(G2.number_of_edges(v, x) for x in G2_pred) + if any( + u_nbr_edges != v_nbr_edges + for u_nbr_edges, v_nbr_edges in zip(u_pred_edges, v_pred_edges) + ): + return True + + if len(T1.intersection(G1_pred)) != len(T2.intersection(G2_pred)): + return True + if len(T1_tilde.intersection(G1_pred)) != len(T2_tilde.intersection(G2_pred)): + return True + if len(T1_in.intersection(G1_pred)) != len(T2_in.intersection(G2_pred)): + return True + + return False + + +def _consistent_PT(u, v, graph_params, state_params): + """Checks the consistency of extending the mapping using the current node pair. + + Parameters + ---------- + u, v: Graph node + The two candidate nodes being examined. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. + It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes + that are not in the mapping, but are neighbors of nodes that are. + + T1_out, T2_out: set + Ti_out contains all the nodes from Gi, that are neither in the mapping + nor in Ti + + Returns + ------- + True if the pair passes all the consistency checks successfully. False otherwise. 
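+
+    Examples
+    --------
+    A trivial sketch, again via the private ``_initialize_parameters`` helper:
+    with an empty mapping there is nothing to contradict, so any candidate
+    pair is consistent.
+
+    >>> G = nx.path_graph(3)
+    >>> gp, st = _initialize_parameters(G, G, dict(G.degree))
+    >>> _consistent_PT(0, 0, gp, st)
+    True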
+ """ + G1, G2 = graph_params.G1, graph_params.G2 + mapping, reverse_mapping = state_params.mapping, state_params.reverse_mapping + + for neighbor in G1[u]: + if neighbor in mapping: + if G1.number_of_edges(u, neighbor) != G2.number_of_edges( + v, mapping[neighbor] + ): + return False + + for neighbor in G2[v]: + if neighbor in reverse_mapping: + if G1.number_of_edges(u, reverse_mapping[neighbor]) != G2.number_of_edges( + v, neighbor + ): + return False + + if not G1.is_directed(): + return True + + for predecessor in G1.pred[u]: + if predecessor in mapping: + if G1.number_of_edges(predecessor, u) != G2.number_of_edges( + mapping[predecessor], v + ): + return False + + for predecessor in G2.pred[v]: + if predecessor in reverse_mapping: + if G1.number_of_edges( + reverse_mapping[predecessor], u + ) != G2.number_of_edges(predecessor, v): + return False + + return True + + +def _update_Tinout(new_node1, new_node2, graph_params, state_params): + """Updates the Ti/Ti_out (i=1,2) when a new node pair u-v is added to the mapping. + + Notes + ----- + This function should be called right after the feasibility checks are passed, + and node1 is mapped to node2. The purpose of this function is to avoid brute + force computing of Ti/Ti_out by iterating over all nodes of the graph and + checking which nodes satisfy the necessary conditions. Instead, in every step + of the algorithm we focus exclusively on the two nodes that are being added + to the mapping, incrementally updating Ti/Ti_out. + + Parameters + ---------- + new_node1, new_node2: Graph node + The two new nodes, added to the mapping. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. + It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes + that are not in the mapping, but are neighbors of nodes that are. 
+ + T1_tilde, T2_tilde: set + Ti_out contains all the nodes from Gi, that are neither in the mapping nor in Ti + """ + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + uncovered_successors_G1 = {succ for succ in G1[new_node1] if succ not in mapping} + uncovered_successors_G2 = { + succ for succ in G2[new_node2] if succ not in reverse_mapping + } + + # Add the uncovered neighbors of node1 and node2 in T1 and T2 respectively + T1.update(uncovered_successors_G1) + T2.update(uncovered_successors_G2) + T1.discard(new_node1) + T2.discard(new_node2) + + T1_tilde.difference_update(uncovered_successors_G1) + T2_tilde.difference_update(uncovered_successors_G2) + T1_tilde.discard(new_node1) + T2_tilde.discard(new_node2) + + if not G1.is_directed(): + return + + uncovered_predecessors_G1 = { + pred for pred in G1.pred[new_node1] if pred not in mapping + } + uncovered_predecessors_G2 = { + pred for pred in G2.pred[new_node2] if pred not in reverse_mapping + } + + T1_in.update(uncovered_predecessors_G1) + T2_in.update(uncovered_predecessors_G2) + T1_in.discard(new_node1) + T2_in.discard(new_node2) + + T1_tilde.difference_update(uncovered_predecessors_G1) + T2_tilde.difference_update(uncovered_predecessors_G2) + T1_tilde.discard(new_node1) + T2_tilde.discard(new_node2) + + +def _restore_Tinout(popped_node1, popped_node2, graph_params, state_params): + """Restores the previous version of Ti/Ti_out when a node pair is deleted + from the mapping. + + Parameters + ---------- + popped_node1, popped_node2: Graph node + The two nodes deleted from the mapping. + + graph_params: namedtuple + Contains all the Graph-related parameters: + + G1,G2: NetworkX Graph or MultiGraph instances. + The two graphs to check for isomorphism or monomorphism + + G1_labels,G2_labels: dict + The label of every node in G1 and G2 respectively + + state_params: namedtuple + Contains all the State-related parameters: + + mapping: dict + The mapping as extended so far. Maps nodes of G1 to nodes of G2 + + reverse_mapping: dict + The reverse mapping as extended so far. Maps nodes from G2 to nodes of G1. + It's basically "mapping" reversed + + T1, T2: set + Ti contains uncovered neighbors of covered nodes from Gi, i.e. nodes + that are not in the mapping, but are neighbors of nodes that are. + + T1_tilde, T2_tilde: set + Ti_out contains all the nodes from Gi, that are neither in the mapping + nor in Ti + """ + # If the node we want to remove from the mapping, has at least one covered + # neighbor, add it to T1. + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + is_added = False + for neighbor in G1[popped_node1]: + if neighbor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. + # If not, only then exclude it from T1 + if any(nbr in mapping for nbr in G1[neighbor]): + continue + T1.discard(neighbor) + T1_tilde.add(neighbor) + + # Case where the node is not present in neither the mapping nor T1. 
+ # By definition, it should belong to T1_tilde + if not is_added: + T1_tilde.add(popped_node1) + + is_added = False + for neighbor in G2[popped_node2]: + if neighbor in reverse_mapping: + is_added = True + T2.add(popped_node2) + else: + if any(nbr in reverse_mapping for nbr in G2[neighbor]): + continue + T2.discard(neighbor) + T2_tilde.add(neighbor) + + if not is_added: + T2_tilde.add(popped_node2) + + +def _restore_Tinout_Di(popped_node1, popped_node2, graph_params, state_params): + # If the node we want to remove from the mapping, has at least one covered neighbor, add it to T1. + G1, G2, _, _, _, _, _ = graph_params + ( + mapping, + reverse_mapping, + T1, + T1_in, + T1_tilde, + T1_tilde_in, + T2, + T2_in, + T2_tilde, + T2_tilde_in, + ) = state_params + + is_added = False + for successor in G1[popped_node1]: + if successor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1_in.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. + # If not, only then exclude it from T1 + if not any(pred in mapping for pred in G1.pred[successor]): + T1.discard(successor) + + if not any(succ in mapping for succ in G1[successor]): + T1_in.discard(successor) + + if successor not in T1: + if successor not in T1_in: + T1_tilde.add(successor) + + for predecessor in G1.pred[popped_node1]: + if predecessor in mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T1.add(popped_node1) + else: + # check if its neighbor has another connection with a covered node. + # If not, only then exclude it from T1 + if not any(pred in mapping for pred in G1.pred[predecessor]): + T1.discard(predecessor) + + if not any(succ in mapping for succ in G1[predecessor]): + T1_in.discard(predecessor) + + if not (predecessor in T1 or predecessor in T1_in): + T1_tilde.add(predecessor) + + # Case where the node is not present in neither the mapping nor T1. + # By definition it should belong to T1_tilde + if not is_added: + T1_tilde.add(popped_node1) + + is_added = False + for successor in G2[popped_node2]: + if successor in reverse_mapping: + is_added = True + T2_in.add(popped_node2) + else: + if not any(pred in reverse_mapping for pred in G2.pred[successor]): + T2.discard(successor) + + if not any(succ in reverse_mapping for succ in G2[successor]): + T2_in.discard(successor) + + if successor not in T2: + if successor not in T2_in: + T2_tilde.add(successor) + + for predecessor in G2.pred[popped_node2]: + if predecessor in reverse_mapping: + # if a neighbor of the excluded node1 is in the mapping, keep node1 in T1 + is_added = True + T2.add(popped_node2) + else: + # check if its neighbor has another connection with a covered node. 
+ # If not, only then exclude it from T1 + if not any(pred in reverse_mapping for pred in G2.pred[predecessor]): + T2.discard(predecessor) + + if not any(succ in reverse_mapping for succ in G2[predecessor]): + T2_in.discard(predecessor) + + if not (predecessor in T2 or predecessor in T2_in): + T2_tilde.add(predecessor) + + if not is_added: + T2_tilde.add(popped_node2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py new file mode 100644 index 0000000..6fcf8a1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/isomorphism/vf2userfunc.py @@ -0,0 +1,192 @@ +""" +Module to simplify the specification of user-defined equality functions for +node and edge attributes during isomorphism checks. + +During the construction of an isomorphism, the algorithm considers two +candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then +compared with respect to properties involving n1 and n2, and if the outcome +is good, then the candidate nodes are considered isomorphic. NetworkX +provides a simple mechanism for users to extend the comparisons to include +node and edge attributes. + +Node attributes are handled by the node_match keyword. When considering +n1 and n2, the algorithm passes their node attribute dictionaries to +node_match, and if it returns False, then n1 and n2 cannot be +considered to be isomorphic. + +Edge attributes are handled by the edge_match keyword. When considering +n1 and n2, the algorithm must verify that outgoing edges from n1 are +commensurate with the outgoing edges for n2. If the graph is directed, +then a similar check is also performed for incoming edges. + +Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from +G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge +between (n1, v1) and only one edge between (n2, v2). Those edge attribute +dictionaries are passed to edge_match, and if it returns False, then +n1 and n2 cannot be considered isomorphic. For multigraphs and +multidigraphs, there can be multiple edges between (n1, v1) and also +multiple edges between (n2, v2). Now, there must exist an isomorphism +from "all the edges between (n1, v1)" to "all the edges between (n2, v2)". +So, all of the edge attribute dictionaries are passed to edge_match, and +it must determine if there is an isomorphism between the two sets of edges. +""" + +from . 
import isomorphvf2 as vf2 + +__all__ = ["GraphMatcher", "DiGraphMatcher", "MultiGraphMatcher", "MultiDiGraphMatcher"] + + +def _semantic_feasibility(self, G1_node, G2_node): + """Returns True if mapping G1_node to G2_node is semantically feasible.""" + # Make sure the nodes match + if self.node_match is not None: + nm = self.node_match(self.G1.nodes[G1_node], self.G2.nodes[G2_node]) + if not nm: + return False + + # Make sure the edges match + if self.edge_match is not None: + # Cached lookups + G1nbrs = self.G1_adj[G1_node] + G2nbrs = self.G2_adj[G2_node] + core_1 = self.core_1 + edge_match = self.edge_match + + for neighbor in G1nbrs: + # G1_node is not in core_1, so we must handle R_self separately + if neighbor == G1_node: + if G2_node in G2nbrs and not edge_match( + G1nbrs[G1_node], G2nbrs[G2_node] + ): + return False + elif neighbor in core_1: + G2_nbr = core_1[neighbor] + if G2_nbr in G2nbrs and not edge_match( + G1nbrs[neighbor], G2nbrs[G2_nbr] + ): + return False + # syntactic check has already verified that neighbors are symmetric + + return True + + +class GraphMatcher(vf2.GraphMatcher): + """VF2 isomorphism checker for undirected graphs.""" + + def __init__(self, G1, G2, node_match=None, edge_match=None): + """Initialize graph matcher. + + Parameters + ---------- + G1, G2: graph + The graphs to be tested. + + node_match: callable + A function that returns True iff node n1 in G1 and n2 in G2 + should be considered equal during the isomorphism test. The + function will be called like:: + + node_match(G1.nodes[n1], G2.nodes[n2]) + + That is, the function will receive the node attribute dictionaries + of the nodes under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + edge_match: callable + A function that returns True iff the edge attribute dictionary for + the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be + considered equal during the isomorphism test. The function will be + called like:: + + edge_match(G1[u1][v1], G2[u2][v2]) + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + """ + vf2.GraphMatcher.__init__(self, G1, G2) + + self.node_match = node_match + self.edge_match = edge_match + + # These will be modified during checks to minimize code repeat. + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + semantic_feasibility = _semantic_feasibility + + +class DiGraphMatcher(vf2.DiGraphMatcher): + """VF2 isomorphism checker for directed graphs.""" + + def __init__(self, G1, G2, node_match=None, edge_match=None): + """Initialize graph matcher. + + Parameters + ---------- + G1, G2 : graph + The graphs to be tested. + + node_match : callable + A function that returns True iff node n1 in G1 and n2 in G2 + should be considered equal during the isomorphism test. The + function will be called like:: + + node_match(G1.nodes[n1], G2.nodes[n2]) + + That is, the function will receive the node attribute dictionaries + of the nodes under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + edge_match : callable + A function that returns True iff the edge attribute dictionary for + the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be + considered equal during the isomorphism test. 
The function will be + called like:: + + edge_match(G1[u1][v1], G2[u2][v2]) + + That is, the function will receive the edge attribute dictionaries + of the edges under consideration. If None, then no attributes are + considered when testing for an isomorphism. + + """ + vf2.DiGraphMatcher.__init__(self, G1, G2) + + self.node_match = node_match + self.edge_match = edge_match + + # These will be modified during checks to minimize code repeat. + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + def semantic_feasibility(self, G1_node, G2_node): + """Returns True if mapping G1_node to G2_node is semantically feasible.""" + + # Test node_match and also test edge_match on successors + feasible = _semantic_feasibility(self, G1_node, G2_node) + if not feasible: + return False + + # Test edge_match on predecessors + self.G1_adj = self.G1.pred + self.G2_adj = self.G2.pred + feasible = _semantic_feasibility(self, G1_node, G2_node) + self.G1_adj = self.G1.adj + self.G2_adj = self.G2.adj + + return feasible + + +# The "semantics" of edge_match are different for multi(di)graphs, but +# the implementation is the same. So, technically we do not need to +# provide "multi" versions, but we do so to match NetworkX's base classes. + + +class MultiGraphMatcher(GraphMatcher): + """VF2 isomorphism checker for undirected multigraphs.""" + + +class MultiDiGraphMatcher(DiGraphMatcher): + """VF2 isomorphism checker for directed multigraphs.""" diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py new file mode 100644 index 0000000..6009f00 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__init__.py @@ -0,0 +1,2 @@ +from networkx.algorithms.link_analysis.hits_alg import * +from networkx.algorithms.link_analysis.pagerank_alg import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..02973b3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-312.pyc new file mode 100644 index 0000000..5895347 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/hits_alg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-312.pyc new file mode 100644 index 0000000..154dfb6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/__pycache__/pagerank_alg.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py new file mode 100644 index 0000000..d9e3069 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/hits_alg.py @@ -0,0 +1,337 @@ +"""Hubs and authorities analysis of graph structure.""" 
+ +import networkx as nx + +__all__ = ["hits"] + + +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}) +def hits(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): + """Returns HITS hubs and authorities values for nodes. + + The HITS algorithm computes two numbers for a node. + Authorities estimates the node value based on the incoming links. + Hubs estimates the node value based on outgoing links. + + Parameters + ---------- + G : graph + A NetworkX graph + + max_iter : integer, optional + Maximum number of iterations in power method. + + tol : float, optional + Error tolerance used to check convergence in power method iteration. + + nstart : dictionary, optional + Starting value of each node for power method iteration. + + normalized : bool (default=True) + Normalize results by the sum of all of the values. + + Returns + ------- + (hubs,authorities) : two-tuple of dictionaries + Two dictionaries keyed by node containing the hub and authority + values. + + Raises + ------ + PowerIterationFailedConvergence + If the algorithm fails to converge to the specified tolerance + within the specified number of iterations of the power iteration + method. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> h, a = nx.hits(G) + + Notes + ----- + The eigenvector calculation is done by the power iteration method + and has no guarantee of convergence. The iteration will stop + after max_iter iterations or an error tolerance of + number_of_nodes(G)*tol has been reached. + + The HITS algorithm was designed for directed graphs but this + algorithm does not check if the input graph is directed and will + execute on undirected graphs. + + References + ---------- + .. [1] A. Langville and C. Meyer, + "A survey of eigenvector methods of web information retrieval." + http://citeseer.ist.psu.edu/713792.html + .. [2] Jon Kleinberg, + Authoritative sources in a hyperlinked environment + Journal of the ACM 46 (5): 604-32, 1999. + doi:10.1145/324133.324140. + http://www.cs.cornell.edu/home/kleinber/auth.pdf. 
+ """ + import numpy as np + import scipy as sp + + if len(G) == 0: + return {}, {} + A = nx.adjacency_matrix(G, nodelist=list(G), dtype=float) + + if nstart is not None: + nstart = np.array(list(nstart.values())) + if max_iter <= 0: + raise nx.PowerIterationFailedConvergence(max_iter) + try: + _, _, vt = sp.sparse.linalg.svds(A, k=1, v0=nstart, maxiter=max_iter, tol=tol) + except sp.sparse.linalg.ArpackNoConvergence as exc: + raise nx.PowerIterationFailedConvergence(max_iter) from exc + + a = vt.flatten().real + h = A @ a + if normalized: + h /= h.sum() + a /= a.sum() + hubs = dict(zip(G, map(float, h))) + authorities = dict(zip(G, map(float, a))) + return hubs, authorities + + +def _hits_python(G, max_iter=100, tol=1.0e-8, nstart=None, normalized=True): + if isinstance(G, nx.MultiGraph | nx.MultiDiGraph): + raise Exception("hits() not defined for graphs with multiedges.") + if len(G) == 0: + return {}, {} + # choose fixed starting vector if not given + if nstart is None: + h = dict.fromkeys(G, 1.0 / G.number_of_nodes()) + else: + h = nstart + # normalize starting vector + s = 1.0 / sum(h.values()) + for k in h: + h[k] *= s + for _ in range(max_iter): # power iteration: make up to max_iter iterations + hlast = h + h = dict.fromkeys(hlast.keys(), 0) + a = dict.fromkeys(hlast.keys(), 0) + # this "matrix multiply" looks odd because it is + # doing a left multiply a^T=hlast^T*G + for n in h: + for nbr in G[n]: + a[nbr] += hlast[n] * G[n][nbr].get("weight", 1) + # now multiply h=Ga + for n in h: + for nbr in G[n]: + h[n] += a[nbr] * G[n][nbr].get("weight", 1) + # normalize vector + s = 1.0 / max(h.values()) + for n in h: + h[n] *= s + # normalize vector + s = 1.0 / max(a.values()) + for n in a: + a[n] *= s + # check convergence, l1 norm + err = sum(abs(h[n] - hlast[n]) for n in h) + if err < tol: + break + else: + raise nx.PowerIterationFailedConvergence(max_iter) + if normalized: + s = 1.0 / sum(a.values()) + for n in a: + a[n] *= s + s = 1.0 / sum(h.values()) + for n in h: + h[n] *= s + return h, a + + +def _hits_numpy(G, normalized=True): + """Returns HITS hubs and authorities values for nodes. + + The HITS algorithm computes two numbers for a node. + Authorities estimates the node value based on the incoming links. + Hubs estimates the node value based on outgoing links. + + Parameters + ---------- + G : graph + A NetworkX graph + + normalized : bool (default=True) + Normalize results by the sum of all of the values. + + Returns + ------- + (hubs,authorities) : two-tuple of dictionaries + Two dictionaries keyed by node containing the hub and authority + values. + + Examples + -------- + >>> G = nx.path_graph(4) + + The `hubs` and `authorities` are given by the eigenvectors corresponding to the + maximum eigenvalues of the hubs_matrix and the authority_matrix, respectively. + + The ``hubs`` and ``authority`` matrices are computed from the adjacency + matrix: + + >>> adj_ary = nx.to_numpy_array(G) + >>> hubs_matrix = adj_ary @ adj_ary.T + >>> authority_matrix = adj_ary.T @ adj_ary + + `_hits_numpy` maps the eigenvector corresponding to the maximum eigenvalue + of the respective matrices to the nodes in `G`: + + >>> from networkx.algorithms.link_analysis.hits_alg import _hits_numpy + >>> hubs, authority = _hits_numpy(G) + + Notes + ----- + The eigenvector calculation uses NumPy's interface to LAPACK. + + The HITS algorithm was designed for directed graphs but this + algorithm does not check if the input graph is directed and will + execute on undirected graphs. 
+ + References + ---------- + .. [1] A. Langville and C. Meyer, + "A survey of eigenvector methods of web information retrieval." + http://citeseer.ist.psu.edu/713792.html + .. [2] Jon Kleinberg, + Authoritative sources in a hyperlinked environment + Journal of the ACM 46 (5): 604-32, 1999. + doi:10.1145/324133.324140. + http://www.cs.cornell.edu/home/kleinber/auth.pdf. + """ + import numpy as np + + if len(G) == 0: + return {}, {} + adj_ary = nx.to_numpy_array(G) + # Hub matrix + H = adj_ary @ adj_ary.T + e, ev = np.linalg.eig(H) + h = ev[:, np.argmax(e)] # eigenvector corresponding to the maximum eigenvalue + # Authority matrix + A = adj_ary.T @ adj_ary + e, ev = np.linalg.eig(A) + a = ev[:, np.argmax(e)] # eigenvector corresponding to the maximum eigenvalue + if normalized: + h /= h.sum() + a /= a.sum() + else: + h /= h.max() + a /= a.max() + hubs = dict(zip(G, map(float, h))) + authorities = dict(zip(G, map(float, a))) + return hubs, authorities + + +def _hits_scipy(G, max_iter=100, tol=1.0e-6, nstart=None, normalized=True): + """Returns HITS hubs and authorities values for nodes. + + + The HITS algorithm computes two numbers for a node. + Authorities estimates the node value based on the incoming links. + Hubs estimates the node value based on outgoing links. + + Parameters + ---------- + G : graph + A NetworkX graph + + max_iter : integer, optional + Maximum number of iterations in power method. + + tol : float, optional + Error tolerance used to check convergence in power method iteration. + + nstart : dictionary, optional + Starting value of each node for power method iteration. + + normalized : bool (default=True) + Normalize results by the sum of all of the values. + + Returns + ------- + (hubs,authorities) : two-tuple of dictionaries + Two dictionaries keyed by node containing the hub and authority + values. + + Examples + -------- + >>> from networkx.algorithms.link_analysis.hits_alg import _hits_scipy + >>> G = nx.path_graph(4) + >>> h, a = _hits_scipy(G) + + Notes + ----- + This implementation uses SciPy sparse matrices. + + The eigenvector calculation is done by the power iteration method + and has no guarantee of convergence. The iteration will stop + after max_iter iterations or an error tolerance of + number_of_nodes(G)*tol has been reached. + + The HITS algorithm was designed for directed graphs but this + algorithm does not check if the input graph is directed and will + execute on undirected graphs. + + Raises + ------ + PowerIterationFailedConvergence + If the algorithm fails to converge to the specified tolerance + within the specified number of iterations of the power iteration + method. + + References + ---------- + .. [1] A. Langville and C. Meyer, + "A survey of eigenvector methods of web information retrieval." + http://citeseer.ist.psu.edu/713792.html + .. [2] Jon Kleinberg, + Authoritative sources in a hyperlinked environment + Journal of the ACM 46 (5): 604-632, 1999. + doi:10.1145/324133.324140. + http://www.cs.cornell.edu/home/kleinber/auth.pdf. 
+ """ + import numpy as np + + if len(G) == 0: + return {}, {} + A = nx.to_scipy_sparse_array(G, nodelist=list(G)) + (n, _) = A.shape # should be square + ATA = A.T @ A # authority matrix + # choose fixed starting vector if not given + if nstart is None: + x = np.ones((n, 1)) / n + else: + x = np.array([nstart.get(n, 0) for n in list(G)], dtype=float) + x /= x.sum() + + # power iteration on authority matrix + i = 0 + while True: + xlast = x + x = ATA @ x + x /= x.max() + # check convergence, l1 norm + err = np.absolute(x - xlast).sum() + if err < tol: + break + if i > max_iter: + raise nx.PowerIterationFailedConvergence(max_iter) + i += 1 + + a = x.flatten() + h = A @ a + if normalized: + h /= h.sum() + a /= a.sum() + hubs = dict(zip(G, map(float, h))) + authorities = dict(zip(G, map(float, a))) + return hubs, authorities diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py new file mode 100644 index 0000000..2ab0d86 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/pagerank_alg.py @@ -0,0 +1,499 @@ +"""PageRank analysis of graph structure.""" + +import networkx as nx + +__all__ = ["pagerank", "google_matrix"] + + +@nx._dispatchable(edge_attrs="weight") +def pagerank( + G, + alpha=0.85, + personalization=None, + max_iter=100, + tol=1.0e-6, + nstart=None, + weight="weight", + dangling=None, +): + """Returns the PageRank of the nodes in the graph. + + PageRank computes a ranking of the nodes in the graph G based on + the structure of the incoming links. It was originally designed as + an algorithm to rank web pages. + + Parameters + ---------- + G : graph + A NetworkX graph. Undirected graphs will be converted to a directed + graph with two directed edges for each undirected edge. + + alpha : float, optional + Damping parameter for PageRank, default=0.85. + + personalization: dict, optional + The "personalization vector" consisting of a dictionary with a + key some subset of graph nodes and personalization value each of those. + At least one personalization value must be non-zero. + If not specified, a nodes personalization value will be zero. + By default, a uniform distribution is used. + + max_iter : integer, optional + Maximum number of iterations in power method eigenvalue solver. + + tol : float, optional + Error tolerance used to check convergence in power method solver. + The iteration will stop after a tolerance of ``len(G) * tol`` is reached. + + nstart : dictionary, optional + Starting value of PageRank iteration for each node. + + weight : key, optional + Edge data key to use as weight. If None weights are set to 1. + + dangling: dict, optional + The outedges to be assigned to any "dangling" nodes, i.e., nodes without + any outedges. The dict key is the node the outedge points to and the dict + value is the weight of that outedge. By default, dangling nodes are given + outedges according to the personalization vector (uniform if not + specified). This must be selected to result in an irreducible transition + matrix (see notes under google_matrix). It may be common to have the + dangling dict to be the same as the personalization dict. 
+
+    Returns
+    -------
+    pagerank : dictionary
+        Dictionary of nodes with PageRank as value.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph(nx.path_graph(4))
+    >>> pr = nx.pagerank(G, alpha=0.9)
+
+    Notes
+    -----
+    The eigenvector calculation is done by the power iteration method
+    and has no guarantee of convergence. The iteration will stop after
+    an error tolerance of ``len(G) * tol`` has been reached. If the
+    number of iterations exceeds `max_iter`, a
+    :exc:`networkx.exception.PowerIterationFailedConvergence` exception
+    is raised.
+
+    The PageRank algorithm was designed for directed graphs but this
+    algorithm does not check if the input graph is directed and will
+    execute on undirected graphs by converting each edge in the
+    directed graph to two edges.
+
+    See Also
+    --------
+    google_matrix
+    :func:`~networkx.algorithms.bipartite.link_analysis.birank`
+
+    Raises
+    ------
+    PowerIterationFailedConvergence
+        If the algorithm fails to converge to the specified tolerance
+        within the specified number of iterations of the power iteration
+        method.
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
+       The PageRank citation ranking: Bringing order to the Web. 1999
+       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
+
+    """
+    return _pagerank_scipy(
+        G, alpha, personalization, max_iter, tol, nstart, weight, dangling
+    )
+
+
+def _pagerank_python(
+    G,
+    alpha=0.85,
+    personalization=None,
+    max_iter=100,
+    tol=1.0e-6,
+    nstart=None,
+    weight="weight",
+    dangling=None,
+):
+    if len(G) == 0:
+        return {}
+
+    D = G.to_directed()
+
+    # Create a copy in (right) stochastic form
+    W = nx.stochastic_graph(D, weight=weight)
+    N = W.number_of_nodes()
+
+    # Choose fixed starting vector if not given
+    if nstart is None:
+        x = dict.fromkeys(W, 1.0 / N)
+    else:
+        # Normalized nstart vector
+        s = sum(nstart.values())
+        x = {k: v / s for k, v in nstart.items()}
+
+    if personalization is None:
+        # Assign uniform personalization vector if not given
+        p = dict.fromkeys(W, 1.0 / N)
+    else:
+        s = sum(personalization.values())
+        p = {k: v / s for k, v in personalization.items()}
+
+    if dangling is None:
+        # Use personalization vector if dangling vector not specified
+        dangling_weights = p
+    else:
+        s = sum(dangling.values())
+        dangling_weights = {k: v / s for k, v in dangling.items()}
+    dangling_nodes = [n for n in W if W.out_degree(n, weight=weight) == 0.0]
+
+    # power iteration: make up to max_iter iterations
+    for _ in range(max_iter):
+        xlast = x
+        x = dict.fromkeys(xlast.keys(), 0)
+        danglesum = alpha * sum(xlast[n] for n in dangling_nodes)
+        for n in x:
+            # this matrix multiply looks odd because it is
+            # doing a left multiply x^T=xlast^T*W
+            for _, nbr, wt in W.edges(n, data=weight):
+                x[nbr] += alpha * xlast[n] * wt
+            x[n] += danglesum * dangling_weights.get(n, 0) + (1.0 - alpha) * p.get(n, 0)
+        # check convergence, l1 norm
+        err = sum(abs(x[n] - xlast[n]) for n in x)
+        if err < N * tol:
+            return x
+    raise nx.PowerIterationFailedConvergence(max_iter)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def google_matrix(
+    G, alpha=0.85, personalization=None, nodelist=None, weight="weight", dangling=None
+):
+    """Returns the Google matrix of the graph.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph. Undirected graphs will be converted to a directed
+        graph with two directed edges for each undirected edge.
+
+    alpha : float
+        The damping factor.
+
+    personalization: dict, optional
+        The "personalization vector", given as a dictionary keyed by a
+        subset of graph nodes, mapping each node to its personalization
+        value. At least one personalization value must be non-zero.
+        Nodes not included in the dictionary get a personalization value
+        of zero. If no dictionary is given, a uniform distribution over
+        all nodes is used.
+
+    nodelist : list, optional
+        The rows and columns are ordered according to the nodes in nodelist.
+        If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : key, optional
+        Edge data key to use as weight. If None weights are set to 1.
+
+    dangling: dict, optional
+        The outedges to be assigned to any "dangling" nodes, i.e., nodes without
+        any outedges. The dict key is the node the outedge points to and the dict
+        value is the weight of that outedge. By default, dangling nodes are given
+        outedges according to the personalization vector (uniform if not
+        specified). This must be selected to result in an irreducible transition
+        matrix (see notes below). It is common for the dangling dict to
+        be the same as the personalization dict.
+
+    Returns
+    -------
+    A : 2D NumPy ndarray
+        Google matrix of the graph
+
+    Notes
+    -----
+    The array returned represents the transition matrix that describes the
+    Markov chain used in PageRank. For PageRank to converge to a unique
+    solution (i.e., a unique stationary distribution in a Markov chain), the
+    transition matrix must be irreducible. In other words, it must be that
+    there exists a path between every pair of nodes in the graph, or else there
+    is the potential of "rank sinks."
+
+    This implementation works with Multi(Di)Graphs. For multigraphs the
+    weight between two nodes is set to be the sum of all edge weights
+    between those nodes.
+
+    See Also
+    --------
+    pagerank
+    """
+    import numpy as np
+
+    if nodelist is None:
+        nodelist = list(G)
+
+    A = nx.to_numpy_array(G, nodelist=nodelist, weight=weight)
+    N = len(G)
+    if N == 0:
+        return A
+
+    # Personalization vector
+    if personalization is None:
+        p = np.repeat(1.0 / N, N)
+    else:
+        p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float)
+        if p.sum() == 0:
+            raise ZeroDivisionError
+        p /= p.sum()
+
+    # Dangling nodes
+    if dangling is None:
+        dangling_weights = p
+    else:
+        # Convert the dangling dictionary into an array in nodelist order
+        dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float)
+        dangling_weights /= dangling_weights.sum()
+    dangling_nodes = np.where(A.sum(axis=1) == 0)[0]
+
+    # Assign dangling_weights to any dangling nodes (nodes with no out links)
+    A[dangling_nodes] = dangling_weights
+
+    A /= A.sum(axis=1)[:, np.newaxis]  # Normalize rows to sum to 1
+
+    return alpha * A + (1 - alpha) * p
+
+
+def _pagerank_numpy(
+    G, alpha=0.85, personalization=None, weight="weight", dangling=None
+):
+    """Returns the PageRank of the nodes in the graph.
+
+    PageRank computes a ranking of the nodes in the graph G based on
+    the structure of the incoming links. It was originally designed as
+    an algorithm to rank web pages.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph. Undirected graphs will be converted to a directed
+        graph with two directed edges for each undirected edge.
+
+    alpha : float, optional
+        Damping parameter for PageRank, default=0.85.
+
+    personalization: dict, optional
+        The "personalization vector", given as a dictionary keyed by a
+        subset of graph nodes, mapping each node to its personalization
+        value. At least one personalization value must be non-zero.
+        Nodes not included in the dictionary get a personalization value
+        of zero. If no dictionary is given, a uniform distribution over
+        all nodes is used.
+
+    weight : key, optional
+        Edge data key to use as weight. If None weights are set to 1.
+
+    dangling: dict, optional
+        The outedges to be assigned to any "dangling" nodes, i.e., nodes without
+        any outedges. The dict key is the node the outedge points to and the dict
+        value is the weight of that outedge. By default, dangling nodes are given
+        outedges according to the personalization vector (uniform if not
+        specified). This must be selected to result in an irreducible transition
+        matrix (see notes under google_matrix). It is common for the
+        dangling dict to be the same as the personalization dict.
+
+    Returns
+    -------
+    pagerank : dictionary
+        Dictionary of nodes with PageRank as value.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_numpy
+    >>> G = nx.DiGraph(nx.path_graph(4))
+    >>> pr = _pagerank_numpy(G, alpha=0.9)
+
+    Notes
+    -----
+    The eigenvector calculation uses NumPy's interface to the LAPACK
+    eigenvalue solvers. This will be the fastest and most accurate
+    for small graphs.
+
+    This implementation works with Multi(Di)Graphs. For multigraphs the
+    weight between two nodes is set to be the sum of all edge weights
+    between those nodes.
+
+    See Also
+    --------
+    pagerank, google_matrix
+
+    References
+    ----------
+    .. [1] A. Langville and C. Meyer,
+       "A survey of eigenvector methods of web information retrieval."
+       http://citeseer.ist.psu.edu/713792.html
+    .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry,
+       The PageRank citation ranking: Bringing order to the Web. 1999
+       http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}
+    M = google_matrix(
+        G, alpha, personalization=personalization, weight=weight, dangling=dangling
+    )
+    # use numpy LAPACK solver
+    eigenvalues, eigenvectors = np.linalg.eig(M.T)
+    ind = np.argmax(eigenvalues)
+    # eigenvector of largest eigenvalue is at ind, normalized
+    largest = np.array(eigenvectors[:, ind]).flatten().real
+    norm = largest.sum()
+    return dict(zip(G, map(float, largest / norm)))
+
+
+def _pagerank_scipy(
+    G,
+    alpha=0.85,
+    personalization=None,
+    max_iter=100,
+    tol=1.0e-6,
+    nstart=None,
+    weight="weight",
+    dangling=None,
+):
+    """Returns the PageRank of the nodes in the graph.
+
+    PageRank computes a ranking of the nodes in the graph G based on
+    the structure of the incoming links. It was originally designed as
+    an algorithm to rank web pages.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph. Undirected graphs will be converted to a directed
+        graph with two directed edges for each undirected edge.
+
+    alpha : float, optional
+        Damping parameter for PageRank, default=0.85.
+
+    personalization: dict, optional
+        The "personalization vector", given as a dictionary keyed by a
+        subset of graph nodes, mapping each node to its personalization
+        value. At least one personalization value must be non-zero.
+        Nodes not included in the dictionary get a personalization value
+        of zero. If no dictionary is given, a uniform distribution over
+        all nodes is used.
+
+    max_iter : integer, optional
+        Maximum number of iterations in power method eigenvalue solver.
+ + tol : float, optional + Error tolerance used to check convergence in power method solver. + The iteration will stop after a tolerance of ``len(G) * tol`` is reached. + + nstart : dictionary, optional + Starting value of PageRank iteration for each node. + + weight : key, optional + Edge data key to use as weight. If None weights are set to 1. + + dangling: dict, optional + The outedges to be assigned to any "dangling" nodes, i.e., nodes without + any outedges. The dict key is the node the outedge points to and the dict + value is the weight of that outedge. By default, dangling nodes are given + outedges according to the personalization vector (uniform if not + specified) This must be selected to result in an irreducible transition + matrix (see notes under google_matrix). It may be common to have the + dangling dict to be the same as the personalization dict. + + Returns + ------- + pagerank : dictionary + Dictionary of nodes with PageRank as value + + Examples + -------- + >>> from networkx.algorithms.link_analysis.pagerank_alg import _pagerank_scipy + >>> G = nx.DiGraph(nx.path_graph(4)) + >>> pr = _pagerank_scipy(G, alpha=0.9) + + Notes + ----- + The eigenvector calculation uses power iteration with a SciPy + sparse matrix representation. + + This implementation works with Multi(Di)Graphs. For multigraphs the + weight between two nodes is set to be the sum of all edge weights + between those nodes. + + See Also + -------- + pagerank + + Raises + ------ + PowerIterationFailedConvergence + If the algorithm fails to converge to the specified tolerance + within the specified number of iterations of the power iteration + method. + + References + ---------- + .. [1] A. Langville and C. Meyer, + "A survey of eigenvector methods of web information retrieval." + http://citeseer.ist.psu.edu/713792.html + .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry, + The PageRank citation ranking: Bringing order to the Web. 
1999 + http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf + """ + import numpy as np + import scipy as sp + + N = len(G) + if N == 0: + return {} + + nodelist = list(G) + A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float) + S = A.sum(axis=1) + S[S != 0] = 1.0 / S[S != 0] + # TODO: csr_array + Q = sp.sparse.csr_array(sp.sparse.spdiags(S.T, 0, *A.shape)) + A = Q @ A + + # initial vector + if nstart is None: + x = np.repeat(1.0 / N, N) + else: + x = np.array([nstart.get(n, 0) for n in nodelist], dtype=float) + x /= x.sum() + + # Personalization vector + if personalization is None: + p = np.repeat(1.0 / N, N) + else: + p = np.array([personalization.get(n, 0) for n in nodelist], dtype=float) + if p.sum() == 0: + raise ZeroDivisionError + p /= p.sum() + # Dangling nodes + if dangling is None: + dangling_weights = p + else: + # Convert the dangling dictionary into an array in nodelist order + dangling_weights = np.array([dangling.get(n, 0) for n in nodelist], dtype=float) + dangling_weights /= dangling_weights.sum() + is_dangling = np.where(S == 0)[0] + + # power iteration: make up to max_iter iterations + for _ in range(max_iter): + xlast = x + x = alpha * (x @ A + sum(x[is_dangling]) * dangling_weights) + (1 - alpha) * p + # check convergence, l1 norm + err = np.absolute(x - xlast).sum() + if err < N * tol: + return dict(zip(nodelist, map(float, x))) + raise nx.PowerIterationFailedConvergence(max_iter) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6b5a85d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-312.pyc new file mode 100644 index 0000000..be8f89b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_hits.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-312.pyc new file mode 100644 index 0000000..91978ec Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/__pycache__/test_pagerank.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py new file mode 100644 index 0000000..fedd59c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_hits.py @@ -0,0 +1,77 @@ +import pytest + +import networkx as nx +from networkx.algorithms.link_analysis.hits_alg import ( + _hits_numpy, + _hits_python, + _hits_scipy, +) + +np = 
pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") + +# Example from +# A. Langville and C. Meyer, "A survey of eigenvector methods of web +# information retrieval." http://citeseer.ist.psu.edu/713792.html + + +class TestHITS: + @classmethod + def setup_class(cls): + G = nx.DiGraph() + + edges = [(1, 3), (1, 5), (2, 1), (3, 5), (5, 4), (5, 3), (6, 5)] + + G.add_edges_from(edges, weight=1) + cls.G = G + cls.G.a = dict( + zip(sorted(G), [0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000]) + ) + cls.G.h = dict( + zip(sorted(G), [0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325]) + ) + + def test_hits_numpy(self): + G = self.G + h, a = _hits_numpy(G) + for n in G: + assert h[n] == pytest.approx(G.h[n], abs=1e-4) + for n in G: + assert a[n] == pytest.approx(G.a[n], abs=1e-4) + + @pytest.mark.parametrize("hits_alg", (nx.hits, _hits_python, _hits_scipy)) + def test_hits(self, hits_alg): + G = self.G + h, a = hits_alg(G, tol=1.0e-08) + for n in G: + assert h[n] == pytest.approx(G.h[n], abs=1e-4) + for n in G: + assert a[n] == pytest.approx(G.a[n], abs=1e-4) + nstart = dict.fromkeys(G, 1.0 / 2) + h, a = hits_alg(G, nstart=nstart) + for n in G: + assert h[n] == pytest.approx(G.h[n], abs=1e-4) + for n in G: + assert a[n] == pytest.approx(G.a[n], abs=1e-4) + + def test_empty(self): + G = nx.Graph() + assert nx.hits(G) == ({}, {}) + assert _hits_numpy(G) == ({}, {}) + assert _hits_python(G) == ({}, {}) + assert _hits_scipy(G) == ({}, {}) + + def test_hits_not_convergent(self): + G = nx.path_graph(50) + with pytest.raises(nx.PowerIterationFailedConvergence): + _hits_scipy(G, max_iter=1) + with pytest.raises(nx.PowerIterationFailedConvergence): + _hits_python(G, max_iter=1) + with pytest.raises(nx.PowerIterationFailedConvergence): + _hits_scipy(G, max_iter=0) + with pytest.raises(nx.PowerIterationFailedConvergence): + _hits_python(G, max_iter=0) + with pytest.raises(nx.PowerIterationFailedConvergence): + nx.hits(G, max_iter=0) + with pytest.raises(nx.PowerIterationFailedConvergence): + nx.hits(G, max_iter=1) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py new file mode 100644 index 0000000..fb08423 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_analysis/tests/test_pagerank.py @@ -0,0 +1,213 @@ +import random + +import pytest + +import networkx as nx +from networkx.algorithms.link_analysis.pagerank_alg import ( + _pagerank_numpy, + _pagerank_python, + _pagerank_scipy, +) +from networkx.classes.tests import dispatch_interface + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + +# Example from +# A. Langville and C. Meyer, "A survey of eigenvector methods of web +# information retrieval." 
http://citeseer.ist.psu.edu/713792.html + + +class TestPageRank: + @classmethod + def setup_class(cls): + G = nx.DiGraph() + edges = [ + (1, 2), + (1, 3), + # 2 is a dangling node + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ] + G.add_edges_from(edges) + cls.G = G + cls.G.pagerank = dict( + zip( + sorted(G), + [ + 0.03721197, + 0.05395735, + 0.04150565, + 0.37508082, + 0.20599833, + 0.28624589, + ], + ) + ) + cls.dangling_node_index = 1 + cls.dangling_edges = {1: 2, 2: 3, 3: 0, 4: 0, 5: 0, 6: 0} + cls.G.dangling_pagerank = dict( + zip( + sorted(G), + [0.10844518, 0.18618601, 0.0710892, 0.2683668, 0.15919783, 0.20671497], + ) + ) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python)) + def test_pagerank(self, alg): + G = self.G + p = alg(G, alpha=0.9, tol=1.0e-08) + for n in G: + assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) + + nstart = {n: random.random() for n in G} + p = alg(G, alpha=0.9, tol=1.0e-08, nstart=nstart) + for n in G: + assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python)) + def test_pagerank_max_iter(self, alg): + with pytest.raises(nx.PowerIterationFailedConvergence): + alg(self.G, max_iter=0) + + def test_numpy_pagerank(self): + G = self.G + p = _pagerank_numpy(G, alpha=0.9) + for n in G: + assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) + + def test_google_matrix(self): + G = self.G + M = nx.google_matrix(G, alpha=0.9, nodelist=sorted(G)) + _, ev = np.linalg.eig(M.T) + p = ev[:, 0] / ev[:, 0].sum() + for a, b in zip(p, self.G.pagerank.values()): + assert a == pytest.approx(b, abs=1e-7) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy)) + def test_personalization(self, alg): + G = nx.complete_graph(4) + personalize = {0: 1, 1: 1, 2: 4, 3: 4} + answer = { + 0: 0.23246732615667579, + 1: 0.23246732615667579, + 2: 0.267532673843324, + 3: 0.2675326738433241, + } + p = alg(G, alpha=0.85, personalization=personalize) + for n in G: + assert p[n] == pytest.approx(answer[n], abs=1e-4) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, nx.google_matrix)) + def test_zero_personalization_vector(self, alg): + G = nx.complete_graph(4) + personalize = {0: 0, 1: 0, 2: 0, 3: 0} + pytest.raises(ZeroDivisionError, alg, G, personalization=personalize) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python)) + def test_one_nonzero_personalization_value(self, alg): + G = nx.complete_graph(4) + personalize = {0: 0, 1: 0, 2: 0, 3: 1} + answer = { + 0: 0.22077931820379187, + 1: 0.22077931820379187, + 2: 0.22077931820379187, + 3: 0.3376620453886241, + } + p = alg(G, alpha=0.85, personalization=personalize) + for n in G: + assert p[n] == pytest.approx(answer[n], abs=1e-4) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python)) + def test_incomplete_personalization(self, alg): + G = nx.complete_graph(4) + personalize = {3: 1} + answer = { + 0: 0.22077931820379187, + 1: 0.22077931820379187, + 2: 0.22077931820379187, + 3: 0.3376620453886241, + } + p = alg(G, alpha=0.85, personalization=personalize) + for n in G: + assert p[n] == pytest.approx(answer[n], abs=1e-4) + + def test_dangling_matrix(self): + """ + Tests that the google_matrix doesn't change except for the dangling + nodes. 
+ """ + G = self.G + dangling = self.dangling_edges + dangling_sum = sum(dangling.values()) + M1 = nx.google_matrix(G, personalization=dangling) + M2 = nx.google_matrix(G, personalization=dangling, dangling=dangling) + for i in range(len(G)): + for j in range(len(G)): + if i == self.dangling_node_index and (j + 1) in dangling: + assert M2[i, j] == pytest.approx( + dangling[j + 1] / dangling_sum, abs=1e-4 + ) + else: + assert M2[i, j] == pytest.approx(M1[i, j], abs=1e-4) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python, _pagerank_numpy)) + def test_dangling_pagerank(self, alg): + pr = alg(self.G, dangling=self.dangling_edges) + for n in self.G: + assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4) + + def test_empty(self): + G = nx.Graph() + assert nx.pagerank(G) == {} + assert _pagerank_python(G) == {} + assert _pagerank_numpy(G) == {} + assert nx.google_matrix(G).shape == (0, 0) + + @pytest.mark.parametrize("alg", (nx.pagerank, _pagerank_python)) + def test_multigraph(self, alg): + G = nx.MultiGraph() + G.add_edges_from([(1, 2), (1, 2), (1, 2), (2, 3), (2, 3), ("3", 3), ("3", 3)]) + answer = { + 1: 0.21066048614468322, + 2: 0.3395308825985378, + 3: 0.28933951385531687, + "3": 0.16046911740146227, + } + p = alg(G) + for n in G: + assert p[n] == pytest.approx(answer[n], abs=1e-4) + + +class TestPageRankScipy(TestPageRank): + def test_scipy_pagerank(self): + G = self.G + p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08) + for n in G: + assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) + personalize = {n: random.random() for n in G} + p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, personalization=personalize) + + nstart = {n: random.random() for n in G} + p = _pagerank_scipy(G, alpha=0.9, tol=1.0e-08, nstart=nstart) + for n in G: + assert p[n] == pytest.approx(G.pagerank[n], abs=1e-4) + + def test_scipy_pagerank_max_iter(self): + with pytest.raises(nx.PowerIterationFailedConvergence): + _pagerank_scipy(self.G, max_iter=0) + + def test_dangling_scipy_pagerank(self): + pr = _pagerank_scipy(self.G, dangling=self.dangling_edges) + for n in self.G: + assert pr[n] == pytest.approx(self.G.dangling_pagerank[n], abs=1e-4) + + def test_empty_scipy(self): + G = nx.Graph() + assert _pagerank_scipy(G) == {} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py new file mode 100644 index 0000000..3615f26 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/link_prediction.py @@ -0,0 +1,687 @@ +""" +Link prediction algorithms. +""" + +from math import log + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "resource_allocation_index", + "jaccard_coefficient", + "adamic_adar_index", + "preferential_attachment", + "cn_soundarajan_hopcroft", + "ra_index_soundarajan_hopcroft", + "within_inter_cluster", + "common_neighbor_centrality", +] + + +def _apply_prediction(G, func, ebunch=None): + """Applies the given function to each edge in the specified iterable + of edges. + + `G` is an instance of :class:`networkx.Graph`. + + `func` is a function on two inputs, each of which is a node in the + graph. The function can return anything, but it should return a + value representing a prediction of the likelihood of a "link" + joining the two nodes. + + `ebunch` is an iterable of pairs of nodes. If not specified, all + non-edges in the graph `G` will be used. 
+
+    """
+    if ebunch is None:
+        ebunch = nx.non_edges(G)
+    else:
+        ebunch = list(ebunch)  # materialize first so validation cannot exhaust an iterator
+        for u, v in ebunch:
+            if u not in G:
+                raise nx.NodeNotFound(f"Node {u} not in G.")
+            if v not in G:
+                raise nx.NodeNotFound(f"Node {v} not in G.")
+    return ((u, v, func(u, v)) for u, v in ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def resource_allocation_index(G, ebunch=None):
+    r"""Compute the resource allocation index of all node pairs in ebunch.
+
+    Resource allocation index of `u` and `v` is defined as
+
+    .. math::
+
+        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{|\Gamma(w)|}
+
+    where $\Gamma(u)$ denotes the set of neighbors of $u$.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        Resource allocation index will be computed for each pair of
+        nodes given in the iterable. The pairs must be given as
+        2-tuples (u, v) where u and v are nodes in the graph. If ebunch
+        is None then all nonexistent edges in the graph will be used.
+        Default value: None.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their resource allocation index.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> preds = nx.resource_allocation_index(G, [(0, 1), (2, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 1) -> 0.75000000
+    (2, 3) -> 0.75000000
+
+    References
+    ----------
+    .. [1] T. Zhou, L. Lu, Y.-C. Zhang.
+       Predicting missing links via local information.
+       Eur. Phys. J. B 71 (2009) 623.
+       https://arxiv.org/pdf/0901.0553.pdf
+    """
+
+    def predict(u, v):
+        return sum(1 / G.degree(w) for w in nx.common_neighbors(G, u, v))
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def jaccard_coefficient(G, ebunch=None):
+    r"""Compute the Jaccard coefficient of all node pairs in ebunch.
+
+    Jaccard coefficient of nodes `u` and `v` is defined as
+
+    .. math::
+
+        \frac{|\Gamma(u) \cap \Gamma(v)|}{|\Gamma(u) \cup \Gamma(v)|}
+
+    where $\Gamma(u)$ denotes the set of neighbors of $u$.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        Jaccard coefficient will be computed for each pair of nodes
+        given in the iterable. The pairs must be given as 2-tuples
+        (u, v) where u and v are nodes in the graph. If ebunch is None
+        then all nonexistent edges in the graph will be used.
+        Default value: None.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their Jaccard coefficient.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> preds = nx.jaccard_coefficient(G, [(0, 1), (2, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 1) -> 0.60000000
+    (2, 3) -> 0.60000000
+
+    References
+    ----------
+    .. [1] D. Liben-Nowell, J. Kleinberg.
+       The Link Prediction Problem for Social Networks (2004).
+       http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
+    """
+
+    def predict(u, v):
+        union_size = len(set(G[u]) | set(G[v]))
+        if union_size == 0:
+            return 0
+        return len(nx.common_neighbors(G, u, v)) / union_size
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def adamic_adar_index(G, ebunch=None):
+    r"""Compute the Adamic-Adar index of all node pairs in ebunch.
+
+    Adamic-Adar index of `u` and `v` is defined as
+
+    .. math::
+
+        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{1}{\log |\Gamma(w)|}
+
+    where $\Gamma(u)$ denotes the set of neighbors of $u$.
+    This index leads to zero-division for nodes only connected via self-loops.
+    It is intended to be used when no self-loops are present.
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        Adamic-Adar index will be computed for each pair of nodes given
+        in the iterable. The pairs must be given as 2-tuples (u, v)
+        where u and v are nodes in the graph. If ebunch is None then all
+        nonexistent edges in the graph will be used.
+        Default value: None.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their Adamic-Adar index.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> preds = nx.adamic_adar_index(G, [(0, 1), (2, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 1) -> 2.16404256
+    (2, 3) -> 2.16404256
+
+    References
+    ----------
+    .. [1] D. Liben-Nowell, J. Kleinberg.
+       The Link Prediction Problem for Social Networks (2004).
+       http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
+    """
+
+    def predict(u, v):
+        return sum(1 / log(G.degree(w)) for w in nx.common_neighbors(G, u, v))
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def common_neighbor_centrality(G, ebunch=None, alpha=0.8):
+    r"""Return the CCPA score for each pair of nodes.
+
+    Compute the Common Neighbor and Centrality based Parameterized Algorithm (CCPA)
+    score of all node pairs in ebunch.
+
+    CCPA score of `u` and `v` is defined as
+
+    .. math::
+
+        \alpha \cdot |\Gamma(u) \cap \Gamma(v)| + (1 - \alpha) \cdot \frac{N}{d_{uv}}
+
+    where $\Gamma(u)$ denotes the set of neighbors of $u$, $\Gamma(v)$ denotes the
+    set of neighbors of $v$, $\alpha$ is a parameter that varies between 0 and 1,
+    $N$ denotes the total number of nodes in the graph, and $d_{uv}$ denotes the
+    shortest distance between $u$ and $v$.
+
+    This algorithm is based on two vital properties of nodes, namely the number
+    of common neighbors and their centrality. Common neighbor refers to the common
+    nodes between two nodes. Centrality refers to the prestige that a node enjoys
+    in a network.
+
+    .. seealso::
+
+        :func:`common_neighbors`
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        The CCPA score will be computed for each pair of nodes given in
+        the iterable. The pairs must be given as 2-tuples (u, v) where
+        u and v are nodes in the graph. If ebunch is None then all
+        nonexistent edges in the graph will be used.
+        Default value: None.
+
+    alpha : float, optional (default = 0.8)
+        Weight given to the common-neighbors term relative to the
+        centrality (distance) term. Values should normally be between
+        0 and 1. The default of 0.8 is used because the authors found
+        it performed best across all of their datasets.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their Common Neighbor and Centrality based
+        Parameterized Algorithm (CCPA) score.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NetworkXAlgorithmError
+        If self loops exist in `ebunch` or in `G` (if `ebunch` is `None`).
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> preds = nx.common_neighbor_centrality(G, [(0, 1), (2, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p}")
+    (0, 1) -> 3.4000000000000004
+    (2, 3) -> 3.4000000000000004
+
+    References
+    ----------
+    .. [1] Ahmad, I., Akhtar, M.U., Noor, S. et al.
+       Missing Link Prediction using Common Neighbor and Centrality based Parameterized Algorithm.
+       Sci Rep 10, 364 (2020).
+       https://doi.org/10.1038/s41598-019-57304-y
+    """
+
+    # When alpha == 1, the CCPA score simplifies to the number of common neighbors.
+    if alpha == 1:
+
+        def predict(u, v):
+            if u == v:
+                raise nx.NetworkXAlgorithmError("Self loops are not supported")
+
+            return len(nx.common_neighbors(G, u, v))
+
+    else:
+        spl = dict(nx.shortest_path_length(G))
+        inf = float("inf")
+
+        def predict(u, v):
+            if u == v:
+                raise nx.NetworkXAlgorithmError("Self loops are not supported")
+            path_len = spl[u].get(v, inf)
+
+            n_nbrs = len(nx.common_neighbors(G, u, v))
+            return alpha * n_nbrs + (1 - alpha) * len(G) / path_len
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def preferential_attachment(G, ebunch=None):
+    r"""Compute the preferential attachment score of all node pairs in ebunch.
+
+    Preferential attachment score of `u` and `v` is defined as
+
+    .. math::
+
+        |\Gamma(u)| |\Gamma(v)|
+
+    where $\Gamma(u)$ denotes the set of neighbors of $u$.
+
+    Parameters
+    ----------
+    G : graph
+        NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        Preferential attachment score will be computed for each pair of
+        nodes given in the iterable. The pairs must be given as
+        2-tuples (u, v) where u and v are nodes in the graph. If ebunch
+        is None then all nonexistent edges in the graph will be used.
+        Default value: None.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their preferential attachment score.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.complete_graph(5)
+    >>> preds = nx.preferential_attachment(G, [(0, 1), (2, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p}")
+    (0, 1) -> 16
+    (2, 3) -> 16
+
+    References
+    ----------
+    .. [1] D. Liben-Nowell, J. Kleinberg.
+       The Link Prediction Problem for Social Networks (2004).
+       http://www.cs.cornell.edu/home/kleinber/link-pred.pdf
+    """
+
+    def predict(u, v):
+        return G.degree(u) * G.degree(v)
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(node_attrs="community")
+def cn_soundarajan_hopcroft(G, ebunch=None, community="community"):
+    r"""Count the number of common neighbors of all node pairs in ebunch
+    using community information.
+
+    For two nodes $u$ and $v$, this function computes the number of
+    common neighbors and adds a bonus of one for each common neighbor
+    belonging to the same community as $u$ and $v$. Mathematically,
+
+    .. math::
+
+        |\Gamma(u) \cap \Gamma(v)| + \sum_{w \in \Gamma(u) \cap \Gamma(v)} f(w)
+
+    where $f(w)$ equals 1 if $w$ belongs to the same community as $u$
+    and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of
+    neighbors of $u$.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        The score will be computed for each pair of nodes given in the
+        iterable. The pairs must be given as 2-tuples (u, v) where u
+        and v are nodes in the graph. If ebunch is None then all
+        nonexistent edges in the graph will be used.
+        Default value: None.
+
+    community : string, optional (default = 'community')
+        Nodes attribute name containing the community information.
+        G.nodes[u][community] identifies which community u belongs to. Each
+        node belongs to at most one community. Default value: 'community'.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their score.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NetworkXAlgorithmError
+        If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`).
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> G.nodes[0]["community"] = 0
+    >>> G.nodes[1]["community"] = 0
+    >>> G.nodes[2]["community"] = 0
+    >>> preds = nx.cn_soundarajan_hopcroft(G, [(0, 2)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p}")
+    (0, 2) -> 2
+
+    References
+    ----------
+    .. [1] Sucheta Soundarajan and John Hopcroft.
+       Using community information to improve the precision of link
+       prediction methods.
+       In Proceedings of the 21st international conference companion on
+       World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
+       http://doi.acm.org/10.1145/2187980.2188150
+    """
+
+    def predict(u, v):
+        Cu = _community(G, u, community)
+        Cv = _community(G, v, community)
+        cnbors = nx.common_neighbors(G, u, v)
+        neighbors = (
+            sum(_community(G, w, community) == Cu for w in cnbors) if Cu == Cv else 0
+        )
+        return len(cnbors) + neighbors
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(node_attrs="community")
+def ra_index_soundarajan_hopcroft(G, ebunch=None, community="community"):
+    r"""Compute the resource allocation index of all node pairs in
+    ebunch using community information.
+
+    For two nodes $u$ and $v$, this function computes the resource
+    allocation index considering only common neighbors belonging to the
+    same community as $u$ and $v$. Mathematically,
+
+    .. math::
+
+        \sum_{w \in \Gamma(u) \cap \Gamma(v)} \frac{f(w)}{|\Gamma(w)|}
+
+    where $f(w)$ equals 1 if $w$ belongs to the same community as $u$
+    and $v$ or 0 otherwise and $\Gamma(u)$ denotes the set of
+    neighbors of $u$.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        The score will be computed for each pair of nodes given in the
+        iterable. The pairs must be given as 2-tuples (u, v) where u
+        and v are nodes in the graph. If ebunch is None then all
+        nonexistent edges in the graph will be used.
+        Default value: None.
+
+    community : string, optional (default = 'community')
+        Nodes attribute name containing the community information.
+        G.nodes[u][community] identifies which community u belongs to. Each
+        node belongs to at most one community. Default value: 'community'.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their score.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NetworkXAlgorithmError
+        If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`).
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)])
+    >>> G.nodes[0]["community"] = 0
+    >>> G.nodes[1]["community"] = 0
+    >>> G.nodes[2]["community"] = 1
+    >>> G.nodes[3]["community"] = 0
+    >>> preds = nx.ra_index_soundarajan_hopcroft(G, [(0, 3)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 3) -> 0.50000000
+
+    References
+    ----------
+    .. [1] Sucheta Soundarajan and John Hopcroft.
+       Using community information to improve the precision of link
+       prediction methods.
+       In Proceedings of the 21st international conference companion on
+       World Wide Web (WWW '12 Companion). ACM, New York, NY, USA, 607-608.
+       http://doi.acm.org/10.1145/2187980.2188150
+    """
+
+    def predict(u, v):
+        Cu = _community(G, u, community)
+        Cv = _community(G, v, community)
+        if Cu != Cv:
+            return 0
+        cnbors = nx.common_neighbors(G, u, v)
+        return sum(1 / G.degree(w) for w in cnbors if _community(G, w, community) == Cu)
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(node_attrs="community")
+def within_inter_cluster(G, ebunch=None, delta=0.001, community="community"):
+    """Compute the ratio of within- and inter-cluster common neighbors
+    of all node pairs in ebunch.
+
+    For two nodes `u` and `v`, if a common neighbor `w` belongs to the
+    same community as them, `w` is considered as a within-cluster common
+    neighbor of `u` and `v`. Otherwise, it is considered as an
+    inter-cluster common neighbor of `u` and `v`. The ratio between the
+    sizes of the sets of within- and inter-cluster common neighbors is
+    defined as the WIC measure. [1]_
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX undirected graph.
+
+    ebunch : iterable of node pairs, optional (default = None)
+        The WIC measure will be computed for each pair of nodes given in
+        the iterable. The pairs must be given as 2-tuples (u, v) where
+        u and v are nodes in the graph. If ebunch is None then all
+        nonexistent edges in the graph will be used.
+        Default value: None.
+
+    delta : float, optional (default = 0.001)
+        Value to prevent division by zero in case there is no
+        inter-cluster common neighbor between two nodes.
+        See [1]_ for details. Default value: 0.001.
+
+    community : string, optional (default = 'community')
+        Nodes attribute name containing the community information.
+        G.nodes[u][community] identifies which community u belongs to. Each
+        node belongs to at most one community. Default value: 'community'.
+
+    Returns
+    -------
+    piter : iterator
+        An iterator of 3-tuples in the form (u, v, p) where (u, v) is a
+        pair of nodes and p is their WIC measure.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` is a `DiGraph`, a `MultiGraph` or a `MultiDiGraph`.
+
+    NetworkXAlgorithmError
+        - If `delta` is less than or equal to zero.
+        - If no community information is available for a node in `ebunch` or in `G` (if `ebunch` is `None`).
+
+    NodeNotFound
+        If `ebunch` has a node that is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 4), (2, 4), (3, 4)])
+    >>> G.nodes[0]["community"] = 0
+    >>> G.nodes[1]["community"] = 1
+    >>> G.nodes[2]["community"] = 0
+    >>> G.nodes[3]["community"] = 0
+    >>> G.nodes[4]["community"] = 0
+    >>> preds = nx.within_inter_cluster(G, [(0, 4)])
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 4) -> 1.99800200
+    >>> preds = nx.within_inter_cluster(G, [(0, 4)], delta=0.5)
+    >>> for u, v, p in preds:
+    ...     print(f"({u}, {v}) -> {p:.8f}")
+    (0, 4) -> 1.33333333
+
+    References
+    ----------
+    .. [1] Jorge Carlos Valverde-Rebaza and Alneu de Andrade Lopes.
+       Link prediction in complex networks based on cluster information.
+       In Proceedings of the 21st Brazilian conference on Advances in
+       Artificial Intelligence (SBIA'12)
+       https://doi.org/10.1007/978-3-642-34459-6_10
+    """
+    if delta <= 0:
+        raise nx.NetworkXAlgorithmError("Delta must be greater than zero")
+
+    def predict(u, v):
+        Cu = _community(G, u, community)
+        Cv = _community(G, v, community)
+        if Cu != Cv:
+            return 0
+        cnbors = nx.common_neighbors(G, u, v)
+        within = {w for w in cnbors if _community(G, w, community) == Cu}
+        inter = cnbors - within
+        return len(within) / (len(inter) + delta)
+
+    return _apply_prediction(G, predict, ebunch)
+
+
+def _community(G, u, community):
+    """Get the community of the given node."""
+    node_u = G.nodes[u]
+    try:
+        return node_u[community]
+    except KeyError as err:
+        raise nx.NetworkXAlgorithmError(
+            f"No community information available for Node {u}"
+        ) from err
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py
new file mode 100644
index 0000000..963a783
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/lowest_common_ancestors.py
@@ -0,0 +1,280 @@
+"""Algorithms for finding the lowest common ancestor of trees and DAGs."""
+
+from collections import defaultdict
+from collections.abc import Mapping, Set
+from itertools import combinations_with_replacement
+
+import networkx as nx
+from networkx.utils import UnionFind, arbitrary_element, not_implemented_for
+
+__all__ = [
+    "all_pairs_lowest_common_ancestor",
+    "tree_all_pairs_lowest_common_ancestor",
+    "lowest_common_ancestor",
+]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def all_pairs_lowest_common_ancestor(G, pairs=None):
+    """Return the lowest common ancestor of all pairs or the provided pairs.
+
+    Parameters
+    ----------
+    G : NetworkX directed graph
+
+    pairs : iterable of pairs of nodes, optional (default: all pairs)
+        The pairs of nodes of interest.
+        If None, will find the LCA of all pairs of nodes.
+
+    Yields
+    ------
+    ((node1, node2), lca) : 2-tuple
+        Where lca is the lowest common ancestor of node1 and node2.
+        Note that for the default case, the order of the node pair is not considered,
+        e.g. you will not get both ``(a, b)`` and ``(b, a)``
+
+    Raises
+    ------
+    NetworkXPointlessConcept
+        If `G` is null.
+    NetworkXError
+        If `G` is not a DAG.
+
+    Examples
+    --------
+    >>> from pprint import pprint
+
+    The default behavior is to yield the lowest common ancestor for all
+    possible combinations of nodes in `G`, including self-pairings:
+
+    >>> G = nx.DiGraph([(0, 1), (0, 3), (1, 2)])
+    >>> pprint(dict(nx.all_pairs_lowest_common_ancestor(G)))
+    {(0, 0): 0,
+     (0, 1): 0,
+     (0, 2): 0,
+     (0, 3): 0,
+     (1, 1): 1,
+     (1, 2): 1,
+     (1, 3): 0,
+     (2, 2): 2,
+     (3, 2): 0,
+     (3, 3): 3}
+
+    The pairs argument can be used to limit the output to only the
+    specified node pairings:
+
+    >>> dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (2, 3)]))
+    {(1, 2): 1, (2, 3): 0}
+
+    Notes
+    -----
+    Only defined on non-null directed acyclic graphs.
+
+    See Also
+    --------
+    lowest_common_ancestor
+    """
+    if not nx.is_directed_acyclic_graph(G):
+        raise nx.NetworkXError("LCA only defined on directed acyclic graphs.")
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")
+
+    if pairs is None:
+        pairs = combinations_with_replacement(G, 2)
+    else:
+        # Convert iterator to iterable, if necessary. Trim duplicates.
+        pairs = dict.fromkeys(pairs)
+        # Verify that each of the nodes in the provided pairs is in G
+        nodeset = set(G)
+        for pair in pairs:
+            if set(pair) - nodeset:
+                raise nx.NodeNotFound(
+                    f"Node(s) {set(pair) - nodeset} from pair {pair} not in G."
+                )
+
+    # Once input validation is done, construct the generator
+    def generate_lca_from_pairs(G, pairs):
+        ancestor_cache = {}
+
+        for v, w in pairs:
+            if v not in ancestor_cache:
+                ancestor_cache[v] = nx.ancestors(G, v)
+                ancestor_cache[v].add(v)
+            if w not in ancestor_cache:
+                ancestor_cache[w] = nx.ancestors(G, w)
+                ancestor_cache[w].add(w)
+
+            common_ancestors = ancestor_cache[v] & ancestor_cache[w]
+
+            if common_ancestors:
+                common_ancestor = next(iter(common_ancestors))
+                while True:
+                    successor = None
+                    for lower_ancestor in G.successors(common_ancestor):
+                        if lower_ancestor in common_ancestors:
+                            successor = lower_ancestor
+                            break
+                    if successor is None:
+                        break
+                    common_ancestor = successor
+                yield ((v, w), common_ancestor)
+
+    return generate_lca_from_pairs(G, pairs)
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def lowest_common_ancestor(G, node1, node2, default=None):
+    """Compute the lowest common ancestor of the given pair of nodes.
+
+    Parameters
+    ----------
+    G : NetworkX directed graph
+
+    node1, node2 : nodes in the graph.
+
+    default : object
+        Returned if `node1` and `node2` have no common ancestors.
+
+    Returns
+    -------
+    The lowest common ancestor of node1 and node2,
+    or default if they have no common ancestors.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> nx.add_path(G, (0, 1, 2, 3))
+    >>> nx.add_path(G, (0, 4, 3))
+    >>> nx.lowest_common_ancestor(G, 2, 4)
+    0
+
+    See Also
+    --------
+    all_pairs_lowest_common_ancestor"""
+
+    ans = list(all_pairs_lowest_common_ancestor(G, pairs=[(node1, node2)]))
+    if ans:
+        assert len(ans) == 1
+        return ans[0][1]
+    return default
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def tree_all_pairs_lowest_common_ancestor(G, root=None, pairs=None):
+    r"""Yield the lowest common ancestor for sets of pairs in a tree.
+
+    Parameters
+    ----------
+    G : NetworkX directed graph (must be a tree)
+
+    root : node, optional (default: None)
+        The root of the subtree to operate on.
+        If None, assume the entire graph has exactly one source and use that.
+
+    pairs : iterable or iterator of pairs of nodes, optional (default: None)
+        The pairs of interest. If None, defaults to all pairs of nodes
+        under `root` that have a lowest common ancestor.
+
+    Returns
+    -------
+    lcas : generator of tuples `((u, v), lca)` where `u` and `v` are nodes
+        in `pairs` and `lca` is their lowest common ancestor.
+
+    Examples
+    --------
+    >>> import pprint
+    >>> G = nx.DiGraph([(1, 3), (2, 4), (1, 2)])
+    >>> pprint.pprint(dict(nx.tree_all_pairs_lowest_common_ancestor(G)))
+    {(1, 1): 1,
+     (2, 1): 1,
+     (2, 2): 2,
+     (3, 1): 1,
+     (3, 2): 1,
+     (3, 3): 3,
+     (3, 4): 1,
+     (4, 1): 1,
+     (4, 2): 2,
+     (4, 4): 4}
+
+    We can also use the `pairs` argument to specify the pairs of nodes for which we
+    want to compute lowest common ancestors. Here is an example:
+
+    >>> dict(nx.tree_all_pairs_lowest_common_ancestor(G, pairs=[(1, 4), (2, 3)]))
+    {(2, 3): 1, (1, 4): 1}
+
+    Notes
+    -----
+    Only defined on non-null trees represented with directed edges from
+    parents to children. Uses Tarjan's off-line lowest-common-ancestors
+    algorithm. Runs in $O(4 \times (V + E + P))$ time, where 4 is the largest
+    value of the inverse Ackermann function likely to ever come up in actual
+    use, and $P$ is the number of pairs requested (or $V^2$ if all are needed).
+
+    Tarjan, R. E. (1979), "Applications of path compression on balanced trees",
+    Journal of the ACM 26 (4): 690-715, doi:10.1145/322154.322161.
+
+    See Also
+    --------
+    all_pairs_lowest_common_ancestor: similar routine for general DAGs
+    lowest_common_ancestor: just a single pair for general DAGs
+    """
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")
+
+    # Index pairs of interest for efficient lookup from either side.
+    if pairs is not None:
+        pair_dict = defaultdict(set)
+        # See note on all_pairs_lowest_common_ancestor.
+        if not isinstance(pairs, Mapping | Set):
+            pairs = set(pairs)
+        for u, v in pairs:
+            for n in (u, v):
+                if n not in G:
+                    msg = f"The node {str(n)} is not in the digraph."
+                    raise nx.NodeNotFound(msg)
+            pair_dict[u].add(v)
+            pair_dict[v].add(u)
+
+    # If root is not specified, find the unique node with in-degree 0 and
+    # use it. Raise an error if none is found, or if more than one is. Also
+    # check for any nodes with in-degree larger than 1, which would imply G
+    # is not a tree.
+    if root is None:
+        for n, deg in G.in_degree:
+            if deg == 0:
+                if root is not None:
+                    msg = "No root specified and tree has multiple sources."
+                    raise nx.NetworkXError(msg)
+                root = n
+            # checking deg>1 is not sufficient for MultiDiGraphs
+            elif deg > 1 and len(G.pred[n]) > 1:
+                msg = "Tree LCA only defined on trees; use DAG routine."
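+                # `n` has edges from more than one distinct parent, so the
+                # graph cannot be a tree whatever root is chosen; fail early.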
+ raise nx.NetworkXError(msg) + if root is None: + raise nx.NetworkXError("Graph contains a cycle.") + + # Iterative implementation of Tarjan's offline lca algorithm + # as described in CLRS on page 521 (2nd edition)/page 584 (3rd edition) + uf = UnionFind() + ancestors = {} + for node in G: + ancestors[node] = uf[node] + + colors = defaultdict(bool) + for node in nx.dfs_postorder_nodes(G, root): + colors[node] = True + for v in pair_dict[node] if pairs is not None else G: + if colors[v]: + # If the user requested both directions of a pair, give it. + # Otherwise, just give one. + if pairs is not None and (node, v) in pairs: + yield (node, v), ancestors[uf[v]] + if pairs is None or (v, node) in pairs: + yield (v, node), ancestors[uf[v]] + if node != root: + parent = arbitrary_element(G.pred[node]) + uf.union(parent, node) + ancestors[uf[parent]] = parent diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py new file mode 100644 index 0000000..7d9fc58 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/matching.py @@ -0,0 +1,1151 @@ +"""Functions for computing and verifying matchings in a graph.""" + +from itertools import combinations, repeat + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "is_matching", + "is_maximal_matching", + "is_perfect_matching", + "max_weight_matching", + "min_weight_matching", + "maximal_matching", +] + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +@nx._dispatchable +def maximal_matching(G): + r"""Find a maximal matching in the graph. + + A matching is a subset of edges in which no node occurs more than once. + A maximal matching cannot add more edges and still be a matching. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + Returns + ------- + matching : set + A maximal matching of the graph. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)]) + >>> sorted(nx.maximal_matching(G)) + [(1, 2), (3, 5)] + + Notes + ----- + The algorithm greedily selects a maximal matching M of the graph G + (i.e. no superset of M exists). It runs in $O(|E|)$ time. + """ + matching = set() + nodes = set() + for edge in G.edges(): + # If the edge isn't covered, add it to the matching + # then remove neighborhood of u and v from consideration. + u, v = edge + if u not in nodes and v not in nodes and u != v: + matching.add(edge) + nodes.update(edge) + return matching + + +def matching_dict_to_set(matching): + """Converts matching dict format to matching set format + + Converts a dictionary representing a matching (as returned by + :func:`max_weight_matching`) to a set representing a matching (as + returned by :func:`maximal_matching`). + + In the definition of maximal matching adopted by NetworkX, + self-loops are not allowed, so the provided dictionary is expected + to never have any mapping from a key to itself. However, the + dictionary is expected to have mirrored key/value pairs, for + example, key ``u`` with value ``v`` and key ``v`` with value ``u``. 
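+
+    For example (an illustrative addition, not from the original docs):
+
+    >>> sorted(matching_dict_to_set({1: 2, 2: 1, 3: 4, 4: 3}))
+    [(1, 2), (3, 4)]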
+ + """ + edges = set() + for edge in matching.items(): + u, v = edge + if (v, u) in edges or edge in edges: + continue + if u == v: + raise nx.NetworkXError(f"Selfloops cannot appear in matchings {edge}") + edges.add(edge) + return edges + + +@nx._dispatchable +def is_matching(G, matching): + """Return True if ``matching`` is a valid matching of ``G`` + + A *matching* in a graph is a set of edges in which no two distinct + edges share a common endpoint. Each node is incident to at most one + edge in the matching. The edges are said to be independent. + + Parameters + ---------- + G : NetworkX graph + + matching : dict or set + A dictionary or set representing a matching. If a dictionary, it + must have ``matching[u] == v`` and ``matching[v] == u`` for each + edge ``(u, v)`` in the matching. If a set, it must have elements + of the form ``(u, v)``, where ``(u, v)`` is an edge in the + matching. + + Returns + ------- + bool + Whether the given set or dictionary represents a valid matching + in the graph. + + Raises + ------ + NetworkXError + If the proposed matching has an edge to a node not in G. + Or if the matching is not a collection of 2-tuple edges. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5)]) + >>> nx.is_maximal_matching(G, {1: 3, 2: 4}) # using dict to represent matching + True + + >>> nx.is_matching(G, {(1, 3), (2, 4)}) # using set to represent matching + True + + """ + if isinstance(matching, dict): + matching = matching_dict_to_set(matching) + + nodes = set() + for edge in matching: + if len(edge) != 2: + raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}") + u, v = edge + if u not in G or v not in G: + raise nx.NetworkXError(f"matching contains edge {edge} with node not in G") + if u == v: + return False + if not G.has_edge(u, v): + return False + if u in nodes or v in nodes: + return False + nodes.update(edge) + return True + + +@nx._dispatchable +def is_maximal_matching(G, matching): + """Return True if ``matching`` is a maximal matching of ``G`` + + A *maximal matching* in a graph is a matching in which adding any + edge would cause the set to no longer be a valid matching. + + Parameters + ---------- + G : NetworkX graph + + matching : dict or set + A dictionary or set representing a matching. If a dictionary, it + must have ``matching[u] == v`` and ``matching[v] == u`` for each + edge ``(u, v)`` in the matching. If a set, it must have elements + of the form ``(u, v)``, where ``(u, v)`` is an edge in the + matching. + + Returns + ------- + bool + Whether the given set or dictionary represents a valid maximal + matching in the graph. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)]) + >>> nx.is_maximal_matching(G, {(1, 2), (3, 4)}) + True + + """ + if isinstance(matching, dict): + matching = matching_dict_to_set(matching) + # If the given set is not a matching, then it is not a maximal matching. + edges = set() + nodes = set() + for edge in matching: + if len(edge) != 2: + raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}") + u, v = edge + if u not in G or v not in G: + raise nx.NetworkXError(f"matching contains edge {edge} with node not in G") + if u == v: + return False + if not G.has_edge(u, v): + return False + if u in nodes or v in nodes: + return False + nodes.update(edge) + edges.add(edge) + edges.add((v, u)) + # A matching is maximal if adding any new edge from G to it + # causes the resulting set to match some node twice. 
+    # Be careful to check for adding selfloops
+    for u, v in G.edges:
+        if (u, v) not in edges:
+            # could add edge (u, v) to edges and have a bigger matching
+            if u not in nodes and v not in nodes and u != v:
+                return False
+    return True
+
+
+@nx._dispatchable
+def is_perfect_matching(G, matching):
+    """Return True if ``matching`` is a perfect matching for ``G``
+
+    A *perfect matching* in a graph is a matching in which exactly one edge
+    is incident upon each vertex.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    matching : dict or set
+        A dictionary or set representing a matching. If a dictionary, it
+        must have ``matching[u] == v`` and ``matching[v] == u`` for each
+        edge ``(u, v)`` in the matching. If a set, it must have elements
+        of the form ``(u, v)``, where ``(u, v)`` is an edge in the
+        matching.
+
+    Returns
+    -------
+    bool
+        Whether the given set or dictionary represents a valid perfect
+        matching in the graph.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (2, 4), (3, 5), (4, 5), (4, 6)])
+    >>> my_match = {1: 2, 3: 5, 4: 6}
+    >>> nx.is_perfect_matching(G, my_match)
+    True
+
+    """
+    if isinstance(matching, dict):
+        matching = matching_dict_to_set(matching)
+
+    nodes = set()
+    for edge in matching:
+        if len(edge) != 2:
+            raise nx.NetworkXError(f"matching has non-2-tuple edge {edge}")
+        u, v = edge
+        if u not in G or v not in G:
+            raise nx.NetworkXError(f"matching contains edge {edge} with node not in G")
+        if u == v:
+            return False
+        if not G.has_edge(u, v):
+            return False
+        if u in nodes or v in nodes:
+            return False
+        nodes.update(edge)
+    return len(nodes) == len(G)
+
+
+@not_implemented_for("multigraph")
+@not_implemented_for("directed")
+@nx._dispatchable(edge_attrs="weight")
+def min_weight_matching(G, weight="weight"):
+    """Compute a minimum-weight maximal matching of G.
+
+    Use the maximum-weight algorithm with edge weights subtracted
+    from the maximum weight of all edges.
+
+    A matching is a subset of edges in which no node occurs more than once.
+    The weight of a matching is the sum of the weights of its edges.
+    A maximal matching cannot add more edges and still be a matching.
+    The cardinality of a matching is the number of matched edges.
+
+    This method replaces the edge weights with 1 plus the maximum edge weight
+    minus the original edge weight.
+
+    new_weight = (max_weight + 1) - edge_weight
+
+    then runs :func:`max_weight_matching` with the new weights.
+    The max weight matching with these new weights corresponds
+    to the min weight matching using the original weights.
+    Adding 1 to the max edge weight keeps all edge weights positive
+    and as integers if they started as integers.
+
+    You might worry that adding 1 to each weight would make the algorithm
+    favor matchings with more edges. But we use the parameter
+    `maxcardinality=True` in `max_weight_matching` to ensure that the
+    number of edges in the competing matchings is the same and thus
+    the optimum does not change due to changes in the number of edges.
+
+    Read the documentation of `max_weight_matching` for more information.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      Undirected graph
+
+    weight: string, optional (default='weight')
+       Edge data key corresponding to the edge weight.
+       If key not found, uses 1 as weight.
+
+    Returns
+    -------
+    matching : set
+        A minimum weight matching of the graph.
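+
+    Examples
+    --------
+    A small sketch of the behavior: among the two-edge matchings of this
+    path, the one with the smallest total weight should be returned.
+
+    >>> G = nx.Graph()
+    >>> G.add_weighted_edges_from([(0, 1, 3), (1, 2, 1), (2, 3, 3), (3, 4, 1)])
+    >>> sorted(sorted(e) for e in nx.min_weight_matching(G))
+    [[1, 2], [3, 4]]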
+ + See Also + -------- + max_weight_matching + """ + if len(G.edges) == 0: + return max_weight_matching(G, maxcardinality=True, weight=weight) + G_edges = G.edges(data=weight, default=1) + max_weight = 1 + max(w for _, _, w in G_edges) + InvG = nx.Graph() + edges = ((u, v, max_weight - w) for u, v, w in G_edges) + InvG.add_weighted_edges_from(edges, weight=weight) + return max_weight_matching(InvG, maxcardinality=True, weight=weight) + + +@not_implemented_for("multigraph") +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight") +def max_weight_matching(G, maxcardinality=False, weight="weight"): + """Compute a maximum-weighted matching of G. + + A matching is a subset of edges in which no node occurs more than once. + The weight of a matching is the sum of the weights of its edges. + A maximal matching cannot add more edges and still be a matching. + The cardinality of a matching is the number of matched edges. + + Parameters + ---------- + G : NetworkX graph + Undirected graph + + maxcardinality: bool, optional (default=False) + If maxcardinality is True, compute the maximum-cardinality matching + with maximum weight among all maximum-cardinality matchings. + + weight: string, optional (default='weight') + Edge data key corresponding to the edge weight. + If key not found, uses 1 as weight. + + + Returns + ------- + matching : set + A maximal matching of the graph. + + Examples + -------- + >>> G = nx.Graph() + >>> edges = [(1, 2, 6), (1, 3, 2), (2, 3, 1), (2, 4, 7), (3, 5, 9), (4, 5, 3)] + >>> G.add_weighted_edges_from(edges) + >>> sorted(nx.max_weight_matching(G)) + [(2, 4), (5, 3)] + + Notes + ----- + If G has edges with weight attributes the edge data are used as + weight values else the weights are assumed to be 1. + + This function takes time O(number_of_nodes ** 3). + + If all edge weights are integers, the algorithm uses only integer + computations. If floating point weights are used, the algorithm + could return a slightly suboptimal matching due to numeric + precision errors. + + This method is based on the "blossom" method for finding augmenting + paths and the "primal-dual" method for finding a matching of maximum + weight, both methods invented by Jack Edmonds [1]_. + + Bipartite graphs can also be matched using the functions present in + :mod:`networkx.algorithms.bipartite.matching`. + + References + ---------- + .. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs", + Zvi Galil, ACM Computing Surveys, 1986. + """ + # + # The algorithm is taken from "Efficient Algorithms for Finding Maximum + # Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986. + # It is based on the "blossom" method for finding augmenting paths and + # the "primal-dual" method for finding a matching of maximum weight, both + # methods invented by Jack Edmonds. + # + # A C program for maximum weight matching by Ed Rothberg was used + # extensively to validate this new code. + # + # Many terms used in the code comments are explained in the paper + # by Galil. You will probably need the paper to make sense of this code. + # + + class NoNode: + """Dummy value which is different from any node.""" + + class Blossom: + """Representation of a non-trivial blossom or sub-blossom.""" + + __slots__ = ["childs", "edges", "mybestedges"] + + # b.childs is an ordered list of b's sub-blossoms, starting with + # the base and going round the blossom. 
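+        # For instance (illustrative only): a blossom contracted from the
+        # odd cycle a-b-c-a with base vertex a has childs == [a, b, c].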
+
+        # b.edges is the list of b's connecting edges, such that
+        # b.edges[i] = (v, w) where v is a vertex in b.childs[i]
+        # and w is a vertex in b.childs[wrap(i+1)].
+
+        # If b is a top-level S-blossom,
+        # b.mybestedges is a list of least-slack edges to neighboring
+        # S-blossoms, or None if no such list has been computed yet.
+        # This is used for efficient computation of delta3.
+
+        # Generate the blossom's leaf vertices.
+        def leaves(self):
+            stack = [*self.childs]
+            while stack:
+                t = stack.pop()
+                if isinstance(t, Blossom):
+                    stack.extend(t.childs)
+                else:
+                    yield t
+
+    # Get a list of vertices.
+    gnodes = list(G)
+    if not gnodes:
+        return set()  # don't bother with empty graphs
+
+    # Find the maximum edge weight.
+    maxweight = 0
+    allinteger = True
+    for i, j, d in G.edges(data=True):
+        wt = d.get(weight, 1)
+        if i != j and wt > maxweight:
+            maxweight = wt
+        allinteger = allinteger and (str(type(wt)).split("'")[1] in ("int", "long"))
+
+    # If v is a matched vertex, mate[v] is its partner vertex.
+    # If v is a single vertex, v does not occur as a key in mate.
+    # Initially all vertices are single; updated during augmentation.
+    mate = {}
+
+    # If b is a top-level blossom,
+    # label.get(b) is None if b is unlabeled (free),
+    #                 1 if b is an S-blossom,
+    #                 2 if b is a T-blossom.
+    # The label of a vertex is found by looking at the label of its top-level
+    # containing blossom.
+    # If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
+    # from an S-vertex outside the blossom.
+    # Labels are assigned during a stage and reset after each augmentation.
+    label = {}
+
+    # If b is a labeled top-level blossom,
+    # labeledge[b] = (v, w) is the edge through which b obtained its label
+    # such that w is a vertex in b, or None if b's base vertex is single.
+    # If w is a vertex inside a T-blossom and label[w] == 2,
+    # labeledge[w] = (v, w) is an edge through which w is reachable from
+    # outside the blossom.
+    labeledge = {}
+
+    # If v is a vertex, inblossom[v] is the top-level blossom to which v
+    # belongs.
+    # If v is a top-level vertex, inblossom[v] == v since v is itself
+    # a (trivial) top-level blossom.
+    # Initially all vertices are top-level trivial blossoms.
+    inblossom = dict(zip(gnodes, gnodes))
+
+    # If b is a sub-blossom,
+    # blossomparent[b] is its immediate parent (sub-)blossom.
+    # If b is a top-level blossom, blossomparent[b] is None.
+    blossomparent = dict(zip(gnodes, repeat(None)))
+
+    # If b is a (sub-)blossom,
+    # blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
+    blossombase = dict(zip(gnodes, gnodes))
+
+    # If w is a free vertex (or an unreached vertex inside a T-blossom),
+    # bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
+    # or None if there is no such edge.
+    # If b is a (possibly trivial) top-level S-blossom,
+    # bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
+    # (v inside b), or None if there is no such edge.
+    # This is used for efficient computation of delta2 and delta3.
+    bestedge = {}
+
+    # If v is a vertex,
+    # dualvar[v] = 2 * u(v) where u(v) is v's variable in the dual
+    # optimization problem (if all edge weights are integers, multiplication
+    # by two ensures that all values remain integers throughout the algorithm).
+    # Initially, u(v) = maxweight / 2.
+    dualvar = dict(zip(gnodes, repeat(maxweight)))
+
+    # If b is a non-trivial blossom,
+    # blossomdual[b] = z(b) where z(b) is b's variable in the dual
+    # optimization problem.
+    blossomdual = {}
+
+    # If (v, w) in allowedge or (w, v) in allowedge, then the edge
+    # (v, w) is known to have zero slack in the optimization problem;
+    # otherwise the edge may or may not have zero slack.
+    allowedge = {}
+
+    # Queue of newly discovered S-vertices.
+    queue = []
+
+    # Return 2 * slack of edge (v, w) (does not work inside blossoms).
+    def slack(v, w):
+        return dualvar[v] + dualvar[w] - 2 * G[v][w].get(weight, 1)
+
+    # Assign label t to the top-level blossom containing vertex w,
+    # coming through an edge from vertex v.
+    def assignLabel(w, t, v):
+        b = inblossom[w]
+        assert label.get(w) is None and label.get(b) is None
+        label[w] = label[b] = t
+        if v is not None:
+            labeledge[w] = labeledge[b] = (v, w)
+        else:
+            labeledge[w] = labeledge[b] = None
+        bestedge[w] = bestedge[b] = None
+        if t == 1:
+            # b became an S-vertex/blossom; add it(s vertices) to the queue.
+            if isinstance(b, Blossom):
+                queue.extend(b.leaves())
+            else:
+                queue.append(b)
+        elif t == 2:
+            # b became a T-vertex/blossom; assign label S to its mate.
+            # (If b is a non-trivial blossom, its base is the only vertex
+            # with an external mate.)
+            base = blossombase[b]
+            assignLabel(mate[base], 1, base)
+
+    # Trace back from vertices v and w to discover either a new blossom
+    # or an augmenting path. Return the base vertex of the new blossom,
+    # or NoNode if an augmenting path was found.
+    def scanBlossom(v, w):
+        # Trace back from v and w, placing breadcrumbs as we go.
+        path = []
+        base = NoNode
+        while v is not NoNode:
+            # Look for a breadcrumb in v's blossom or put a new breadcrumb.
+            b = inblossom[v]
+            if label[b] & 4:
+                base = blossombase[b]
+                break
+            assert label[b] == 1
+            path.append(b)
+            label[b] = 5
+            # Trace one step back.
+            if labeledge[b] is None:
+                # The base of blossom b is single; stop tracing this path.
+                assert blossombase[b] not in mate
+                v = NoNode
+            else:
+                assert labeledge[b][0] == mate[blossombase[b]]
+                v = labeledge[b][0]
+                b = inblossom[v]
+                assert label[b] == 2
+                # b is a T-blossom; trace one more step back.
+                v = labeledge[b][0]
+            # Swap v and w so that we alternate between both paths.
+            if w is not NoNode:
+                v, w = w, v
+        # Remove breadcrumbs.
+        for b in path:
+            label[b] = 1
+        # Return base vertex, if we found one.
+        return base
+
+    # Construct a new blossom with given base, through S-vertices v and w.
+    # Label the new blossom as S; set its dual variable to zero;
+    # relabel its T-vertices to S and add them to the queue.
+    def addBlossom(base, v, w):
+        bb = inblossom[base]
+        bv = inblossom[v]
+        bw = inblossom[w]
+        # Create blossom.
+        b = Blossom()
+        blossombase[b] = base
+        blossomparent[b] = None
+        blossomparent[bb] = b
+        # Make list of sub-blossoms and their interconnecting edge endpoints.
+        b.childs = path = []
+        b.edges = edgs = [(v, w)]
+        # Trace back from v to base.
+        while bv != bb:
+            # Add bv to the new blossom.
+            blossomparent[bv] = b
+            path.append(bv)
+            edgs.append(labeledge[bv])
+            assert label[bv] == 2 or (
+                label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]]
+            )
+            # Trace one step back.
+            v = labeledge[bv][0]
+            bv = inblossom[v]
+        # Add base sub-blossom; reverse lists.
+        path.append(bb)
+        path.reverse()
+        edgs.reverse()
+        # Trace back from w to base.
+        while bw != bb:
+            # Add bw to the new blossom.
+            blossomparent[bw] = b
+            path.append(bw)
+            edgs.append((labeledge[bw][1], labeledge[bw][0]))
+            assert label[bw] == 2 or (
+                label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]]
+            )
+            # Trace one step back.
+            w = labeledge[bw][0]
+            bw = inblossom[w]
+        # Set label to S.
+        assert label[bb] == 1
+        label[b] = 1
+        labeledge[b] = labeledge[bb]
+        # Set dual variable to zero.
+        blossomdual[b] = 0
+        # Relabel vertices.
+        for v in b.leaves():
+            if label[inblossom[v]] == 2:
+                # This T-vertex now turns into an S-vertex because it becomes
+                # part of an S-blossom; add it to the queue.
+                queue.append(v)
+            inblossom[v] = b
+        # Compute b.mybestedges.
+        bestedgeto = {}
+        for bv in path:
+            if isinstance(bv, Blossom):
+                if bv.mybestedges is not None:
+                    # Walk this subblossom's least-slack edges.
+                    nblist = bv.mybestedges
+                    # The sub-blossom won't need this data again.
+                    bv.mybestedges = None
+                else:
+                    # This subblossom does not have a list of least-slack
+                    # edges; get the information from the vertices.
+                    nblist = [
+                        (v, w) for v in bv.leaves() for w in G.neighbors(v) if v != w
+                    ]
+            else:
+                nblist = [(bv, w) for w in G.neighbors(bv) if bv != w]
+            for k in nblist:
+                (i, j) = k
+                if inblossom[j] == b:
+                    i, j = j, i
+                bj = inblossom[j]
+                if (
+                    bj != b
+                    and label.get(bj) == 1
+                    and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj]))
+                ):
+                    bestedgeto[bj] = k
+            # Forget about least-slack edge of the subblossom.
+            bestedge[bv] = None
+        b.mybestedges = list(bestedgeto.values())
+        # Select bestedge[b].
+        mybestedge = None
+        bestedge[b] = None
+        for k in b.mybestedges:
+            kslack = slack(*k)
+            if mybestedge is None or kslack < mybestslack:
+                mybestedge = k
+                mybestslack = kslack
+        bestedge[b] = mybestedge
+
+    # Expand the given top-level blossom.
+    def expandBlossom(b, endstage):
+        # This is an obnoxiously complicated recursive function for the sake of
+        # a stack-transformation. So, we hack around the complexity by using
+        # a trampoline pattern. By yielding the arguments to each recursive
+        # call, we keep the actual callstack flat.
+
+        def _recurse(b, endstage):
+            # Convert sub-blossoms into top-level blossoms.
+            for s in b.childs:
+                blossomparent[s] = None
+                if isinstance(s, Blossom):
+                    if endstage and blossomdual[s] == 0:
+                        # Recursively expand this sub-blossom.
+                        yield s
+                    else:
+                        for v in s.leaves():
+                            inblossom[v] = s
+                else:
+                    inblossom[s] = s
+            # If we expand a T-blossom during a stage, its sub-blossoms must be
+            # relabeled.
+            if (not endstage) and label.get(b) == 2:
+                # Start at the sub-blossom through which the expanding
+                # blossom obtained its label, and relabel sub-blossoms until
+                # we reach the base.
+                # Figure out through which sub-blossom the expanding blossom
+                # obtained its label initially.
+                entrychild = inblossom[labeledge[b][1]]
+                # Decide in which direction we will go round the blossom.
+                j = b.childs.index(entrychild)
+                if j & 1:
+                    # Start index is odd; go forward and wrap.
+                    j -= len(b.childs)
+                    jstep = 1
+                else:
+                    # Start index is even; go backward.
+                    jstep = -1
+                # Move along the blossom until we get to the base.
+                v, w = labeledge[b]
+                while j != 0:
+                    # Relabel the T-sub-blossom.
+                    if jstep == 1:
+                        p, q = b.edges[j]
+                    else:
+                        q, p = b.edges[j - 1]
+                    label[w] = None
+                    label[q] = None
+                    assignLabel(w, 2, v)
+                    # Step to the next S-sub-blossom and note its forward edge.
+                    allowedge[(p, q)] = allowedge[(q, p)] = True
+                    j += jstep
+                    if jstep == 1:
+                        v, w = b.edges[j]
+                    else:
+                        w, v = b.edges[j - 1]
+                    # Step to the next T-sub-blossom.
+                    allowedge[(v, w)] = allowedge[(w, v)] = True
+                    j += jstep
+                # Relabel the base T-sub-blossom WITHOUT stepping through to
+                # its mate (so don't call assignLabel).
+ bw = b.childs[j] + label[w] = label[bw] = 2 + labeledge[w] = labeledge[bw] = (v, w) + bestedge[bw] = None + # Continue along the blossom until we get back to entrychild. + j += jstep + while b.childs[j] != entrychild: + # Examine the vertices of the sub-blossom to see whether + # it is reachable from a neighboring S-vertex outside the + # expanding blossom. + bv = b.childs[j] + if label.get(bv) == 1: + # This sub-blossom just got label S through one of its + # neighbors; leave it be. + j += jstep + continue + if isinstance(bv, Blossom): + for v in bv.leaves(): + if label.get(v): + break + else: + v = bv + # If the sub-blossom contains a reachable vertex, assign + # label T to the sub-blossom. + if label.get(v): + assert label[v] == 2 + assert inblossom[v] == bv + label[v] = None + label[mate[blossombase[bv]]] = None + assignLabel(v, 2, labeledge[v][0]) + j += jstep + # Remove the expanded blossom entirely. + label.pop(b, None) + labeledge.pop(b, None) + bestedge.pop(b, None) + del blossomparent[b] + del blossombase[b] + del blossomdual[b] + + # Now, we apply the trampoline pattern. We simulate a recursive + # callstack by maintaining a stack of generators, each yielding a + # sequence of function arguments. We grow the stack by appending a call + # to _recurse on each argument tuple, and shrink the stack whenever a + # generator is exhausted. + stack = [_recurse(b, endstage)] + while stack: + top = stack[-1] + for s in top: + stack.append(_recurse(s, endstage)) + break + else: + stack.pop() + + # Swap matched/unmatched edges over an alternating path through blossom b + # between vertex v and the base vertex. Keep blossom bookkeeping + # consistent. + def augmentBlossom(b, v): + # This is an obnoxiously complicated recursive function for the sake of + # a stack-transformation. So, we hack around the complexity by using + # a trampoline pattern. By yielding the arguments to each recursive + # call, we keep the actual callstack flat. + + def _recurse(b, v): + # Bubble up through the blossom tree from vertex v to an immediate + # sub-blossom of b. + t = v + while blossomparent[t] != b: + t = blossomparent[t] + # Recursively deal with the first sub-blossom. + if isinstance(t, Blossom): + yield (t, v) + # Decide in which direction we will go round the blossom. + i = j = b.childs.index(t) + if i & 1: + # Start index is odd; go forward and wrap. + j -= len(b.childs) + jstep = 1 + else: + # Start index is even; go backward. + jstep = -1 + # Move along the blossom until we get to the base. + while j != 0: + # Step to the next sub-blossom and augment it recursively. + j += jstep + t = b.childs[j] + if jstep == 1: + w, x = b.edges[j] + else: + x, w = b.edges[j - 1] + if isinstance(t, Blossom): + yield (t, w) + # Step to the next sub-blossom and augment it recursively. + j += jstep + t = b.childs[j] + if isinstance(t, Blossom): + yield (t, x) + # Match the edge connecting those sub-blossoms. + mate[w] = x + mate[x] = w + # Rotate the list of sub-blossoms to put the new base at the front. + b.childs = b.childs[i:] + b.childs[:i] + b.edges = b.edges[i:] + b.edges[:i] + blossombase[b] = blossombase[b.childs[0]] + assert blossombase[b] == v + + # Now, we apply the trampoline pattern. We simulate a recursive + # callstack by maintaining a stack of generators, each yielding a + # sequence of function arguments. We grow the stack by appending a call + # to _recurse on each argument tuple, and shrink the stack whenever a + # generator is exhausted. 
+ stack = [_recurse(b, v)] + while stack: + top = stack[-1] + for args in top: + stack.append(_recurse(*args)) + break + else: + stack.pop() + + # Swap matched/unmatched edges over an alternating path between two + # single vertices. The augmenting path runs through S-vertices v and w. + def augmentMatching(v, w): + for s, j in ((v, w), (w, v)): + # Match vertex s to vertex j. Then trace back from s + # until we find a single vertex, swapping matched and unmatched + # edges as we go. + while 1: + bs = inblossom[s] + assert label[bs] == 1 + assert (labeledge[bs] is None and blossombase[bs] not in mate) or ( + labeledge[bs][0] == mate[blossombase[bs]] + ) + # Augment through the S-blossom from s to base. + if isinstance(bs, Blossom): + augmentBlossom(bs, s) + # Update mate[s] + mate[s] = j + # Trace one step back. + if labeledge[bs] is None: + # Reached single vertex; stop. + break + t = labeledge[bs][0] + bt = inblossom[t] + assert label[bt] == 2 + # Trace one more step back. + s, j = labeledge[bt] + # Augment through the T-blossom from j to base. + assert blossombase[bt] == t + if isinstance(bt, Blossom): + augmentBlossom(bt, j) + # Update mate[j] + mate[j] = s + + # Verify that the optimum solution has been reached. + def verifyOptimum(): + if maxcardinality: + # Vertices may have negative dual; + # find a constant non-negative number to add to all vertex duals. + vdualoffset = max(0, -min(dualvar.values())) + else: + vdualoffset = 0 + # 0. all dual variables are non-negative + assert min(dualvar.values()) + vdualoffset >= 0 + assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0 + # 0. all edges have non-negative slack and + # 1. all matched edges have zero slack; + for i, j, d in G.edges(data=True): + wt = d.get(weight, 1) + if i == j: + continue # ignore self-loops + s = dualvar[i] + dualvar[j] - 2 * wt + iblossoms = [i] + jblossoms = [j] + while blossomparent[iblossoms[-1]] is not None: + iblossoms.append(blossomparent[iblossoms[-1]]) + while blossomparent[jblossoms[-1]] is not None: + jblossoms.append(blossomparent[jblossoms[-1]]) + iblossoms.reverse() + jblossoms.reverse() + for bi, bj in zip(iblossoms, jblossoms): + if bi != bj: + break + s += 2 * blossomdual[bi] + assert s >= 0 + if mate.get(i) == j or mate.get(j) == i: + assert mate[i] == j and mate[j] == i + assert s == 0 + # 2. all single vertices have zero dual value; + for v in gnodes: + assert (v in mate) or dualvar[v] + vdualoffset == 0 + # 3. all blossoms with positive dual value are full. + for b in blossomdual: + if blossomdual[b] > 0: + assert len(b.edges) % 2 == 1 + for i, j in b.edges[1::2]: + assert mate[i] == j and mate[j] == i + # Ok. + + # Main loop: continue until no further improvement is possible. + while 1: + # Each iteration of this loop is a "stage". + # A stage finds an augmenting path and uses that to improve + # the matching. + + # Remove labels from top-level blossoms/vertices. + label.clear() + labeledge.clear() + + # Forget all about least-slack edges. + bestedge.clear() + for b in blossomdual: + b.mybestedges = None + + # Loss of labeling means that we can not be sure that currently + # allowable edges remain allowable throughout this stage. + allowedge.clear() + + # Make queue empty. + queue[:] = [] + + # Label single blossoms/vertices with S and put them in the queue. + for v in gnodes: + if (v not in mate) and label.get(inblossom[v]) is None: + assignLabel(v, 1, None) + + # Loop until we succeed in augmenting the matching. 
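+        # `augmented` flags whether this stage has found an augmenting path.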
+ augmented = 0 + while 1: + # Each iteration of this loop is a "substage". + # A substage tries to find an augmenting path; + # if found, the path is used to improve the matching and + # the stage ends. If there is no augmenting path, the + # primal-dual method is used to pump some slack out of + # the dual variables. + + # Continue labeling until all vertices which are reachable + # through an alternating path have got a label. + while queue and not augmented: + # Take an S vertex from the queue. + v = queue.pop() + assert label[inblossom[v]] == 1 + + # Scan its neighbors: + for w in G.neighbors(v): + if w == v: + continue # ignore self-loops + # w is a neighbor to v + bv = inblossom[v] + bw = inblossom[w] + if bv == bw: + # this edge is internal to a blossom; ignore it + continue + if (v, w) not in allowedge: + kslack = slack(v, w) + if kslack <= 0: + # edge k has zero slack => it is allowable + allowedge[(v, w)] = allowedge[(w, v)] = True + if (v, w) in allowedge: + if label.get(bw) is None: + # (C1) w is a free vertex; + # label w with T and label its mate with S (R12). + assignLabel(w, 2, v) + elif label.get(bw) == 1: + # (C2) w is an S-vertex (not in the same blossom); + # follow back-links to discover either an + # augmenting path or a new blossom. + base = scanBlossom(v, w) + if base is not NoNode: + # Found a new blossom; add it to the blossom + # bookkeeping and turn it into an S-blossom. + addBlossom(base, v, w) + else: + # Found an augmenting path; augment the + # matching and end this stage. + augmentMatching(v, w) + augmented = 1 + break + elif label.get(w) is None: + # w is inside a T-blossom, but w itself has not + # yet been reached from outside the blossom; + # mark it as reached (we need this to relabel + # during T-blossom expansion). + assert label[bw] == 2 + label[w] = 2 + labeledge[w] = (v, w) + elif label.get(bw) == 1: + # keep track of the least-slack non-allowable edge to + # a different S-blossom. + if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]): + bestedge[bv] = (v, w) + elif label.get(w) is None: + # w is a free vertex (or an unreached vertex inside + # a T-blossom) but we can not reach it yet; + # keep track of the least-slack edge that reaches w. + if bestedge.get(w) is None or kslack < slack(*bestedge[w]): + bestedge[w] = (v, w) + + if augmented: + break + + # There is no augmenting path under these constraints; + # compute delta and reduce slack in the optimization problem. + # (Note that our vertex dual variables, edge slacks and delta's + # are pre-multiplied by two.) + deltatype = -1 + delta = deltaedge = deltablossom = None + + # Compute delta1: the minimum value of any vertex dual. + if not maxcardinality: + deltatype = 1 + delta = min(dualvar.values()) + + # Compute delta2: the minimum slack on any edge between + # an S-vertex and a free vertex. + for v in G.nodes(): + if label.get(inblossom[v]) is None and bestedge.get(v) is not None: + d = slack(*bestedge[v]) + if deltatype == -1 or d < delta: + delta = d + deltatype = 2 + deltaedge = bestedge[v] + + # Compute delta3: half the minimum slack on any edge between + # a pair of S-blossoms. + for b in blossomparent: + if ( + blossomparent[b] is None + and label.get(b) == 1 + and bestedge.get(b) is not None + ): + kslack = slack(*bestedge[b]) + if allinteger: + assert (kslack % 2) == 0 + d = kslack // 2 + else: + d = kslack / 2.0 + if deltatype == -1 or d < delta: + delta = d + deltatype = 3 + deltaedge = bestedge[b] + + # Compute delta4: minimum z variable of any T-blossom. 
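+            # (When delta4 is the minimum, that blossom's dual variable drops
+            # to zero and the blossom is expanded below so the search can
+            # continue.)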
+            for b in blossomdual:
+                if (
+                    blossomparent[b] is None
+                    and label.get(b) == 2
+                    and (deltatype == -1 or blossomdual[b] < delta)
+                ):
+                    delta = blossomdual[b]
+                    deltatype = 4
+                    deltablossom = b
+
+            if deltatype == -1:
+                # No further improvement possible; max-cardinality optimum
+                # reached. Do a final delta update to make the optimum
+                # verifiable.
+                assert maxcardinality
+                deltatype = 1
+                delta = max(0, min(dualvar.values()))
+
+            # Update dual variables according to delta.
+            for v in gnodes:
+                if label.get(inblossom[v]) == 1:
+                    # S-vertex: 2*u = 2*u - 2*delta
+                    dualvar[v] -= delta
+                elif label.get(inblossom[v]) == 2:
+                    # T-vertex: 2*u = 2*u + 2*delta
+                    dualvar[v] += delta
+            for b in blossomdual:
+                if blossomparent[b] is None:
+                    if label.get(b) == 1:
+                        # top-level S-blossom: z = z + 2*delta
+                        blossomdual[b] += delta
+                    elif label.get(b) == 2:
+                        # top-level T-blossom: z = z - 2*delta
+                        blossomdual[b] -= delta
+
+            # Take action at the point where minimum delta occurred.
+            if deltatype == 1:
+                # No further improvement possible; optimum reached.
+                break
+            elif deltatype == 2:
+                # Use the least-slack edge to continue the search.
+                (v, w) = deltaedge
+                assert label[inblossom[v]] == 1
+                allowedge[(v, w)] = allowedge[(w, v)] = True
+                queue.append(v)
+            elif deltatype == 3:
+                # Use the least-slack edge to continue the search.
+                (v, w) = deltaedge
+                allowedge[(v, w)] = allowedge[(w, v)] = True
+                assert label[inblossom[v]] == 1
+                queue.append(v)
+            elif deltatype == 4:
+                # Expand the least-z blossom.
+                expandBlossom(deltablossom, False)
+
+            # End of this substage.
+
+        # Paranoia check that the matching is symmetric.
+        for v in mate:
+            assert mate[mate[v]] == v
+
+        # Stop when no more augmenting path can be found.
+        if not augmented:
+            break
+
+        # End of a stage; expand all S-blossoms which have zero dual.
+        for b in list(blossomdual.keys()):
+            if b not in blossomdual:
+                continue  # already expanded
+            if blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0:
+                expandBlossom(b, True)
+
+    # Verify that we reached the optimum solution (only for integer weights).
+    if allinteger:
+        verifyOptimum()
+
+    return matching_dict_to_set(mate)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py
new file mode 100644
index 0000000..cf15ddb
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__init__.py
@@ -0,0 +1,27 @@
+"""
+Subpackages related to graph-minor problems.
+
+In graph theory, an undirected graph H is called a minor of the graph G if H
+can be formed from G by deleting edges and vertices and by contracting edges
+[1]_.
+
+References
+----------
+..
[1] https://en.wikipedia.org/wiki/Graph_minor +""" + +from networkx.algorithms.minors.contraction import ( + contracted_edge, + contracted_nodes, + equivalence_classes, + identified_nodes, + quotient_graph, +) + +__all__ = [ + "contracted_edge", + "contracted_nodes", + "equivalence_classes", + "identified_nodes", + "quotient_graph", +] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c9107eb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-312.pyc new file mode 100644 index 0000000..d0828bb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/__pycache__/contraction.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py new file mode 100644 index 0000000..0d27c5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/contraction.py @@ -0,0 +1,666 @@ +"""Provides functions for computing minors of a graph.""" + +from itertools import chain, combinations, permutations, product + +import networkx as nx +from networkx import density +from networkx.exception import NetworkXException +from networkx.utils import arbitrary_element + +__all__ = [ + "contracted_edge", + "contracted_nodes", + "equivalence_classes", + "identified_nodes", + "quotient_graph", +] + +chaini = chain.from_iterable + + +def equivalence_classes(iterable, relation): + """Returns equivalence classes of `relation` when applied to `iterable`. + + The equivalence classes, or blocks, consist of objects from `iterable` + which are all equivalent. They are defined to be equivalent if the + `relation` function returns `True` when passed any two objects from that + class, and `False` otherwise. To define an equivalence relation the + function must be reflexive, symmetric and transitive. + + Parameters + ---------- + iterable : list, tuple, or set + An iterable of elements/nodes. + + relation : function + A Boolean-valued function that implements an equivalence relation + (reflexive, symmetric, transitive binary relation) on the elements + of `iterable` - it must take two elements and return `True` if + they are related, or `False` if not. + + Returns + ------- + set of frozensets + A set of frozensets representing the partition induced by the equivalence + relation function `relation` on the elements of `iterable`. Each + member set in the return set represents an equivalence class, or + block, of the partition. + + Duplicate elements will be ignored so it makes the most sense for + `iterable` to be a :class:`set`. + + Notes + ----- + This function does not check that `relation` represents an equivalence + relation. You can check that your equivalence classes provide a partition + using `is_partition`. 
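+
+    For instance, such a check (a sketch using the
+    `networkx.community.is_partition` helper) might look like:
+
+    >>> G = nx.path_graph(4)
+    >>> blocks = equivalence_classes(G, lambda u, v: (u - v) % 2 == 0)
+    >>> nx.community.is_partition(G, blocks)
+    True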
+ + Examples + -------- + Let `X` be the set of integers from `0` to `9`, and consider an equivalence + relation `R` on `X` of congruence modulo `3`: this means that two integers + `x` and `y` in `X` are equivalent under `R` if they leave the same + remainder when divided by `3`, i.e. `(x - y) mod 3 = 0`. + + The equivalence classes of this relation are `{0, 3, 6, 9}`, `{1, 4, 7}`, + `{2, 5, 8}`: `0`, `3`, `6`, `9` are all divisible by `3` and leave zero + remainder; `1`, `4`, `7` leave remainder `1`; while `2`, `5` and `8` leave + remainder `2`. We can see this by calling `equivalence_classes` with + `X` and a function implementation of `R`. + + >>> X = set(range(10)) + >>> def mod3(x, y): + ... return (x - y) % 3 == 0 + >>> equivalence_classes(X, mod3) # doctest: +SKIP + {frozenset({1, 4, 7}), frozenset({8, 2, 5}), frozenset({0, 9, 3, 6})} + """ + # For simplicity of implementation, we initialize the return value as a + # list of lists, then convert it to a set of sets at the end of the + # function. + blocks = [] + # Determine the equivalence class for each element of the iterable. + for y in iterable: + # Each element y must be in *exactly one* equivalence class. + # + # Each block is guaranteed to be non-empty + for block in blocks: + x = arbitrary_element(block) + if relation(x, y): + block.append(y) + break + else: + # If the element y is not part of any known equivalence class, it + # must be in its own, so we create a new singleton equivalence + # class for it. + blocks.append([y]) + return {frozenset(block) for block in blocks} + + +@nx._dispatchable(edge_attrs="weight", returns_graph=True) +def quotient_graph( + G, + partition, + edge_relation=None, + node_data=None, + edge_data=None, + weight="weight", + relabel=False, + create_using=None, +): + """Returns the quotient graph of `G` under the specified equivalence + relation on nodes. + + Parameters + ---------- + G : NetworkX graph + The graph for which to return the quotient graph with the + specified node relation. + + partition : function, or dict or list of lists, tuples or sets + If a function, this function must represent an equivalence + relation on the nodes of `G`. It must take two arguments *u* + and *v* and return True exactly when *u* and *v* are in the + same equivalence class. The equivalence classes form the nodes + in the returned graph. + + If a dict of lists/tuples/sets, the keys can be any meaningful + block labels, but the values must be the block lists/tuples/sets + (one list/tuple/set per block), and the blocks must form a valid + partition of the nodes of the graph. That is, each node must be + in exactly one block of the partition. + + If a list of sets, the list must form a valid partition of + the nodes of the graph. That is, each node must be in exactly + one block of the partition. + + edge_relation : Boolean function with two arguments + This function must represent an edge relation on the *blocks* of + the `partition` of `G`. It must take two arguments, *B* and *C*, + each one a set of nodes, and return True exactly when there should be + an edge joining block *B* to block *C* in the returned graph. + + If `edge_relation` is not specified, it is assumed to be the + following relation. Block *B* is related to block *C* if and + only if some node in *B* is adjacent to some node in *C*, + according to the edge set of `G`. 
+ + node_data : function + This function takes one argument, *B*, a set of nodes in `G`, + and must return a dictionary representing the node data + attributes to set on the node representing *B* in the quotient graph. + If None, the following node attributes will be set: + + * 'graph', the subgraph of the graph `G` that this block + represents, + * 'nnodes', the number of nodes in this block, + * 'nedges', the number of edges within this block, + * 'density', the density of the subgraph of `G` that this + block represents. + + edge_data : function + This function takes two arguments, *B* and *C*, each one a set + of nodes, and must return a dictionary representing the edge + data attributes to set on the edge joining *B* and *C*, should + there be an edge joining *B* and *C* in the quotient graph (if + no such edge occurs in the quotient graph as determined by + `edge_relation`, then the output of this function is ignored). + + If the quotient graph would be a multigraph, this function is + not applied, since the edge data from each edge in the graph + `G` appears in the edges of the quotient graph. + + weight : string or None, optional (default="weight") + The name of an edge attribute that holds the numerical value + used as a weight. If None then each edge has weight 1. + + relabel : bool + If True, relabel the nodes of the quotient graph to be + nonnegative integers. Otherwise, the nodes are identified with + :class:`frozenset` instances representing the blocks given in + `partition`. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + NetworkX graph + The quotient graph of `G` under the equivalence relation + specified by `partition`. If the partition were given as a + list of :class:`set` instances and `relabel` is False, + each node will be a :class:`frozenset` corresponding to the same + :class:`set`. + + Raises + ------ + NetworkXException + If the given partition is not a valid partition of the nodes of + `G`. + + Examples + -------- + The quotient graph of the complete bipartite graph under the "same + neighbors" equivalence relation is `K_2`. Under this relation, two nodes + are equivalent if they are not adjacent but have the same neighbor set. + + >>> G = nx.complete_bipartite_graph(2, 3) + >>> same_neighbors = lambda u, v: (u not in G[v] and v not in G[u] and G[u] == G[v]) + >>> Q = nx.quotient_graph(G, same_neighbors) + >>> K2 = nx.complete_graph(2) + >>> nx.is_isomorphic(Q, K2) + True + + The quotient graph of a directed graph under the "same strongly connected + component" equivalence relation is the condensation of the graph (see + :func:`condensation`). This example comes from the Wikipedia article + *`Strongly connected component`_*. + + >>> G = nx.DiGraph() + >>> edges = [ + ... "ab", + ... "be", + ... "bf", + ... "bc", + ... "cg", + ... "cd", + ... "dc", + ... "dh", + ... "ea", + ... "ef", + ... "fg", + ... "gf", + ... "hd", + ... "hf", + ... 
]
+    >>> G.add_edges_from(tuple(x) for x in edges)
+    >>> components = list(nx.strongly_connected_components(G))
+    >>> sorted(sorted(component) for component in components)
+    [['a', 'b', 'e'], ['c', 'd', 'h'], ['f', 'g']]
+    >>>
+    >>> C = nx.condensation(G, components)
+    >>> component_of = C.graph["mapping"]
+    >>> same_component = lambda u, v: component_of[u] == component_of[v]
+    >>> Q = nx.quotient_graph(G, same_component)
+    >>> nx.is_isomorphic(C, Q)
+    True
+
+    Node identification can be represented as the quotient of a graph under the
+    equivalence relation that places the two nodes in one block and each other
+    node in its own singleton block.
+
+    >>> K24 = nx.complete_bipartite_graph(2, 4)
+    >>> K34 = nx.complete_bipartite_graph(3, 4)
+    >>> C = nx.contracted_nodes(K34, 1, 2)
+    >>> nodes = {1, 2}
+    >>> is_contracted = lambda u, v: u in nodes and v in nodes
+    >>> Q = nx.quotient_graph(K34, is_contracted)
+    >>> nx.is_isomorphic(Q, C)
+    True
+    >>> nx.is_isomorphic(Q, K24)
+    True
+
+    The blockmodeling technique described in [1]_ can be implemented as a
+    quotient graph.
+
+    >>> G = nx.path_graph(6)
+    >>> partition = [{0, 1}, {2, 3}, {4, 5}]
+    >>> M = nx.quotient_graph(G, partition, relabel=True)
+    >>> list(M.edges())
+    [(0, 1), (1, 2)]
+
+    Here is the same example but using partition as a dict of block sets.
+
+    >>> G = nx.path_graph(6)
+    >>> partition = {0: {0, 1}, 2: {2, 3}, 4: {4, 5}}
+    >>> M = nx.quotient_graph(G, partition, relabel=True)
+    >>> list(M.edges())
+    [(0, 1), (1, 2)]
+
+    Partitions can be represented in various ways:
+
+    0. a list/tuple/set of block lists/tuples/sets
+    1. a dict with block labels as keys and block lists/tuples/sets as values
+    2. a dict with block lists/tuples/sets as keys and block labels as values
+    3. a function from nodes in the original iterable to block labels
+    4. an equivalence relation function on the target iterable
+
+    As `quotient_graph` is designed to accept partitions represented as (0), (1) or
+    (4) only, the `equivalence_classes` function can be used to get the partitions
+    in the right form, in order to call `quotient_graph`.
+
+    .. _Strongly connected component: https://en.wikipedia.org/wiki/Strongly_connected_component
+
+    References
+    ----------
+    .. [1] Patrick Doreian, Vladimir Batagelj, and Anuska Ferligoj.
+       *Generalized Blockmodeling*.
+       Cambridge University Press, 2004.
+
+    """
+    # If the user provided an equivalence relation as a function, compute
+    # the blocks of the partition on the nodes of G induced by the
+    # equivalence relation.
+    if callable(partition):
+        # equivalence_classes always returns a partition of the whole of G.
+        partition = equivalence_classes(G, partition)
+        if not nx.community.is_partition(G, partition):
+            raise nx.NetworkXException(
+                "Input `partition` is not an equivalence relation for nodes of G"
+            )
+        return _quotient_graph(
+            G,
+            partition,
+            edge_relation,
+            node_data,
+            edge_data,
+            weight,
+            relabel,
+            create_using,
+        )
+
+    # If the partition is a dict, it is assumed to be one where the keys are
+    # user-defined block labels, and values are block lists, tuples or sets.
+    if isinstance(partition, dict):
+        partition = list(partition.values())
+
+    # If the user provided the partition as a collection of sets, we need to
+    # check whether it covers all of G's nodes. If it does not, prepare a
+    # suitable subgraph view.
+ partition_nodes = set().union(*partition) + if len(partition_nodes) != len(G): + G = G.subgraph(partition_nodes) + # Each node in the graph/subgraph must be in exactly one block. + if not nx.community.is_partition(G, partition): + raise NetworkXException("each node must be in exactly one part of `partition`") + return _quotient_graph( + G, + partition, + edge_relation, + node_data, + edge_data, + weight, + relabel, + create_using, + ) + + +def _quotient_graph( + G, partition, edge_relation, node_data, edge_data, weight, relabel, create_using +): + """Construct the quotient graph assuming input has been checked""" + if create_using is None: + H = G.__class__() + else: + H = nx.empty_graph(0, create_using) + # By default set some basic information about the subgraph that each block + # represents on the nodes in the quotient graph. + if node_data is None: + + def node_data(b): + S = G.subgraph(b) + return { + "graph": S, + "nnodes": len(S), + "nedges": S.number_of_edges(), + "density": density(S), + } + + # Each block of the partition becomes a node in the quotient graph. + partition = [frozenset(b) for b in partition] + H.add_nodes_from((b, node_data(b)) for b in partition) + # By default, the edge relation is the relation defined as follows. B is + # adjacent to C if a node in B is adjacent to a node in C, according to the + # edge set of G. + # + # This is not a particularly efficient implementation of this relation: + # there are O(n^2) pairs to check and each check may require O(log n) time + # (to check set membership). This can certainly be parallelized. + if edge_relation is None: + + def edge_relation(b, c): + return any(v in G[u] for u, v in product(b, c)) + + # By default, sum the weights of the edges joining pairs of nodes across + # blocks to get the weight of the edge joining those two blocks. + if edge_data is None: + + def edge_data(b, c): + edgedata = ( + d + for u, v, d in G.edges(b | c, data=True) + if (u in b and v in c) or (u in c and v in b) + ) + return {"weight": sum(d.get(weight, 1) for d in edgedata)} + + block_pairs = permutations(H, 2) if H.is_directed() else combinations(H, 2) + # In a multigraph, add one edge in the quotient graph for each edge + # in the original graph. + if H.is_multigraph(): + edges = chaini( + ( + (b, c, G.get_edge_data(u, v, default={})) + for u, v in product(b, c) + if v in G[u] + ) + for b, c in block_pairs + if edge_relation(b, c) + ) + # In a simple graph, apply the edge data function to each pair of + # blocks to determine the edge data attributes to apply to each edge + # in the quotient graph. + else: + edges = ( + (b, c, edge_data(b, c)) for (b, c) in block_pairs if edge_relation(b, c) + ) + H.add_edges_from(edges) + # If requested by the user, relabel the nodes to be integers, + # numbered in increasing order from zero in the same order as the + # iteration order of `partition`. + if relabel: + # Can't use nx.convert_node_labels_to_integers() here since we + # want the order of iteration to be the same for backward + # compatibility with the nx.blockmodel() function. + labels = {b: i for i, b in enumerate(partition)} + H = nx.relabel_nodes(H, labels) + return H + + +@nx._dispatchable( + preserve_all_attrs=True, mutates_input={"not copy": 4}, returns_graph=True +) +def contracted_nodes( + G, u, v, self_loops=True, copy=True, *, store_contraction_as="contraction" +): + """Returns the graph that results from contracting `u` and `v`. 
+
+    Node contraction identifies the two nodes as a single node incident to any
+    edge that was incident to the original two nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph whose nodes will be contracted.
+
+    u, v : nodes
+        Must be nodes in `G`.
+
+    self_loops : Boolean
+        If this is True, any edges joining `u` and `v` in `G` become
+        self-loops on the new node in the returned graph.
+
+    copy : Boolean
+        If this is True (default True), make a copy of
+        `G` and return that instead of directly changing `G`.
+
+    store_contraction_as : str or None, default="contraction"
+        Name of the node/edge attribute where information about the contraction
+        should be stored. By default information about the contracted node and
+        any contracted edges is stored in a ``"contraction"`` attribute on the
+        resulting node and edge. If `None`, information about the contracted
+        nodes/edges and their data are not stored.
+
+    Returns
+    -------
+    Networkx graph
+        If copy is True,
+        A new graph object of the same type as `G` (leaving `G` unmodified)
+        with `u` and `v` identified in a single node. The right node `v`
+        will be merged into the node `u`, so only `u` will appear in the
+        returned graph.
+        If copy is False,
+        Modifies `G` with `u` and `v` identified in a single node.
+        The right node `v` will be merged into the node `u`, so
+        only `u` will appear in the returned graph.
+
+    Notes
+    -----
+    For multigraphs, the edge keys for the realigned edges may
+    not be the same as the edge keys for the old edges. This is
+    natural because edge keys are unique only within each pair of nodes.
+
+    For non-multigraphs where `u` and `v` are adjacent to a third node
+    `w`, the edge (`v`, `w`) will be contracted into the edge (`u`,
+    `w`) with its attributes stored into a "contraction" attribute.
+
+    This function is also available as `identified_nodes`.
+
+    Examples
+    --------
+    Contracting two nonadjacent nodes of the cycle graph on four nodes `C_4`
+    yields the path graph (ignoring parallel edges):
+
+    >>> G = nx.cycle_graph(4)
+    >>> M = nx.contracted_nodes(G, 1, 3)
+    >>> P3 = nx.path_graph(3)
+    >>> nx.is_isomorphic(M, P3)
+    True
+
+    >>> G = nx.MultiGraph(P3)
+    >>> M = nx.contracted_nodes(G, 0, 2)
+    >>> M.edges
+    MultiEdgeView([(0, 1, 0), (0, 1, 1)])
+
+    >>> G = nx.Graph([(1, 2), (2, 2)])
+    >>> H = nx.contracted_nodes(G, 1, 2, self_loops=False)
+    >>> list(H.nodes())
+    [1]
+    >>> list(H.edges())
+    [(1, 1)]
+
+    In a ``MultiDiGraph`` with a self loop, the in and out edges are
+    treated as separate edges, so contracting a node that has a self
+    loop will add multiple edges:
+
+    >>> G = nx.MultiDiGraph([(1, 2), (2, 2)])
+    >>> H = nx.contracted_nodes(G, 1, 2)
+    >>> list(H.edges())  # edge 1->2, 2->2, 2<-2 from the original Graph G
+    [(1, 1), (1, 1), (1, 1)]
+    >>> H = nx.contracted_nodes(G, 1, 2, self_loops=False)
+    >>> list(H.edges())  # edge 2->2, 2<-2 from the original Graph G
+    [(1, 1), (1, 1)]
+
+    See Also
+    --------
+    contracted_edge
+    quotient_graph
+
+    """
+    # Copying has significant overhead and can be disabled if needed
+    H = G.copy() if copy else G
+
+    # edge code uses G.edges(v) instead of G.adj[v] to handle multiedges
+    if H.is_directed():
+        edges_to_remap = chain(G.in_edges(v, data=True), G.out_edges(v, data=True))
+    else:
+        edges_to_remap = G.edges(v, data=True)
+
+    # If H is G (i.e. copy=False), the generators change as H changes.
+    # This makes the edges_to_remap independent of H.
+    if not copy:
+        edges_to_remap = list(edges_to_remap)
+
+    v_data = H.nodes[v]
+    H.remove_node(v)
+
+    # A bit of input munging to extract whether contraction info should be
+    # stored, and if so bind to a shorter name
+    if _store_contraction := (store_contraction_as is not None):
+        contraction = store_contraction_as
+
+    for prev_w, prev_x, d in edges_to_remap:
+        w = prev_w if prev_w != v else u
+        x = prev_x if prev_x != v else u
+
+        if ({prev_w, prev_x} == {u, v}) and not self_loops:
+            continue
+
+        if not H.has_edge(w, x) or G.is_multigraph():
+            H.add_edge(w, x, **d)
+            continue
+
+        # Store information about the contracted edge iff `store_contraction` is not None
+        if _store_contraction:
+            if contraction in H.edges[(w, x)]:
+                H.edges[(w, x)][contraction][(prev_w, prev_x)] = d
+            else:
+                H.edges[(w, x)][contraction] = {(prev_w, prev_x): d}
+
+    # Store information about the contracted node iff `store_contraction`
+    if _store_contraction:
+        if contraction in H.nodes[u]:
+            H.nodes[u][contraction][v] = v_data
+        else:
+            H.nodes[u][contraction] = {v: v_data}
+
+    return H
+
+
+identified_nodes = contracted_nodes
+
+
+@nx._dispatchable(
+    preserve_all_attrs=True, mutates_input={"not copy": 3}, returns_graph=True
+)
+def contracted_edge(
+    G, edge, self_loops=True, copy=True, *, store_contraction_as="contraction"
+):
+    """Returns the graph that results from contracting the specified edge.
+
+    Edge contraction identifies the two endpoints of the edge as a single node
+    incident to any edge that was incident to the original two nodes. A graph
+    that results from edge contraction is called a *minor* of the original
+    graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph whose edge will be contracted.
+
+    edge : tuple
+        Must be a pair of nodes in `G`.
+
+    self_loops : Boolean
+        If this is True, any edges (including `edge`) joining the
+        endpoints of `edge` in `G` become self-loops on the new node in the
+        returned graph.
+
+    copy : Boolean (default True)
+        If this is True, the contraction will be performed on a copy of `G`,
+        otherwise the contraction will happen in place.
+
+    store_contraction_as : str or None, default="contraction"
+        Name of the node/edge attribute where information about the contraction
+        should be stored. By default information about the contracted node and
+        any contracted edges is stored in a ``"contraction"`` attribute on the
+        resulting node and edge. If `None`, information about the contracted
+        nodes/edges and their data are not stored.
+
+    Returns
+    -------
+    Networkx graph
+        A new graph object of the same type as `G` (leaving `G` unmodified)
+        with endpoints of `edge` identified in a single node. The right node
+        of `edge` will be merged into the left one, so only the left one will
+        appear in the returned graph.
+
+    Raises
+    ------
+    ValueError
+        If `edge` is not an edge in `G`.
+
+    Examples
+    --------
+    Attempting to contract two nonadjacent nodes yields an error:
+
+    >>> G = nx.cycle_graph(4)
+    >>> nx.contracted_edge(G, (1, 3))
+    Traceback (most recent call last):
+    ...
+    ValueError: Edge (1, 3) does not exist in graph G; cannot contract it
+
+    Contracting two adjacent nodes in the cycle graph on *n* nodes yields the
+    cycle graph on *n - 1* nodes:
+
+    >>> C5 = nx.cycle_graph(5)
+    >>> C4 = nx.cycle_graph(4)
+    >>> M = nx.contracted_edge(C5, (0, 1), self_loops=False)
+    >>> nx.is_isomorphic(M, C4)
+    True
+
+    See also
+    --------
+    contracted_nodes
+    quotient_graph
+
+    """
+    u, v = edge[:2]
+    if not G.has_edge(u, v):
+        raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it")
+    return contracted_nodes(
+        G,
+        u,
+        v,
+        self_loops=self_loops,
+        copy=copy,
+        store_contraction_as=store_contraction_as,
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-312.pyc
new file mode 100644
index 0000000..a227de4
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/__pycache__/test_contraction.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py
new file mode 100644
index 0000000..2cecfa1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/minors/tests/test_contraction.py
@@ -0,0 +1,544 @@
+"""Unit tests for the :mod:`networkx.algorithms.minors.contraction` module."""
+
+import pytest
+
+import networkx as nx
+from networkx.utils import arbitrary_element, edges_equal, nodes_equal
+
+
+def test_quotient_graph_complete_multipartite():
+    """Tests that the quotient graph of the complete *n*-partite graph
+    under the "same neighbors" node relation is the complete graph on *n*
+    nodes.
+
+    """
+    G = nx.complete_multipartite_graph(2, 3, 4)
+    # Two nodes are equivalent if they are not adjacent but have the same
+    # neighbor set.
+
+    def same_neighbors(u, v):
+        return u not in G[v] and v not in G[u] and G[u] == G[v]
+
+    expected = nx.complete_graph(3)
+    actual = nx.quotient_graph(G, same_neighbors)
+    # It won't take too long to run a graph isomorphism algorithm on such
+    # small graphs.
+ assert nx.is_isomorphic(expected, actual) + + +def test_quotient_graph_complete_bipartite(): + """Tests that the quotient graph of the complete bipartite graph under + the "same neighbors" node relation is `K_2`. + + """ + G = nx.complete_bipartite_graph(2, 3) + # Two nodes are equivalent if they are not adjacent but have the same + # neighbor set. + + def same_neighbors(u, v): + return u not in G[v] and v not in G[u] and G[u] == G[v] + + expected = nx.complete_graph(2) + actual = nx.quotient_graph(G, same_neighbors) + # It won't take too long to run a graph isomorphism algorithm on such + # small graphs. + assert nx.is_isomorphic(expected, actual) + + +def test_quotient_graph_edge_relation(): + """Tests for specifying an alternate edge relation for the quotient + graph. + + """ + G = nx.path_graph(5) + + def identity(u, v): + return u == v + + def same_parity(b, c): + return arbitrary_element(b) % 2 == arbitrary_element(c) % 2 + + actual = nx.quotient_graph(G, identity, same_parity) + expected = nx.Graph() + expected.add_edges_from([(0, 2), (0, 4), (2, 4)]) + expected.add_edge(1, 3) + assert nx.is_isomorphic(actual, expected) + + +def test_condensation_as_quotient(): + """This tests that the condensation of a graph can be viewed as the + quotient graph under the "in the same connected component" equivalence + relation. + + """ + # This example graph comes from the file `test_strongly_connected.py`. + G = nx.DiGraph() + G.add_edges_from( + [ + (1, 2), + (2, 3), + (2, 11), + (2, 12), + (3, 4), + (4, 3), + (4, 5), + (5, 6), + (6, 5), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 9), + (9, 7), + (10, 6), + (11, 2), + (11, 4), + (11, 6), + (12, 6), + (12, 11), + ] + ) + scc = list(nx.strongly_connected_components(G)) + C = nx.condensation(G, scc) + component_of = C.graph["mapping"] + # Two nodes are equivalent if they are in the same connected component. 
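+    # For example, the edges (2, 11) and (11, 2) above put nodes 2 and 11 on
+    # a common directed cycle, so component_of[2] == component_of[11] and the
+    # two nodes are identified in the quotient, exactly as condensation
+    # merges their strongly connected component.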
+ + def same_component(u, v): + return component_of[u] == component_of[v] + + Q = nx.quotient_graph(G, same_component) + assert nx.is_isomorphic(C, Q) + + +def test_path(): + G = nx.path_graph(6) + partition = [{0, 1}, {2, 3}, {4, 5}] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_path__partition_provided_as_dict_of_lists(): + G = nx.path_graph(6) + partition = {0: [0, 1], 2: [2, 3], 4: [4, 5]} + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_path__partition_provided_as_dict_of_tuples(): + G = nx.path_graph(6) + partition = {0: (0, 1), 2: (2, 3), 4: (4, 5)} + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_path__partition_provided_as_dict_of_sets(): + G = nx.path_graph(6) + partition = {0: {0, 1}, 2: {2, 3}, 4: {4, 5}} + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_multigraph_path(): + G = nx.MultiGraph(nx.path_graph(6)) + partition = [{0, 1}, {2, 3}, {4, 5}] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_directed_path(): + G = nx.DiGraph() + nx.add_path(G, range(6)) + partition = [{0, 1}, {2, 3}, {4, 5}] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 + + +def test_directed_multigraph_path(): + G = nx.MultiDiGraph() + nx.add_path(G, range(6)) + partition = [{0, 1}, {2, 3}, {4, 5}] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 0.5 + + +def test_overlapping_blocks(): + with pytest.raises(nx.NetworkXException): + G = nx.path_graph(6) + partition = [{0, 1, 2}, {2, 3}, {4, 5}] + nx.quotient_graph(G, partition) + + +def test_weighted_path(): + G = nx.path_graph(6) + for i in range(5): + G[i][i + 1]["w"] = i + 1 + partition = [{0, 1}, {2, 3}, {4, 5}] + M = nx.quotient_graph(G, partition, weight="w", relabel=True) + assert nodes_equal(M, [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + assert M[0][1]["weight"] == 2 + assert M[1][2]["weight"] == 4 + for n in M: + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1 + + +def test_barbell(): + G = nx.barbell_graph(3, 0) + partition = [{0, 1, 2}, {3, 4, 5}] + M = 
nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1]) + assert edges_equal(M.edges(), [(0, 1)]) + for n in M: + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 + + +def test_barbell_plus(): + G = nx.barbell_graph(3, 0) + # Add an extra edge joining the bells. + G.add_edge(0, 5) + partition = [{0, 1, 2}, {3, 4, 5}] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M, [0, 1]) + assert edges_equal(M.edges(), [(0, 1)]) + assert M[0][1]["weight"] == 2 + for n in M: + assert M.nodes[n]["nedges"] == 3 + assert M.nodes[n]["nnodes"] == 3 + assert M.nodes[n]["density"] == 1 + + +def test_blockmodel(): + G = nx.path_graph(6) + partition = [[0, 1], [2, 3], [4, 5]] + M = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(M.nodes(), [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M.nodes(): + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 + + +def test_multigraph_blockmodel(): + G = nx.MultiGraph(nx.path_graph(6)) + partition = [[0, 1], [2, 3], [4, 5]] + M = nx.quotient_graph(G, partition, create_using=nx.MultiGraph(), relabel=True) + assert nodes_equal(M.nodes(), [0, 1, 2]) + assert edges_equal(M.edges(), [(0, 1), (1, 2)]) + for n in M.nodes(): + assert M.nodes[n]["nedges"] == 1 + assert M.nodes[n]["nnodes"] == 2 + assert M.nodes[n]["density"] == 1.0 + + +def test_quotient_graph_incomplete_partition(): + G = nx.path_graph(6) + partition = [] + H = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(H.nodes(), []) + assert edges_equal(H.edges(), []) + + partition = [[0, 1], [2, 3], [5]] + H = nx.quotient_graph(G, partition, relabel=True) + assert nodes_equal(H.nodes(), [0, 1, 2]) + assert edges_equal(H.edges(), [(0, 1)]) + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", (True, False)) +@pytest.mark.parametrize("selfloops", (True, False)) +def test_undirected_node_contraction(store_contraction_as, copy, selfloops): + """Tests for node contraction in an undirected graph.""" + G = nx.cycle_graph(4) + actual = nx.contracted_nodes( + G, + 0, + 1, + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + + expected = nx.cycle_graph(3) + if selfloops: + expected.add_edge(0, 0) + + assert nx.is_isomorphic(actual, expected) + + if not copy: + assert actual is G + + # Test contracted node attributes + if store_contraction_as is not None: + assert actual.nodes[0][store_contraction_as] == {1: {}} + else: + assert actual.nodes[0] == {} + # There should be no contracted edges for this case + assert all(d == {} for _, _, d in actual.edges(data=True)) + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", (True, False)) +@pytest.mark.parametrize("selfloops", (True, False)) +def test_directed_node_contraction(store_contraction_as, copy, selfloops): + """Tests for node contraction in a directed graph.""" + G = nx.DiGraph(nx.cycle_graph(4)) + actual = nx.contracted_nodes( + G, + 0, + 1, + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + + expected = nx.DiGraph(nx.cycle_graph(3)) + if selfloops: + expected.add_edge(0, 0) + + assert nx.is_isomorphic(actual, expected) + + if not copy: + assert actual is G + # Test contracted node attributes + if store_contraction_as is not None: + assert 
actual.nodes[0][store_contraction_as] == {1: {}} + else: + assert actual.nodes[0] == {} + # Test contracted edge attributes (only relevant if self loops is enabled) + if selfloops and store_contraction_as: + assert actual.edges[(0, 0)][store_contraction_as] == {(1, 0): {}} + else: + assert all(d == {} for _, _, d in actual.edges(data=True)) + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", (True, False)) +@pytest.mark.parametrize("selfloops", (True, False)) +def test_contracted_nodes_multigraph(store_contraction_as, copy, selfloops): + """Tests that using a MultiGraph creates multiple edges. `store_contraction_as` + has no effect for multigraphs.""" + G = nx.path_graph(3, create_using=nx.MultiGraph) + G.add_edges_from([(0, 1), (0, 0), (0, 2)]) + actual = nx.contracted_nodes( + G, + 0, + 2, + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + # Two (0, 1) edges from G, another from the contraction of edge (1, 2) + expected = nx.MultiGraph([(0, 1), (0, 1), (0, 1), (0, 0)]) + # One (0, 0) edge from G, another from the contraction of edge (0, 2), but + # only if `selfloops` is True + if selfloops: + expected.add_edge(0, 0) + + assert edges_equal(actual.edges, expected.edges) + if not copy: + assert actual is G + + +def test_multigraph_keys(): + """Tests that multiedge keys are reset in new graph.""" + G = nx.path_graph(3, create_using=nx.MultiGraph()) + G.add_edge(0, 1, 5) + G.add_edge(0, 0, 0) + G.add_edge(0, 2, 5) + actual = nx.contracted_nodes(G, 0, 2) + expected = nx.MultiGraph() + expected.add_edge(0, 1, 0) + expected.add_edge(0, 1, 5) + expected.add_edge(0, 1, 2) # keyed as 2 b/c 2 edges already in G + expected.add_edge(0, 0, 0) + expected.add_edge(0, 0, 1) # this comes from (0, 2, 5) + assert edges_equal(actual.edges, expected.edges) + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", (True, False)) +@pytest.mark.parametrize("selfloops", (True, False)) +def test_node_attributes(store_contraction_as, copy, selfloops): + """Tests that node contraction preserves node attributes.""" + G = nx.cycle_graph(4) + # Add some data to the two nodes being contracted. + G.nodes[0]["foo"] = "bar" + G.nodes[1]["baz"] = "xyzzy" + actual = nx.contracted_nodes( + G, + 0, + 1, + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + # We expect that contracting the nodes 0 and 1 in C_4 yields K_3, but + # with nodes labeled 0, 2, and 3. + expected = nx.complete_graph(3) + expected = nx.relabel_nodes(expected, {1: 2, 2: 3}) + expected.nodes[0]["foo"] = "bar" + # ... 
and a self-loop (0, 0), if self_loops=True + if selfloops: + expected.add_edge(0, 0) + + if store_contraction_as: + cdict = {1: {"baz": "xyzzy"}} + expected.nodes[0].update({"foo": "bar", store_contraction_as: cdict}) + + assert nx.is_isomorphic(actual, expected) + assert actual.nodes(data=True) == expected.nodes(data=True) + if not copy: + assert actual is G + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +def test_edge_attributes(store_contraction_as): + """Tests that node contraction preserves edge attributes.""" + # Shape: src1 --> dest <-- src2 + G = nx.DiGraph([("src1", "dest"), ("src2", "dest")]) + G["src1"]["dest"]["value"] = "src1-->dest" + G["src2"]["dest"]["value"] = "src2-->dest" + + # New Shape: src1 --> dest + H = nx.contracted_nodes( + G, "src1", "src2", store_contraction_as=store_contraction_as + ) + assert H.edges[("src1", "dest")]["value"] == "src1-->dest" # Should be unchanged + if store_contraction_as: + assert ( + H.edges[("src1", "dest")][store_contraction_as][("src2", "dest")]["value"] + == "src2-->dest" + ) + else: + assert store_contraction_as not in H.edges[("src1", "dest")] + + G = nx.MultiDiGraph(G) + # New Shape: src1 -(x2)-> dest + H = nx.contracted_nodes( + G, "src1", "src2", store_contraction_as=store_contraction_as + ) + # store_contraction should not affect multigraphs + assert len(H.edges(("src1", "dest"))) == 2 + assert H.edges[("src1", "dest", 0)]["value"] == "src1-->dest" + assert H.edges[("src1", "dest", 1)]["value"] == "src2-->dest" + + +def test_contract_loop_graph(): + """Tests for node contraction when nodes have loops.""" + G = nx.cycle_graph(4) + G.add_edge(0, 0) + actual = nx.contracted_nodes(G, 0, 1) + expected = nx.complete_graph([0, 2, 3]) + expected.add_edge(0, 0) + assert edges_equal(actual.edges, expected.edges) + actual = nx.contracted_nodes(G, 1, 0) + expected = nx.complete_graph([1, 2, 3]) + expected.add_edge(1, 1) + assert edges_equal(actual.edges, expected.edges) + + +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", (True, False)) +@pytest.mark.parametrize("selfloops", (True, False)) +def test_undirected_edge_contraction(store_contraction_as, copy, selfloops): + """Tests for node contraction in an undirected graph.""" + G = nx.cycle_graph(4) + actual = nx.contracted_edge( + G, + (0, 1), + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + + expected = nx.cycle_graph(3) + if selfloops: + expected.add_edge(0, 0) + + assert nx.is_isomorphic(actual, expected) + + if not copy: + assert actual is G + + # Test contracted node attributes + if store_contraction_as is not None: + assert actual.nodes[0][store_contraction_as] == {1: {}} + else: + assert actual.nodes[0] == {} + # There should be no contracted edges for this case + assert all(d == {} for _, _, d in actual.edges(data=True)) + + +@pytest.mark.parametrize("edge", [(0, 1), (0, 1, 0)]) +@pytest.mark.parametrize("store_contraction_as", ("contraction", "c", None)) +@pytest.mark.parametrize("copy", [True, False]) +@pytest.mark.parametrize("selfloops", [True, False]) +def test_multigraph_edge_contraction(edge, store_contraction_as, copy, selfloops): + """Tests for edge contraction in a multigraph""" + G = nx.cycle_graph(4, create_using=nx.MultiGraph) + actual = nx.contracted_edge( + G, + edge, + copy=copy, + self_loops=selfloops, + store_contraction_as=store_contraction_as, + ) + expected = nx.relabel_nodes( + nx.complete_graph(3, create_using=nx.MultiGraph), 
{0: 0, 1: 2, 2: 3} + ) + if selfloops: + expected.add_edge(0, 0) + + assert edges_equal(actual.edges, expected.edges) + if not copy: + assert actual is G + + +def test_nonexistent_edge(): + """Tests that attempting to contract a nonexistent edge raises an + exception. + + """ + G = nx.cycle_graph(4) + with pytest.raises(ValueError): + nx.contracted_edge(G, (0, 2)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py new file mode 100644 index 0000000..0652ac4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/mis.py @@ -0,0 +1,78 @@ +""" +Algorithm to find a maximal (not maximum) independent set. + +""" + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["maximal_independent_set"] + + +@not_implemented_for("directed") +@py_random_state(2) +@nx._dispatchable +def maximal_independent_set(G, nodes=None, seed=None): + """Returns a random maximal independent set guaranteed to contain + a given set of nodes. + + An independent set is a set of nodes such that the subgraph + of G induced by these nodes contains no edges. A maximal + independent set is an independent set such that it is not possible + to add a new node and still get an independent set. + + Parameters + ---------- + G : NetworkX graph + + nodes : list or iterable + Nodes that must be part of the independent set. This set of nodes + must be independent. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + indep_nodes : list + List of nodes that are part of a maximal independent set. + + Raises + ------ + NetworkXUnfeasible + If the nodes in the provided list are not part of the graph or + do not form an independent set, an exception is raised. + + NetworkXNotImplemented + If `G` is directed. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.maximal_independent_set(G) # doctest: +SKIP + [4, 0, 2] + >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP + [1, 3] + + Notes + ----- + This algorithm does not solve the maximum independent set problem. + + """ + if not nodes: + nodes = {seed.choice(list(G))} + else: + nodes = set(nodes) + if not nodes.issubset(G): + raise nx.NetworkXUnfeasible(f"{nodes} is not a subset of the nodes of G") + neighbors = set.union(*[set(G.adj[v]) for v in nodes]) + if set.intersection(neighbors, nodes): + raise nx.NetworkXUnfeasible(f"{nodes} is not an independent set of G") + indep_nodes = list(nodes) + available_nodes = set(G.nodes()).difference(neighbors.union(nodes)) + while available_nodes: + node = seed.choice(list(available_nodes)) + indep_nodes.append(node) + available_nodes.difference_update(list(G.adj[node]) + [node]) + return indep_nodes diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py new file mode 100644 index 0000000..e2acf80 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/moral.py @@ -0,0 +1,59 @@ +r"""Function for computing the moral graph of a directed graph.""" + +import itertools + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["moral_graph"] + + +@not_implemented_for("undirected") +@nx._dispatchable(returns_graph=True) +def moral_graph(G): + r"""Return the Moral Graph + + Returns the moralized graph of a given directed graph. 
+ + Parameters + ---------- + G : NetworkX graph + Directed graph + + Returns + ------- + H : NetworkX graph + The undirected moralized graph of G + + Raises + ------ + NetworkXNotImplemented + If `G` is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (2, 3), (2, 5), (3, 4), (4, 3)]) + >>> G_moral = nx.moral_graph(G) + >>> G_moral.edges() + EdgeView([(1, 2), (2, 3), (2, 5), (2, 4), (3, 4)]) + + Notes + ----- + A moral graph is an undirected graph H = (V, E) generated from a + directed Graph, where if a node has more than one parent node, edges + between these parent nodes are inserted and all directed edges become + undirected. + + https://en.wikipedia.org/wiki/Moral_graph + + References + ---------- + .. [1] Wray L. Buntine. 1995. Chain graphs for learning. + In Proceedings of the Eleventh conference on Uncertainty + in artificial intelligence (UAI'95) + """ + H = G.to_undirected() + for preds in G.pred.values(): + predecessors_combinations = itertools.combinations(preds, r=2) + H.add_edges_from(predecessors_combinations) + return H diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py new file mode 100644 index 0000000..b69a6c9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/node_classification.py @@ -0,0 +1,219 @@ +"""This module provides the functions for node classification problem. + +The functions in this module are not imported +into the top level `networkx` namespace. +You can access these functions by importing +the `networkx.algorithms.node_classification` modules, +then accessing the functions as attributes of `node_classification`. +For example: + + >>> from networkx.algorithms import node_classification + >>> G = nx.path_graph(4) + >>> G.edges() + EdgeView([(0, 1), (1, 2), (2, 3)]) + >>> G.nodes[0]["label"] = "A" + >>> G.nodes[3]["label"] = "B" + >>> node_classification.harmonic_function(G) + ['A', 'A', 'B', 'B'] + +References +---------- +Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August). +Semi-supervised learning using gaussian fields and harmonic functions. +In ICML (Vol. 3, pp. 912-919). +""" + +import networkx as nx + +__all__ = ["harmonic_function", "local_and_global_consistency"] + + +@nx.utils.not_implemented_for("directed") +@nx._dispatchable(node_attrs="label_name") +def harmonic_function(G, max_iter=30, label_name="label"): + """Node classification by Harmonic function + + Function for computing Harmonic function algorithm by Zhu et al. + + Parameters + ---------- + G : NetworkX Graph + max_iter : int + maximum number of iterations allowed + label_name : string + name of target labels to predict + + Returns + ------- + predicted : list + List of length ``len(G)`` with the predicted labels for each node. + + Raises + ------ + NetworkXError + If no nodes in `G` have attribute `label_name`. + + Examples + -------- + >>> from networkx.algorithms import node_classification + >>> G = nx.path_graph(4) + >>> G.nodes[0]["label"] = "A" + >>> G.nodes[3]["label"] = "B" + >>> G.nodes(data=True) + NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}}) + >>> G.edges() + EdgeView([(0, 1), (1, 2), (2, 3)]) + >>> predicted = node_classification.harmonic_function(G) + >>> predicted + ['A', 'A', 'B', 'B'] + + References + ---------- + Zhu, X., Ghahramani, Z., & Lafferty, J. (2003, August). + Semi-supervised learning using gaussian fields and harmonic functions. + In ICML (Vol. 3, pp. 912-919). 
+ """ + import numpy as np + import scipy as sp + + X = nx.to_scipy_sparse_array(G) # adjacency matrix + labels, label_dict = _get_label_info(G, label_name) + + if labels.shape[0] == 0: + raise nx.NetworkXError( + f"No node on the input graph is labeled by '{label_name}'." + ) + + n_samples = X.shape[0] + n_classes = label_dict.shape[0] + F = np.zeros((n_samples, n_classes)) + + # Build propagation matrix + degrees = X.sum(axis=0) + degrees[degrees == 0] = 1 # Avoid division by 0 + # TODO: csr_array + D = sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)) + P = (D @ X).tolil() + P[labels[:, 0]] = 0 # labels[:, 0] indicates IDs of labeled nodes + # Build base matrix + B = np.zeros((n_samples, n_classes)) + B[labels[:, 0], labels[:, 1]] = 1 + + for _ in range(max_iter): + F = (P @ F) + B + + return label_dict[np.argmax(F, axis=1)].tolist() + + +@nx.utils.not_implemented_for("directed") +@nx._dispatchable(node_attrs="label_name") +def local_and_global_consistency(G, alpha=0.99, max_iter=30, label_name="label"): + """Node classification by Local and Global Consistency + + Function for computing Local and global consistency algorithm by Zhou et al. + + Parameters + ---------- + G : NetworkX Graph + alpha : float + Clamping factor + max_iter : int + Maximum number of iterations allowed + label_name : string + Name of target labels to predict + + Returns + ------- + predicted : list + List of length ``len(G)`` with the predicted labels for each node. + + Raises + ------ + NetworkXError + If no nodes in `G` have attribute `label_name`. + + Examples + -------- + >>> from networkx.algorithms import node_classification + >>> G = nx.path_graph(4) + >>> G.nodes[0]["label"] = "A" + >>> G.nodes[3]["label"] = "B" + >>> G.nodes(data=True) + NodeDataView({0: {'label': 'A'}, 1: {}, 2: {}, 3: {'label': 'B'}}) + >>> G.edges() + EdgeView([(0, 1), (1, 2), (2, 3)]) + >>> predicted = node_classification.local_and_global_consistency(G) + >>> predicted + ['A', 'A', 'B', 'B'] + + References + ---------- + Zhou, D., Bousquet, O., Lal, T. N., Weston, J., & Schölkopf, B. (2004). + Learning with local and global consistency. + Advances in neural information processing systems, 16(16), 321-328. + """ + import numpy as np + import scipy as sp + + X = nx.to_scipy_sparse_array(G) # adjacency matrix + labels, label_dict = _get_label_info(G, label_name) + + if labels.shape[0] == 0: + raise nx.NetworkXError( + f"No node on the input graph is labeled by '{label_name}'." 
+        )
+
+    n_samples = X.shape[0]
+    n_classes = label_dict.shape[0]
+    F = np.zeros((n_samples, n_classes))
+
+    # Build propagation matrix
+    degrees = X.sum(axis=0)
+    degrees[degrees == 0] = 1  # Avoid division by 0
+    # TODO: csr_array
+    D2 = np.sqrt(sp.sparse.csr_array(sp.sparse.diags((1.0 / degrees), offsets=0)))
+    P = alpha * ((D2 @ X) @ D2)
+    # Build base matrix
+    B = np.zeros((n_samples, n_classes))
+    B[labels[:, 0], labels[:, 1]] = 1 - alpha
+
+    for _ in range(max_iter):
+        F = (P @ F) + B
+
+    return label_dict[np.argmax(F, axis=1)].tolist()
+
+
+def _get_label_info(G, label_name):
+    """Get and return label information from the input graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    label_name : string
+        Name of the target label
+
+    Returns
+    -------
+    labels : numpy array, shape = [n_labeled_samples, 2]
+        Array of pairs of labeled node ID and label ID
+    label_dict : numpy array, shape = [n_classes]
+        Array of labels; the i-th element contains the label corresponding
+        to label ID `i`
+    """
+    import numpy as np
+
+    labels = []
+    label_to_id = {}
+    lid = 0
+    for i, n in enumerate(G.nodes(data=True)):
+        if label_name in n[1]:
+            label = n[1][label_name]
+            if label not in label_to_id:
+                label_to_id[label] = lid
+                lid += 1
+            labels.append([i, label_to_id[label]])
+    labels = np.array(labels)
+    label_dict = np.array(
+        [label for label, _ in sorted(label_to_id.items(), key=lambda x: x[1])]
+    )
+    return (labels, label_dict)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py
new file mode 100644
index 0000000..1379911
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/non_randomness.py
@@ -0,0 +1,98 @@
+r"""Computation of graph non-randomness"""
+
+import math
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["non_randomness"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def non_randomness(G, k=None, weight="weight"):
+    """Compute the non-randomness of graph G.
+
+    The first returned value nr is the sum of non-randomness values of all
+    edges within the graph (where the non-randomness of an edge tends to be
+    small when the two nodes linked by that edge are from two different
+    communities).
+
+    The second returned value nr_rd is a relative measure that indicates
+    to what extent graph G differs from random graphs in terms
+    of probability. When it is close to 0, the graph tends to be more
+    likely generated by an Erdos Renyi model.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Graph must be symmetric, connected, and without self-loops.
+
+    k : int
+        The number of communities in G.
+        If k is not set, the function will use a default community
+        detection algorithm to set it.
+
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value used
+        as a weight. If None, then each edge has weight 1, i.e., the graph is
+        binary.
+
+    Returns
+    -------
+    non-randomness : (float, float) tuple
+        Non-randomness, Relative non-randomness w.r.t.
+        Erdos Renyi random graphs.
+
+    Raises
+    ------
+    NetworkXException
+        If the input graph is not connected.
+    NetworkXError
+        If the input graph contains self-loops or has no edges.
+ + Examples + -------- + >>> G = nx.karate_club_graph() + >>> nr, nr_rd = nx.non_randomness(G, 2) + >>> nr, nr_rd = nx.non_randomness(G, 2, "weight") + + Notes + ----- + This computes Eq. (4.4) and (4.5) in Ref. [1]_. + + If a weight field is passed, this algorithm will use the eigenvalues + of the weighted adjacency matrix to compute Eq. (4.4) and (4.5). + + References + ---------- + .. [1] Xiaowei Ying and Xintao Wu, + On Randomness Measures for Social Networks, + SIAM International Conference on Data Mining. 2009 + """ + import numpy as np + + # corner case: graph has no edges + if nx.is_empty(G): + raise nx.NetworkXError("non_randomness not applicable to empty graphs") + if not nx.is_connected(G): + raise nx.NetworkXException("Non connected graph.") + if len(list(nx.selfloop_edges(G))) > 0: + raise nx.NetworkXError("Graph must not contain self-loops") + + if k is None: + k = len(tuple(nx.community.label_propagation_communities(G))) + + # eq. 4.4 + eigenvalues = np.linalg.eigvals(nx.to_numpy_array(G, weight=weight)) + nr = float(np.real(np.sum(eigenvalues[:k]))) + + n = G.number_of_nodes() + m = G.number_of_edges() + p = (2 * k * m) / (n * (n - k)) + + # eq. 4.5 + nr_rd = (nr - ((n - 2 * k) * p + k)) / math.sqrt(2 * k * p * (1 - p)) + + return nr, nr_rd diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py new file mode 100644 index 0000000..0ebc6ab --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__init__.py @@ -0,0 +1,4 @@ +from networkx.algorithms.operators.all import * +from networkx.algorithms.operators.binary import * +from networkx.algorithms.operators.product import * +from networkx.algorithms.operators.unary import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..317f1e1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-312.pyc new file mode 100644 index 0000000..5667d04 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/all.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-312.pyc new file mode 100644 index 0000000..8c613d1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/binary.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-312.pyc new file mode 100644 index 0000000..5275f54 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/product.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-312.pyc
new file mode 100644
index 0000000..b661ae9
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/__pycache__/unary.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py
new file mode 100644
index 0000000..322a15a
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/all.py
@@ -0,0 +1,324 @@
+"""Operations on many graphs."""
+
+from itertools import chain, repeat
+
+import networkx as nx
+
+__all__ = ["union_all", "compose_all", "disjoint_union_all", "intersection_all"]
+
+
+@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True)
+def union_all(graphs, rename=()):
+    """Returns the union of all graphs.
+
+    The graphs must be disjoint, otherwise an exception is raised.
+
+    Parameters
+    ----------
+    graphs : iterable
+        Iterable of NetworkX graphs
+
+    rename : iterable, optional
+        Node names of graphs can be changed by specifying the tuple
+        rename=('G-','H-') (for example). Node "u" in G is then renamed
+        "G-u" and "v" in H is renamed "H-v". Infinite generators (like
+        itertools.count) are also supported.
+
+    Returns
+    -------
+    U : a graph with the same type as the first graph in the list
+
+    Raises
+    ------
+    ValueError
+        If `graphs` is an empty list.
+
+    NetworkXError
+        In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
+
+    Notes
+    -----
+    For operating on mixed type graphs, they should be converted to the same type.
+
+    >>> G = nx.Graph()
+    >>> H = nx.DiGraph()
+    >>> GH = union_all([nx.DiGraph(G), H])
+
+    To force a disjoint union with node relabeling, use
+    disjoint_union_all([G, H]) or convert_node_labels_to_integers().
+
+    Graph, edge, and node attributes are propagated to the union graph.
+    If a graph attribute is present in multiple graphs, then the value
+    from the last graph in the list with that attribute is used.
+
+    Examples
+    --------
+    >>> G1 = nx.Graph([(1, 2), (2, 3)])
+    >>> G2 = nx.Graph([(4, 5), (5, 6)])
+    >>> result_graph = nx.union_all([G1, G2])
+    >>> result_graph.nodes()
+    NodeView((1, 2, 3, 4, 5, 6))
+    >>> result_graph.edges()
+    EdgeView([(1, 2), (2, 3), (4, 5), (5, 6)])
+
+    See Also
+    --------
+    union
+    disjoint_union_all
+    """
+    R = None
+    seen_nodes = set()
+
+    # rename graph to obtain disjoint node labels
+    def add_prefix(graph, prefix):
+        if prefix is None:
+            return graph
+
+        def label(x):
+            return f"{prefix}{x}"
+
+        return nx.relabel_nodes(graph, label)
+
+    rename = chain(rename, repeat(None))
+    graphs = (add_prefix(G, name) for G, name in zip(graphs, rename))
+
+    for i, G in enumerate(graphs):
+        G_nodes_set = set(G.nodes)
+        if i == 0:
+            # Union is the same type as first graph
+            R = G.__class__()
+        elif G.is_directed() != R.is_directed():
+            raise nx.NetworkXError("All graphs must be directed or undirected.")
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+        elif not seen_nodes.isdisjoint(G_nodes_set):
+            raise nx.NetworkXError(
+                "The node sets of the graphs are not disjoint.\n"
+                "Use `rename` to specify prefixes for the graphs or use\n"
+                "disjoint_union_all([G1, G2, ..., GN])."
+ ) + + seen_nodes |= G_nodes_set + R.graph.update(G.graph) + R.add_nodes_from(G.nodes(data=True)) + R.add_edges_from( + G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True) + ) + + if R is None: + raise ValueError("cannot apply union_all to an empty list") + + return R + + +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) +def disjoint_union_all(graphs): + """Returns the disjoint union of all graphs. + + This operation forces distinct integer node labels starting with 0 + for the first graph in the list and numbering consecutively. + + Parameters + ---------- + graphs : iterable + Iterable of NetworkX graphs + + Returns + ------- + U : A graph with the same type as the first graph in list + + Raises + ------ + ValueError + If `graphs` is an empty list. + + NetworkXError + In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs. + + Examples + -------- + >>> G1 = nx.Graph([(1, 2), (2, 3)]) + >>> G2 = nx.Graph([(4, 5), (5, 6)]) + >>> U = nx.disjoint_union_all([G1, G2]) + >>> list(U.nodes()) + [0, 1, 2, 3, 4, 5] + >>> list(U.edges()) + [(0, 1), (1, 2), (3, 4), (4, 5)] + + Notes + ----- + For operating on mixed type graphs, they should be converted to the same type. + + Graph, edge, and node attributes are propagated to the union graph. + If a graph attribute is present in multiple graphs, then the value + from the last graph in the list with that attribute is used. + """ + + def yield_relabeled(graphs): + first_label = 0 + for G in graphs: + yield nx.convert_node_labels_to_integers(G, first_label=first_label) + first_label += len(G) + + R = union_all(yield_relabeled(graphs)) + + return R + + +@nx._dispatchable(graphs="[graphs]", preserve_all_attrs=True, returns_graph=True) +def compose_all(graphs): + """Returns the composition of all graphs. + + Composition is the simple union of the node sets and edge sets. + The node sets of the supplied graphs need not be disjoint. + + Parameters + ---------- + graphs : iterable + Iterable of NetworkX graphs + + Returns + ------- + C : A graph with the same type as the first graph in list + + Raises + ------ + ValueError + If `graphs` is an empty list. + + NetworkXError + In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs. + + Examples + -------- + >>> G1 = nx.Graph([(1, 2), (2, 3)]) + >>> G2 = nx.Graph([(3, 4), (5, 6)]) + >>> C = nx.compose_all([G1, G2]) + >>> list(C.nodes()) + [1, 2, 3, 4, 5, 6] + >>> list(C.edges()) + [(1, 2), (2, 3), (3, 4), (5, 6)] + + Notes + ----- + For operating on mixed type graphs, they should be converted to the same type. + + Graph, edge, and node attributes are propagated to the union graph. + If a graph attribute is present in multiple graphs, then the value + from the last graph in the list with that attribute is used. 
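+
+    For instance, since graph attributes are applied in iteration order,
+    the value from the last graph in the list wins:
+
+    >>> G1 = nx.Graph(day="Mon")
+    >>> G2 = nx.Graph(day="Tue")
+    >>> nx.compose_all([G1, G2]).graph["day"]
+    'Tue'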
+    """
+    R = None
+
+    # graph attributes from later graphs take precedence over earlier ones
+    for i, G in enumerate(graphs):
+        if i == 0:
+            # create new graph
+            R = G.__class__()
+        elif G.is_directed() != R.is_directed():
+            raise nx.NetworkXError("All graphs must be directed or undirected.")
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+
+        R.graph.update(G.graph)
+        R.add_nodes_from(G.nodes(data=True))
+        R.add_edges_from(
+            G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True)
+        )
+
+    if R is None:
+        raise ValueError("cannot apply compose_all to an empty list")
+
+    return R
+
+
+@nx._dispatchable(graphs="[graphs]", returns_graph=True)
+def intersection_all(graphs):
+    """Returns a new graph that contains only the nodes and the edges that exist in
+    all graphs.
+
+    Parameters
+    ----------
+    graphs : iterable
+        Iterable of NetworkX graphs
+
+    Returns
+    -------
+    R : A new graph with the same type as the first graph in the list
+
+    Raises
+    ------
+    ValueError
+        If `graphs` is an empty list.
+
+    NetworkXError
+        In case of mixed type graphs, like MultiGraph and Graph, or directed and undirected graphs.
+
+    Notes
+    -----
+    For operating on mixed type graphs, they should be converted to the same type.
+
+    Attributes from the graph, nodes, and edges are not copied to the new
+    graph.
+
+    The resulting graph can be updated with attributes if desired.
+    For example, code which adds, for each node, the minimum value of the
+    ``capacity`` attribute across all graphs could look like this::
+
+        >>> g = nx.Graph()
+        >>> g.add_node(0, capacity=4)
+        >>> g.add_node(1, capacity=3)
+        >>> g.add_edge(0, 1)
+
+        >>> h = g.copy()
+        >>> h.nodes[0]["capacity"] = 2
+
+        >>> gh = nx.intersection_all([g, h])
+
+        >>> new_node_attr = {
+        ...     n: min(*(anyG.nodes[n].get("capacity", float("inf")) for anyG in [g, h]))
+        ...     for n in gh
+        ... }
+        >>> nx.set_node_attributes(gh, new_node_attr, "new_capacity")
+        >>> gh.nodes(data=True)
+        NodeDataView({0: {'new_capacity': 2}, 1: {'new_capacity': 3}})
+
+    Examples
+    --------
+    >>> G1 = nx.Graph([(1, 2), (2, 3)])
+    >>> G2 = nx.Graph([(2, 3), (3, 4)])
+    >>> R = nx.intersection_all([G1, G2])
+    >>> list(R.nodes())
+    [2, 3]
+    >>> list(R.edges())
+    [(2, 3)]
+
+    """
+    R = None
+
+    for i, G in enumerate(graphs):
+        G_nodes_set = set(G.nodes)
+        G_edges_set = set(G.edges)
+        if not G.is_directed():
+            if G.is_multigraph():
+                G_edges_set.update((v, u, k) for u, v, k in list(G_edges_set))
+            else:
+                G_edges_set.update((v, u) for u, v in list(G_edges_set))
+        if i == 0:
+            # create new graph
+            R = G.__class__()
+            node_intersection = G_nodes_set
+            edge_intersection = G_edges_set
+        elif G.is_directed() != R.is_directed():
+            raise nx.NetworkXError("All graphs must be directed or undirected.")
+        elif G.is_multigraph() != R.is_multigraph():
+            raise nx.NetworkXError("All graphs must be graphs or multigraphs.")
+        else:
+            node_intersection &= G_nodes_set
+            edge_intersection &= G_edges_set
+
+    if R is None:
+        raise ValueError("cannot apply intersection_all to an empty list")
+
+    R.add_nodes_from(node_intersection)
+    R.add_edges_from(edge_intersection)
+
+    return R
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py
new file mode 100644
index 0000000..c121292
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/binary.py
@@ -0,0 +1,468 @@
+"""
+Operations on graphs including union, intersection, difference.
+""" + +import networkx as nx + +__all__ = [ + "union", + "compose", + "disjoint_union", + "intersection", + "difference", + "symmetric_difference", + "full_join", +] +_G_H = {"G": 0, "H": 1} + + +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) +def union(G, H, rename=()): + """Combine graphs G and H. The names of nodes must be unique. + + A name collision between the graphs will raise an exception. + + A renaming facility is provided to avoid name collisions. + + + Parameters + ---------- + G, H : graph + A NetworkX graph + + rename : iterable , optional + Node names of G and H can be changed by specifying the tuple + rename=('G-','H-') (for example). Node "u" in G is then renamed + "G-u" and "v" in H is renamed "H-v". + + Returns + ------- + U : A union graph with the same type as G. + + See Also + -------- + compose + :func:`~networkx.Graph.update` + disjoint_union + + Notes + ----- + To combine graphs that have common nodes, consider compose(G, H) + or the method, Graph.update(). + + disjoint_union() is similar to union() except that it avoids name clashes + by relabeling the nodes with sequential integers. + + Edge and node attributes are propagated from G and H to the union graph. + Graph attributes are also propagated, but if they are present in both G and H, + then the value from H is used. + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)]) + >>> H = nx.Graph([(0, 1), (0, 3), (1, 3), (1, 2)]) + >>> U = nx.union(G, H, rename=("G", "H")) + >>> U.nodes + NodeView(('G0', 'G1', 'G2', 'H0', 'H1', 'H3', 'H2')) + >>> edgelist = list(U.edges) + >>> pprint(edgelist) + [('G0', 'G1'), + ('G0', 'G2'), + ('G1', 'G2'), + ('H0', 'H1'), + ('H0', 'H3'), + ('H1', 'H3'), + ('H1', 'H2')] + + + """ + return nx.union_all([G, H], rename) + + +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) +def disjoint_union(G, H): + """Combine graphs G and H. The nodes are assumed to be unique (disjoint). + + This algorithm automatically relabels nodes to avoid name collisions. + + Parameters + ---------- + G,H : graph + A NetworkX graph + + Returns + ------- + U : A union graph with the same type as G. + + See Also + -------- + union + compose + :func:`~networkx.Graph.update` + + Notes + ----- + A new graph is created, of the same class as G. It is recommended + that G and H be either both directed or both undirected. + + The nodes of G are relabeled 0 to len(G)-1, and the nodes of H are + relabeled len(G) to len(G)+len(H)-1. + + Renumbering forces G and H to be disjoint, so no exception is ever raised for a name collision. + To preserve the check for common nodes, use union(). + + Edge and node attributes are propagated from G and H to the union graph. + Graph attributes are also propagated, but if they are present in both G and H, + then the value from H is used. + + To combine graphs that have common nodes, consider compose(G, H) + or the method, Graph.update(). 
+ + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)]) + >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)]) + >>> G.nodes[0]["key1"] = 5 + >>> H.nodes[0]["key2"] = 10 + >>> U = nx.disjoint_union(G, H) + >>> U.nodes(data=True) + NodeDataView({0: {'key1': 5}, 1: {}, 2: {}, 3: {'key2': 10}, 4: {}, 5: {}, 6: {}}) + >>> U.edges + EdgeView([(0, 1), (0, 2), (1, 2), (3, 4), (4, 6), (5, 6)]) + """ + return nx.disjoint_union_all([G, H]) + + +@nx._dispatchable(graphs=_G_H, returns_graph=True) +def intersection(G, H): + """Returns a new graph that contains only the nodes and the edges that exist in + both G and H. + + Parameters + ---------- + G,H : graph + A NetworkX graph. G and H can have different node sets but must be both graphs or both multigraphs. + + Raises + ------ + NetworkXError + If one is a MultiGraph and the other one is a graph. + + Returns + ------- + GH : A new graph with the same type as G. + + Notes + ----- + Attributes from the graph, nodes, and edges are not copied to the new + graph. If you want a new graph of the intersection of G and H + with the attributes (including edge data) from G use remove_nodes_from() + as follows + + >>> G = nx.path_graph(3) + >>> H = nx.path_graph(5) + >>> R = G.copy() + >>> R.remove_nodes_from(n for n in G if n not in H) + >>> R.remove_edges_from(e for e in G.edges if e not in H.edges) + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (1, 2)]) + >>> H = nx.Graph([(0, 3), (1, 2), (2, 3)]) + >>> R = nx.intersection(G, H) + >>> R.nodes + NodeView((0, 1, 2)) + >>> R.edges + EdgeView([(1, 2)]) + """ + return nx.intersection_all([G, H]) + + +@nx._dispatchable(graphs=_G_H, returns_graph=True) +def difference(G, H): + """Returns a new graph that contains the edges that exist in G but not in H. + + The node sets of H and G must be the same. + + Parameters + ---------- + G,H : graph + A NetworkX graph. G and H must have the same node sets. + + Returns + ------- + D : A new graph with the same type as G. + + Notes + ----- + Attributes from the graph, nodes, and edges are not copied to the new + graph. If you want a new graph of the difference of G and H with + the attributes (including edge data) from G use remove_nodes_from() + as follows: + + >>> G = nx.path_graph(3) + >>> H = nx.path_graph(5) + >>> R = G.copy() + >>> R.remove_nodes_from(n for n in G if n in H) + + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)]) + >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)]) + >>> R = nx.difference(G, H) + >>> R.nodes + NodeView((0, 1, 2, 3)) + >>> R.edges + EdgeView([(0, 2), (1, 3)]) + """ + # create new graph + if not G.is_multigraph() == H.is_multigraph(): + raise nx.NetworkXError("G and H must both be graphs or multigraphs.") + R = nx.create_empty_copy(G, with_data=False) + + if set(G) != set(H): + raise nx.NetworkXError("Node sets of graphs not equal") + + if G.is_multigraph(): + edges = G.edges(keys=True) + else: + edges = G.edges() + for e in edges: + if not H.has_edge(*e): + R.add_edge(*e) + return R + + +@nx._dispatchable(graphs=_G_H, returns_graph=True) +def symmetric_difference(G, H): + """Returns new graph with edges that exist in either G or H but not both. + + The node sets of H and G must be the same. + + Parameters + ---------- + G,H : graph + A NetworkX graph. G and H must have the same node sets. + + Returns + ------- + D : A new graph with the same type as G. + + Notes + ----- + Attributes from the graph, nodes, and edges are not copied to the new + graph. 
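+
+    If the attributes are needed, one option is to copy them back from
+    whichever input graph owns each surviving edge; a minimal sketch for
+    simple graphs:
+
+    >>> G = nx.Graph([(0, 1, {"w": 7})])
+    >>> H = nx.empty_graph(2)  # same nodes {0, 1}, no edges
+    >>> R = nx.symmetric_difference(G, H)
+    >>> R.edges[0, 1].update(G.edges[0, 1])
+    >>> R.edges[0, 1]
+    {'w': 7}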
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3)])
+    >>> H = nx.Graph([(0, 1), (1, 2), (0, 3)])
+    >>> R = nx.symmetric_difference(G, H)
+    >>> R.nodes
+    NodeView((0, 1, 2, 3))
+    >>> R.edges
+    EdgeView([(0, 2), (0, 3), (1, 3)])
+    """
+    # create new graph
+    if not G.is_multigraph() == H.is_multigraph():
+        raise nx.NetworkXError("G and H must both be graphs or multigraphs.")
+    R = nx.create_empty_copy(G, with_data=False)
+
+    if set(G) != set(H):
+        raise nx.NetworkXError("Node sets of graphs not equal")
+
+    gnodes = set(G)  # set of nodes in G
+    hnodes = set(H)  # set of nodes in H
+    nodes = gnodes.symmetric_difference(hnodes)
+    R.add_nodes_from(nodes)
+
+    if G.is_multigraph():
+        edges = G.edges(keys=True)
+    else:
+        edges = G.edges()
+    # we could copy the data here but then this function doesn't
+    # match intersection and difference
+    for e in edges:
+        if not H.has_edge(*e):
+            R.add_edge(*e)
+
+    if H.is_multigraph():
+        edges = H.edges(keys=True)
+    else:
+        edges = H.edges()
+    for e in edges:
+        if not G.has_edge(*e):
+            R.add_edge(*e)
+    return R
+
+
+@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True)
+def compose(G, H):
+    """Compose graph G with H by combining nodes and edges into a single graph.
+
+    The node sets and edge sets do not need to be disjoint.
+
+    Composing preserves the attributes of nodes and edges.
+    Attribute values from H take precedence over attribute values from G.
+
+    Parameters
+    ----------
+    G, H : graph
+        A NetworkX graph
+
+    Returns
+    -------
+    C: A new graph with the same type as G
+
+    See Also
+    --------
+    :func:`~networkx.Graph.update`
+    union
+    disjoint_union
+
+    Notes
+    -----
+    It is recommended that G and H be either both directed or both undirected.
+
+    For MultiGraphs, the edges are identified by incident nodes AND edge-key.
+    This can cause surprises (i.e., edge `(1, 2)` may or may not be the same
+    in two graphs) if you use MultiGraph without keeping track of edge keys.
+
+    If combining the attributes of common nodes is not desired, consider union(),
+    which raises an exception for name collisions.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2)])
+    >>> H = nx.Graph([(0, 1), (1, 2)])
+    >>> R = nx.compose(G, H)
+    >>> R.nodes
+    NodeView((0, 1, 2))
+    >>> R.edges
+    EdgeView([(0, 1), (0, 2), (1, 2)])
+
+    By default, the attributes from `H` take precedence over attributes from `G`.
+    If you prefer another way of combining attributes, you can update them after the compose operation:
+
+    >>> G = nx.Graph([(0, 1, {"weight": 2.0}), (3, 0, {"weight": 100.0})])
+    >>> H = nx.Graph([(0, 1, {"weight": 10.0}), (1, 2, {"weight": -1.0})])
+    >>> nx.set_node_attributes(G, {0: "dark", 1: "light", 3: "black"}, name="color")
+    >>> nx.set_node_attributes(H, {0: "green", 1: "orange", 2: "yellow"}, name="color")
+    >>> GcomposeH = nx.compose(G, H)
+
+    Normally, color attribute values of nodes of GcomposeH come from H. We can work around this as follows:
+
+    >>> node_data = {
+    ...     n: G.nodes[n]["color"] + " " + H.nodes[n]["color"]
+    ...     for n in G.nodes & H.nodes
+    ... }
+    >>> nx.set_node_attributes(GcomposeH, node_data, "color")
+    >>> print(GcomposeH.nodes[0]["color"])
+    dark green
+
+    >>> print(GcomposeH.nodes[3]["color"])
+    black
+
+    Similarly, we can update edge attributes after the compose operation in a way we prefer:
+
+    >>> edge_data = {
+    ...     e: G.edges[e]["weight"] * H.edges[e]["weight"] for e in G.edges & H.edges
+    ... 
} + >>> nx.set_edge_attributes(GcomposeH, edge_data, "weight") + >>> print(GcomposeH.edges[(0, 1)]["weight"]) + 20.0 + + >>> print(GcomposeH.edges[(3, 0)]["weight"]) + 100.0 + """ + return nx.compose_all([G, H]) + + +@nx._dispatchable(graphs=_G_H, preserve_all_attrs=True, returns_graph=True) +def full_join(G, H, rename=(None, None)): + """Returns the full join of graphs G and H. + + Full join is the union of G and H in which all edges between + G and H are added. + The node sets of G and H must be disjoint, + otherwise an exception is raised. + + Parameters + ---------- + G, H : graph + A NetworkX graph + + rename : tuple , default=(None, None) + Node names of G and H can be changed by specifying the tuple + rename=('G-','H-') (for example). Node "u" in G is then renamed + "G-u" and "v" in H is renamed "H-v". + + Returns + ------- + U : The full join graph with the same type as G. + + Notes + ----- + It is recommended that G and H be either both directed or both undirected. + + If G is directed, then edges from G to H are added as well as from H to G. + + Note that full_join() does not produce parallel edges for MultiGraphs. + + The full join operation of graphs G and H is the same as getting + their complement, performing a disjoint union, and finally getting + the complement of the resulting graph. + + Graph, edge, and node attributes are propagated from G and H + to the union graph. If a graph attribute is present in both + G and H the value from H is used. + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.Graph([(0, 1), (0, 2)]) + >>> H = nx.Graph([(3, 4)]) + >>> R = nx.full_join(G, H, rename=("G", "H")) + >>> R.nodes + NodeView(('G0', 'G1', 'G2', 'H3', 'H4')) + >>> edgelist = list(R.edges) + >>> pprint(edgelist) + [('G0', 'G1'), + ('G0', 'G2'), + ('G0', 'H3'), + ('G0', 'H4'), + ('G1', 'H3'), + ('G1', 'H4'), + ('G2', 'H3'), + ('G2', 'H4'), + ('H3', 'H4')] + + See Also + -------- + union + disjoint_union + """ + R = union(G, H, rename) + + def add_prefix(graph, prefix): + if prefix is None: + return graph + + def label(x): + return f"{prefix}{x}" + + return nx.relabel_nodes(graph, label) + + G = add_prefix(G, rename[0]) + H = add_prefix(H, rename[1]) + + for i in G: + for j in H: + R.add_edge(i, j) + if R.is_directed(): + for i in H: + for j in G: + R.add_edge(i, j) + + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py new file mode 100644 index 0000000..28ca78b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/product.py @@ -0,0 +1,633 @@ +""" +Graph products. 
+""" + +from itertools import product + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "tensor_product", + "cartesian_product", + "lexicographic_product", + "strong_product", + "power", + "rooted_product", + "corona_product", + "modular_product", +] +_G_H = {"G": 0, "H": 1} + + +def _dict_product(d1, d2): + return {k: (d1.get(k), d2.get(k)) for k in set(d1) | set(d2)} + + +# Generators for producing graph products +def _node_product(G, H): + for u, v in product(G, H): + yield ((u, v), _dict_product(G.nodes[u], H.nodes[v])) + + +def _directed_edges_cross_edges(G, H): + if not G.is_multigraph() and not H.is_multigraph(): + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + yield (u, x), (v, y), _dict_product(c, d) + if not G.is_multigraph() and H.is_multigraph(): + for u, v, c in G.edges(data=True): + for x, y, k, d in H.edges(data=True, keys=True): + yield (u, x), (v, y), k, _dict_product(c, d) + if G.is_multigraph() and not H.is_multigraph(): + for u, v, k, c in G.edges(data=True, keys=True): + for x, y, d in H.edges(data=True): + yield (u, x), (v, y), k, _dict_product(c, d) + if G.is_multigraph() and H.is_multigraph(): + for u, v, j, c in G.edges(data=True, keys=True): + for x, y, k, d in H.edges(data=True, keys=True): + yield (u, x), (v, y), (j, k), _dict_product(c, d) + + +def _undirected_edges_cross_edges(G, H): + if not G.is_multigraph() and not H.is_multigraph(): + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + yield (v, x), (u, y), _dict_product(c, d) + if not G.is_multigraph() and H.is_multigraph(): + for u, v, c in G.edges(data=True): + for x, y, k, d in H.edges(data=True, keys=True): + yield (v, x), (u, y), k, _dict_product(c, d) + if G.is_multigraph() and not H.is_multigraph(): + for u, v, k, c in G.edges(data=True, keys=True): + for x, y, d in H.edges(data=True): + yield (v, x), (u, y), k, _dict_product(c, d) + if G.is_multigraph() and H.is_multigraph(): + for u, v, j, c in G.edges(data=True, keys=True): + for x, y, k, d in H.edges(data=True, keys=True): + yield (v, x), (u, y), (j, k), _dict_product(c, d) + + +def _edges_cross_nodes(G, H): + if G.is_multigraph(): + for u, v, k, d in G.edges(data=True, keys=True): + for x in H: + yield (u, x), (v, x), k, d + else: + for u, v, d in G.edges(data=True): + for x in H: + if H.is_multigraph(): + yield (u, x), (v, x), None, d + else: + yield (u, x), (v, x), d + + +def _nodes_cross_edges(G, H): + if H.is_multigraph(): + for x in G: + for u, v, k, d in H.edges(data=True, keys=True): + yield (x, u), (x, v), k, d + else: + for x in G: + for u, v, d in H.edges(data=True): + if G.is_multigraph(): + yield (x, u), (x, v), None, d + else: + yield (x, u), (x, v), d + + +def _edges_cross_nodes_and_nodes(G, H): + if G.is_multigraph(): + for u, v, k, d in G.edges(data=True, keys=True): + for x in H: + for y in H: + yield (u, x), (v, y), k, d + else: + for u, v, d in G.edges(data=True): + for x in H: + for y in H: + if H.is_multigraph(): + yield (u, x), (v, y), None, d + else: + yield (u, x), (v, y), d + + +def _init_product_graph(G, H): + if G.is_directed() != H.is_directed(): + msg = "G and H must be both directed or both undirected" + raise nx.NetworkXError(msg) + if G.is_multigraph() or H.is_multigraph(): + GH = nx.MultiGraph() + else: + GH = nx.Graph() + if G.is_directed(): + GH = GH.to_directed() + return GH + + +@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True) +def tensor_product(G, H): + r"""Returns the tensor product of G 
and H.
+
+    The tensor product $P$ of the graphs $G$ and $H$ has a node set that
+    is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
+    $P$ has an edge $((u,v), (x,y))$ if and only if $(u,x)$ is an edge in $G$
+    and $(v,y)$ is an edge in $H$.
+
+    The tensor product is sometimes also referred to as the categorical
+    product, direct product, cardinal product or conjunction.
+
+    Parameters
+    ----------
+    G, H: graphs
+        NetworkX graphs.
+
+    Returns
+    -------
+    P: NetworkX graph
+        The tensor product of G and H. P will be a multi-graph if either G
+        or H is a multi-graph, will be directed if G and H are directed,
+        and undirected if G and H are undirected.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Notes
+    -----
+    Node attributes in P are two-tuples of the G and H node attributes.
+    Missing attributes are assigned None.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> H = nx.Graph()
+    >>> G.add_node(0, a1=True)
+    >>> H.add_node("a", a2="Spam")
+    >>> P = nx.tensor_product(G, H)
+    >>> list(P)
+    [(0, 'a')]
+
+    Edge attributes and edge keys (for multigraphs) are also copied to the
+    new product graph.
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(_node_product(G, H))
+    GH.add_edges_from(_directed_edges_cross_edges(G, H))
+    if not GH.is_directed():
+        GH.add_edges_from(_undirected_edges_cross_edges(G, H))
+    return GH
+
+
+@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
+def cartesian_product(G, H):
+    r"""Returns the Cartesian product of G and H.
+
+    The Cartesian product $P$ of the graphs $G$ and $H$ has a node set that
+    is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
+    $P$ has an edge $((u,v),(x,y))$ if and only if either $u$ is equal to $x$
+    and both $v$ and $y$ are adjacent in $H$ or if $v$ is equal to $y$ and
+    both $u$ and $x$ are adjacent in $G$.
+
+    Parameters
+    ----------
+    G, H: graphs
+        NetworkX graphs.
+
+    Returns
+    -------
+    P: NetworkX graph
+        The Cartesian product of G and H. P will be a multi-graph if either G
+        or H is a multi-graph. It will be directed if G and H are directed,
+        and undirected if G and H are undirected.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Notes
+    -----
+    Node attributes in P are two-tuples of the G and H node attributes.
+    Missing attributes are assigned None.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> H = nx.Graph()
+    >>> G.add_node(0, a1=True)
+    >>> H.add_node("a", a2="Spam")
+    >>> P = nx.cartesian_product(G, H)
+    >>> list(P)
+    [(0, 'a')]
+
+    Edge attributes and edge keys (for multigraphs) are also copied to the
+    new product graph.
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(_node_product(G, H))
+    GH.add_edges_from(_edges_cross_nodes(G, H))
+    GH.add_edges_from(_nodes_cross_edges(G, H))
+    return GH
+
+
+@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
+def lexicographic_product(G, H):
+    r"""Returns the lexicographic product of G and H.
+
+    The lexicographical product $P$ of the graphs $G$ and $H$ has a node set
+    that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
+    $P$ has an edge $((u,v), (x,y))$ if and only if $(u,v)$ is an edge in $G$
+    or $u=v$ and $(x,y)$ is an edge in $H$.
+
+    Parameters
+    ----------
+    G, H: graphs
+        NetworkX graphs.
+
+    Returns
+    -------
+    P: NetworkX graph
+        The lexicographic product of G and H. P will be a multi-graph if
+        either G or H is a multi-graph. It will be directed if G and H are
+        directed, and undirected if G and H are undirected.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Notes
+    -----
+    Node attributes in P are two-tuples of the G and H node attributes.
+    Missing attributes are assigned None.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> H = nx.Graph()
+    >>> G.add_node(0, a1=True)
+    >>> H.add_node("a", a2="Spam")
+    >>> P = nx.lexicographic_product(G, H)
+    >>> list(P)
+    [(0, 'a')]
+
+    Edge attributes and edge keys (for multigraphs) are also copied to the
+    new product graph.
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(_node_product(G, H))
+    # Edges in G regardless of H designation
+    GH.add_edges_from(_edges_cross_nodes_and_nodes(G, H))
+    # For each x in G, only if there is an edge in H
+    GH.add_edges_from(_nodes_cross_edges(G, H))
+    return GH
+
+
+@nx._dispatchable(graphs=_G_H, preserve_node_attrs=True, returns_graph=True)
+def strong_product(G, H):
+    r"""Returns the strong product of G and H.
+
+    The strong product $P$ of the graphs $G$ and $H$ has a node set that
+    is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$.
+    $P$ has an edge $((u,x), (v,y))$ if any of the following conditions
+    are met:
+
+    - $u=v$ and $(x,y)$ is an edge in $H$
+    - $x=y$ and $(u,v)$ is an edge in $G$
+    - $(u,v)$ is an edge in $G$ and $(x,y)$ is an edge in $H$
+
+    Parameters
+    ----------
+    G, H: graphs
+        NetworkX graphs.
+
+    Returns
+    -------
+    P: NetworkX graph
+        The strong product of G and H. P will be a multi-graph if either G
+        or H is a multi-graph. It will be directed if G and H are directed,
+        and undirected if G and H are undirected.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Notes
+    -----
+    Node attributes in P are two-tuples of the G and H node attributes.
+    Missing attributes are assigned None.
+
+    Examples
+    --------
+    >>> G = nx.Graph()
+    >>> H = nx.Graph()
+    >>> G.add_node(0, a1=True)
+    >>> H.add_node("a", a2="Spam")
+    >>> P = nx.strong_product(G, H)
+    >>> list(P)
+    [(0, 'a')]
+
+    Edge attributes and edge keys (for multigraphs) are also copied to the
+    new product graph.
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(_node_product(G, H))
+    GH.add_edges_from(_nodes_cross_edges(G, H))
+    GH.add_edges_from(_edges_cross_nodes(G, H))
+    GH.add_edges_from(_directed_edges_cross_edges(G, H))
+    if not GH.is_directed():
+        GH.add_edges_from(_undirected_edges_cross_edges(G, H))
+    return GH
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(returns_graph=True)
+def power(G, k):
+    """Returns the specified power of a graph.
+
+    The $k$th power of a simple graph $G$, denoted $G^k$, is a
+    graph on the same set of nodes in which two distinct nodes $u$ and
+    $v$ are adjacent in $G^k$ if and only if the shortest path
+    distance between $u$ and $v$ in $G$ is at most $k$.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX simple graph object.
+
+    k : positive integer
+        The power to which to raise the graph `G`.
+
+    Returns
+    -------
+    NetworkX simple graph
+        `G` to the power `k`.
+
+    Raises
+    ------
+    ValueError
+        If the exponent `k` is not positive.
+
+    NetworkXNotImplemented
+        If `G` is not a simple graph.
+
+    Examples
+    --------
+    The number of edges will never decrease when taking successive
+    powers:
+
+    >>> G = nx.path_graph(4)
+    >>> list(nx.power(G, 2).edges)
+    [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]
+    >>> list(nx.power(G, 3).edges)
+    [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
+
+    The `k` th power of a cycle graph on *n* nodes is the complete graph
+    on *n* nodes, if `k` is at least ``n // 2``:
+
+    >>> G = nx.cycle_graph(5)
+    >>> H = nx.complete_graph(5)
+    >>> nx.is_isomorphic(nx.power(G, 2), H)
+    True
+    >>> G = nx.cycle_graph(8)
+    >>> H = nx.complete_graph(8)
+    >>> nx.is_isomorphic(nx.power(G, 4), H)
+    True
+
+    Notes
+    -----
+    This definition of "power graph" comes from Exercise 3.1.6 of
+    *Graph Theory* by Bondy and Murty [1]_.
+
+    References
+    ----------
+    .. [1] J. A. Bondy, U. S. R. Murty, *Graph Theory*. Springer, 2008.
+
+    """
+    if k <= 0:
+        raise ValueError("k must be a positive integer")
+    H = nx.Graph()
+    H.add_nodes_from(G)
+    # BFS from each node up to distance k, ignoring self-loops
+    for n in G:
+        seen = {}  # level (number of hops) when seen in BFS
+        level = 1  # the current level
+        nextlevel = G[n]
+        while nextlevel:
+            thislevel = nextlevel  # advance to next level
+            nextlevel = {}  # and start a new list (fringe)
+            for v in thislevel:
+                if v == n:  # avoid self loop
+                    continue
+                if v not in seen:
+                    seen[v] = level  # set the level of vertex v
+                    nextlevel.update(G[v])  # add neighbors of v
+            if k <= level:
+                break
+            level += 1
+        H.add_edges_from((n, nbr) for nbr in seen)
+    return H
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs=_G_H, returns_graph=True)
+def rooted_product(G, H, root):
+    """Return the rooted product of graphs G and H rooted at root in H.
+
+    A new graph is constructed representing the rooted product of
+    the input graphs, G and H, with a root in H.
+    A rooted product duplicates H for each node in G, with the root
+    of H corresponding to the node in G. Nodes are renamed as the direct
+    product of G and H. The result is a subgraph of the Cartesian product.
+
+    Parameters
+    ----------
+    G, H : graph
+        A NetworkX graph
+    root : node
+        A node in H
+
+    Returns
+    -------
+    R : The rooted product of G and H with a specified root in H.
+
+    Notes
+    -----
+    The nodes of R are the Cartesian product of the nodes of G and H.
+    The nodes of G and H are not relabeled.
+    """
+    if root not in H:
+        raise nx.NodeNotFound("root must be a vertex in H")
+
+    R = nx.Graph()
+    R.add_nodes_from(product(G, H))
+
+    R.add_edges_from(((e[0], root), (e[1], root)) for e in G.edges())
+    R.add_edges_from(((g, e[0]), (g, e[1])) for g in G for e in H.edges())
+
+    return R
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(graphs=_G_H, returns_graph=True)
+def corona_product(G, H):
+    r"""Returns the corona product of G and H.
+
+    The corona product of $G$ and $H$ is the graph $C = G \circ H$ obtained by
+    taking one copy of $G$, called the center graph, $|V(G)|$ copies of $H$,
+    called the outer graph, and making the $i$-th vertex of $G$ adjacent to
+    every vertex of the $i$-th copy of $H$, where $1 \leq i \leq |V(G)|$.
+
+    Parameters
+    ----------
+    G, H: NetworkX graphs
+        The graphs to take the corona product of.
+        `G` is the center graph and `H` is the outer graph.
+
+    Returns
+    -------
+    C: NetworkX graph
+        The corona product of G and H.
+
+    Raises
+    ------
+    NetworkXError
+        If G and H are not both directed or both undirected.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> H = nx.path_graph(2)
+    >>> C = nx.corona_product(G, H)
+    >>> list(C)
+    [0, 1, 2, 3, (0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
+    >>> print(C)
+    Graph with 12 nodes and 16 edges
+
+    References
+    ----------
+    .. [1] M. Tavakoli, F. Rahbarnia, and A. R. Ashrafi,
+        "Studying the corona product of graphs under some graph invariants,"
+        Transactions on Combinatorics, vol. 3, no. 3, pp. 43–49, Sep. 2014,
+        doi: 10.22108/toc.2014.5542.
+    .. [2] A. Faraji, "Corona Product in Graph Theory," Ali Faraji, May 11, 2021.
+        https://blog.alifaraji.ir/math/graph-theory/corona-product.html (accessed Dec. 07, 2021).
+    """
+    GH = _init_product_graph(G, H)
+    GH.add_nodes_from(G)
+    GH.add_edges_from(G.edges)
+
+    for G_node in G:
+        # copy the nodes of H into GH; call this copy H_i
+        GH.add_nodes_from((G_node, v) for v in H)
+
+        # copy the edges of H_i based on H
+        GH.add_edges_from(
+            ((G_node, e0), (G_node, e1), d) for e0, e1, d in H.edges.data()
+        )
+
+        # create the new edges between H_i and its node in G
+        GH.add_edges_from((G_node, (G_node, H_node)) for H_node in H)
+
+    return GH
+
+
+@nx._dispatchable(
+    graphs=_G_H, preserve_edge_attrs=True, preserve_node_attrs=True, returns_graph=True
+)
+def modular_product(G, H):
+    r"""Returns the modular product of G and H.
+
+    The modular product of `G` and `H` is the graph $M = G \nabla H$,
+    consisting of the node set $V(M) = V(G) \times V(H)$ that is the Cartesian
+    product of the node sets of `G` and `H`. Further, M contains an edge ((u, v), (x, y)):
+
+    - if u is adjacent to x in `G` and v is adjacent to y in `H`, or
+    - if u is not adjacent to x in `G` and v is not adjacent to y in `H`.
+
+    More formally::
+
+        E(M) = {((u, v), (x, y)) | ((u, x) in E(G) and (v, y) in E(H)) or
+                ((u, x) not in E(G) and (v, y) not in E(H))}
+
+    Parameters
+    ----------
+    G, H: NetworkX graphs
+        The graphs to take the modular product of.
+
+    Returns
+    -------
+    M: NetworkX graph
+        The modular product of `G` and `H`.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If `G` or `H` is a directed graph or a multigraph.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4)
+    >>> H = nx.path_graph(2)
+    >>> M = nx.modular_product(G, H)
+    >>> list(M)
+    [(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1), (3, 0), (3, 1)]
+    >>> print(M)
+    Graph with 8 nodes and 8 edges
+
+    Notes
+    -----
+    The *modular product* is defined in [1]_ and was first
+    introduced as the *weak modular product*.
+
+    The modular product reduces the problem of counting isomorphic subgraphs
+    in `G` and `H` to the problem of counting cliques in M. The subgraphs of
+    `G` and `H` that are induced by the nodes of a clique in M are
+    isomorphic [2]_ [3]_.
+
+    References
+    ----------
+    .. [1] R. Hammack, W. Imrich, and S. Klavžar,
+        "Handbook of Product Graphs", CRC Press, 2011.
+
+    .. [2] H. G. Barrow and R. M. Burstall,
+        "Subgraph isomorphism, matching relational structures and maximal
+        cliques", Information Processing Letters, vol. 4, issue 4, pp. 83-84,
+        1976, https://doi.org/10.1016/0020-0190(76)90049-1.
+
+    .. [3] V. G. Vizing, "Reduction of the problem of isomorphism and isomorphic
+        entrance to the task of finding the nondensity of a graph." Proc. Third
+        All-Union Conference on Problems of Theoretical Cybernetics. 1974.
+ """ + if G.is_directed() or H.is_directed(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for directed graphs" + ) + if G.is_multigraph() or H.is_multigraph(): + raise nx.NetworkXNotImplemented( + "Modular product not implemented for multigraphs" + ) + + GH = _init_product_graph(G, H) + GH.add_nodes_from(_node_product(G, H)) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + G = nx.complement(G) + H = nx.complement(H) + + for u, v, c in G.edges(data=True): + for x, y, d in H.edges(data=True): + GH.add_edge((u, x), (v, y), **_dict_product(c, d)) + GH.add_edge((v, x), (u, y), **_dict_product(c, d)) + + return GH diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..4cd30ce Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_all.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_all.cpython-312.pyc new file mode 100644 index 0000000..9737b35 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_all.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-312.pyc new file mode 100644 index 0000000..1b30347 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_binary.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_product.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_product.cpython-312.pyc new file mode 100644 index 0000000..22eba41 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_product.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_unary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_unary.cpython-312.pyc new file mode 100644 index 0000000..2497925 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/__pycache__/test_unary.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py new file mode 100644 index 0000000..8ec29c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_all.py @@ -0,0 +1,328 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal + 
+ +def test_union_all_attributes(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + + j = g.copy() + j.graph["name"] = "j" + j.graph["attr"] = "attr" + j.nodes[0]["x"] = 7 + + ghj = nx.union_all([g, h, j], rename=("g", "h", "j")) + assert set(ghj.nodes()) == {"h0", "h1", "g0", "g1", "j0", "j1"} + for n in ghj: + graph, node = n + assert ghj.nodes[n] == eval(graph).nodes[int(node)] + + assert ghj.graph["attr"] == "attr" + assert ghj.graph["name"] == "j" # j graph attributes take precedent + + +def test_intersection_all(): + G = nx.Graph() + H = nx.Graph() + R = nx.Graph(awesome=True) + G.add_nodes_from([1, 2, 3, 4]) + G.add_edge(1, 2) + G.add_edge(2, 3) + H.add_nodes_from([1, 2, 3, 4]) + H.add_edge(2, 3) + H.add_edge(3, 4) + R.add_nodes_from([1, 2, 3, 4]) + R.add_edge(2, 3) + R.add_edge(4, 1) + I = nx.intersection_all([G, H, R]) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] + assert I.graph == {} + + +def test_intersection_all_different_node_sets(): + G = nx.Graph() + H = nx.Graph() + R = nx.Graph() + G.add_nodes_from([1, 2, 3, 4, 6, 7]) + G.add_edge(1, 2) + G.add_edge(2, 3) + G.add_edge(6, 7) + H.add_nodes_from([1, 2, 3, 4]) + H.add_edge(2, 3) + H.add_edge(3, 4) + R.add_nodes_from([1, 2, 3, 4, 8, 9]) + R.add_edge(2, 3) + R.add_edge(4, 1) + R.add_edge(8, 9) + I = nx.intersection_all([G, H, R]) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] + + +def test_intersection_all_attributes(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + + gh = nx.intersection_all([g, h]) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) + + +def test_intersection_all_attributes_different_node_sets(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + g.add_node(2) + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + + gh = nx.intersection_all([g, h]) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) + + +def test_intersection_all_multigraph_attributes(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + h = nx.MultiGraph() + h.add_edge(0, 1, key=0) + h.add_edge(0, 1, key=3) + gh = nx.intersection_all([g, h]) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0)] + + +def test_intersection_all_multigraph_attributes_different_node_sets(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + g.add_edge(1, 2, key=1) + g.add_edge(1, 2, key=2) + h = nx.MultiGraph() + h.add_edge(0, 1, key=0) + h.add_edge(0, 1, key=2) + h.add_edge(0, 1, key=3) + gh = nx.intersection_all([g, h]) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1), (0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0), (0, 1, 2)] + + +def test_intersection_all_digraph(): + g = nx.DiGraph() + g.add_edges_from([(1, 2), (2, 3)]) + h = nx.DiGraph() + h.add_edges_from([(2, 1), (2, 3)]) + gh = nx.intersection_all([g, 
h]) + assert sorted(gh.edges()) == [(2, 3)] + + +def test_union_all_and_compose_all(): + K3 = nx.complete_graph(3) + P3 = nx.path_graph(3) + + G1 = nx.DiGraph() + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") + G2 = nx.DiGraph() + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") + + G = nx.union_all([G1, G2]) + H = nx.compose_all([G1, G2]) + assert edges_equal(G.edges(), H.edges()) + assert not G.has_edge("A", "1") + pytest.raises(nx.NetworkXError, nx.union, K3, P3) + H1 = nx.union_all([H, G1], rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + H2 = nx.union_all([H, G2], rename=("H", "")) + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") + + G = nx.compose_all([G, G]) + assert edges_equal(G.edges(), H.edges()) + + G2 = nx.union_all([G2, G2], rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] + assert len(G) == 8 + assert nx.number_of_edges(G) == 6 + + E = nx.disjoint_union_all([G, G]) + assert len(E) == 16 + assert nx.number_of_edges(E) == 12 + + E = nx.disjoint_union_all([G1, G2]) + assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + + G1 = nx.DiGraph() + G1.add_edge("A", "B") + G2 = nx.DiGraph() + G2.add_edge(1, 2) + G3 = nx.DiGraph() + G3.add_edge(11, 22) + G4 = nx.union_all([G1, G2, G3], rename=("G1", "G2", "G3")) + assert sorted(G4.nodes()) == ["G1A", "G1B", "G21", "G22", "G311", "G322"] + + +def test_union_all_multigraph(): + G = nx.MultiGraph() + G.add_edge(1, 2, key=0) + G.add_edge(1, 2, key=1) + H = nx.MultiGraph() + H.add_edge(3, 4, key=0) + H.add_edge(3, 4, key=1) + GH = nx.union_all([G, H]) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) + + +def test_input_output(): + l = [nx.Graph([(1, 2)]), nx.Graph([(3, 4)], awesome=True)] + U = nx.disjoint_union_all(l) + assert len(l) == 2 + assert U.graph["awesome"] + C = nx.compose_all(l) + assert len(l) == 2 + l = [nx.Graph([(1, 2)]), nx.Graph([(1, 2)])] + R = nx.intersection_all(l) + assert len(l) == 2 + + +def test_mixed_type_union(): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.union_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + X = nx.Graph() + Y = nx.DiGraph() + XY = nx.union_all([X, Y]) + + +def test_mixed_type_disjoint_union(): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.disjoint_union_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + X = nx.Graph() + Y = nx.DiGraph() + XY = nx.disjoint_union_all([X, Y]) + + +def test_mixed_type_intersection(): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.intersection_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + X = nx.Graph() + Y = nx.DiGraph() + XY = nx.intersection_all([X, Y]) + + +def test_mixed_type_compose(): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.MultiGraph() + I = nx.Graph() + U = nx.compose_all([G, H, I]) + with pytest.raises(nx.NetworkXError): + X = nx.Graph() + Y = nx.DiGraph() + XY = 
nx.compose_all([X, Y]) + + +def test_empty_union(): + with pytest.raises(ValueError): + nx.union_all([]) + + +def test_empty_disjoint_union(): + with pytest.raises(ValueError): + nx.disjoint_union_all([]) + + +def test_empty_compose_all(): + with pytest.raises(ValueError): + nx.compose_all([]) + + +def test_empty_intersection_all(): + with pytest.raises(ValueError): + nx.intersection_all([]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py new file mode 100644 index 0000000..e8e534e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_binary.py @@ -0,0 +1,451 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +def test_union_attributes(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + + gh = nx.union(g, h, rename=("g", "h")) + assert set(gh.nodes()) == {"h0", "h1", "g0", "g1"} + for n in gh: + graph, node = n + assert gh.nodes[n] == eval(graph).nodes[int(node)] + + assert gh.graph["attr"] == "attr" + assert gh.graph["name"] == "h" # h graph attributes take precedent + + +def test_intersection(): + G = nx.Graph() + H = nx.Graph() + G.add_nodes_from([1, 2, 3, 4]) + G.add_edge(1, 2) + G.add_edge(2, 3) + H.add_nodes_from([1, 2, 3, 4]) + H.add_edge(2, 3) + H.add_edge(3, 4) + I = nx.intersection(G, H) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] + + +def test_intersection_node_sets_different(): + G = nx.Graph() + H = nx.Graph() + G.add_nodes_from([1, 2, 3, 4, 7]) + G.add_edge(1, 2) + G.add_edge(2, 3) + H.add_nodes_from([1, 2, 3, 4, 5, 6]) + H.add_edge(2, 3) + H.add_edge(3, 4) + H.add_edge(5, 6) + I = nx.intersection(G, H) + assert set(I.nodes()) == {1, 2, 3, 4} + assert sorted(I.edges()) == [(2, 3)] + + +def test_intersection_attributes(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + gh = nx.intersection(g, h) + + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) + + +def test_intersection_attributes_node_sets_different(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_node(2, x=3) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + h.remove_node(2) + + gh = nx.intersection(g, h) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == sorted(g.edges()) + + +def test_intersection_multigraph_attributes(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + h = nx.MultiGraph() + h.add_edge(0, 1, key=0) + h.add_edge(0, 1, key=3) + gh = nx.intersection(g, h) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0)] + + +def test_intersection_multigraph_attributes_node_set_different(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + g.add_edge(0, 2, key=2) + g.add_edge(0, 2, key=1) + h = nx.MultiGraph() + h.add_edge(0, 1, 
key=0) + h.add_edge(0, 1, key=3) + gh = nx.intersection(g, h) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 0)] + + +def test_difference(): + G = nx.Graph() + H = nx.Graph() + G.add_nodes_from([1, 2, 3, 4]) + G.add_edge(1, 2) + G.add_edge(2, 3) + H.add_nodes_from([1, 2, 3, 4]) + H.add_edge(2, 3) + H.add_edge(3, 4) + D = nx.difference(G, H) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(1, 2)] + D = nx.difference(H, G) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(3, 4)] + D = nx.symmetric_difference(G, H) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(1, 2), (3, 4)] + + +def test_difference2(): + G = nx.Graph() + H = nx.Graph() + G.add_nodes_from([1, 2, 3, 4]) + H.add_nodes_from([1, 2, 3, 4]) + G.add_edge(1, 2) + H.add_edge(1, 2) + G.add_edge(2, 3) + D = nx.difference(G, H) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(2, 3)] + D = nx.difference(H, G) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [] + H.add_edge(3, 4) + D = nx.difference(H, G) + assert set(D.nodes()) == {1, 2, 3, 4} + assert sorted(D.edges()) == [(3, 4)] + + +def test_difference_attributes(): + g = nx.Graph() + g.add_node(0, x=4) + g.add_node(1, x=5) + g.add_edge(0, 1, size=5) + g.graph["name"] = "g" + + h = g.copy() + h.graph["name"] = "h" + h.graph["attr"] = "attr" + h.nodes[0]["x"] = 7 + + gh = nx.difference(g, h) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [] + # node and graph data should not be copied over + assert gh.nodes.data() != g.nodes.data() + assert gh.graph != g.graph + + +def test_difference_multigraph_attributes(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + h = nx.MultiGraph() + h.add_edge(0, 1, key=0) + h.add_edge(0, 1, key=3) + gh = nx.difference(g, h) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == [(0, 1), (0, 1)] + assert sorted(gh.edges(keys=True)) == [(0, 1, 1), (0, 1, 2)] + + +def test_difference_raise(): + G = nx.path_graph(4) + H = nx.path_graph(3) + pytest.raises(nx.NetworkXError, nx.difference, G, H) + pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H) + + +def test_symmetric_difference_multigraph(): + g = nx.MultiGraph() + g.add_edge(0, 1, key=0) + g.add_edge(0, 1, key=1) + g.add_edge(0, 1, key=2) + h = nx.MultiGraph() + h.add_edge(0, 1, key=0) + h.add_edge(0, 1, key=3) + gh = nx.symmetric_difference(g, h) + assert set(gh.nodes()) == set(g.nodes()) + assert set(gh.nodes()) == set(h.nodes()) + assert sorted(gh.edges()) == 3 * [(0, 1)] + assert sorted(sorted(e) for e in gh.edges(keys=True)) == [ + [0, 1, 1], + [0, 1, 2], + [0, 1, 3], + ] + + +def test_union_and_compose(): + K3 = nx.complete_graph(3) + P3 = nx.path_graph(3) + + G1 = nx.DiGraph() + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") + G2 = nx.DiGraph() + G2.add_edge("1", "2") + G2.add_edge("1", "3") + G2.add_edge("1", "4") + + G = nx.union(G1, G2) + H = nx.compose(G1, G2) + assert edges_equal(G.edges(), H.edges()) + assert not G.has_edge("A", 1) + pytest.raises(nx.NetworkXError, nx.union, K3, P3) + H1 = nx.union(H, G1, rename=("H", "G1")) + assert sorted(H1.nodes()) == [ + "G1A", + "G1B", + "G1C", + "G1D", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + H2 = nx.union(H, G2, 
rename=("H", "")) + assert sorted(H2.nodes()) == [ + "1", + "2", + "3", + "4", + "H1", + "H2", + "H3", + "H4", + "HA", + "HB", + "HC", + "HD", + ] + + assert not H1.has_edge("NB", "NA") + + G = nx.compose(G, G) + assert edges_equal(G.edges(), H.edges()) + + G2 = nx.union(G2, G2, rename=("", "copy")) + assert sorted(G2.nodes()) == [ + "1", + "2", + "3", + "4", + "copy1", + "copy2", + "copy3", + "copy4", + ] + + assert sorted(G2.neighbors("copy4")) == [] + assert sorted(G2.neighbors("copy1")) == ["copy2", "copy3", "copy4"] + assert len(G) == 8 + assert nx.number_of_edges(G) == 6 + + E = nx.disjoint_union(G, G) + assert len(E) == 16 + assert nx.number_of_edges(E) == 12 + + E = nx.disjoint_union(G1, G2) + assert sorted(E.nodes()) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + + G = nx.Graph() + H = nx.Graph() + G.add_nodes_from([(1, {"a1": 1})]) + H.add_nodes_from([(1, {"b1": 1})]) + R = nx.compose(G, H) + assert R.nodes == {1: {"a1": 1, "b1": 1}} + + +def test_union_multigraph(): + G = nx.MultiGraph() + G.add_edge(1, 2, key=0) + G.add_edge(1, 2, key=1) + H = nx.MultiGraph() + H.add_edge(3, 4, key=0) + H.add_edge(3, 4, key=1) + GH = nx.union(G, H) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) + + +def test_disjoint_union_multigraph(): + G = nx.MultiGraph() + G.add_edge(0, 1, key=0) + G.add_edge(0, 1, key=1) + H = nx.MultiGraph() + H.add_edge(2, 3, key=0) + H.add_edge(2, 3, key=1) + GH = nx.disjoint_union(G, H) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) + + +def test_compose_multigraph(): + G = nx.MultiGraph() + G.add_edge(1, 2, key=0) + G.add_edge(1, 2, key=1) + H = nx.MultiGraph() + H.add_edge(3, 4, key=0) + H.add_edge(3, 4, key=1) + GH = nx.compose(G, H) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) + H.add_edge(1, 2, key=2) + GH = nx.compose(G, H) + assert set(GH) == set(G) | set(H) + assert set(GH.edges(keys=True)) == set(G.edges(keys=True)) | set(H.edges(keys=True)) + + +def test_full_join_graph(): + # Simple Graphs + G = nx.Graph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.Graph() + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # Rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # Rename graphs with string-like nodes + G = nx.Graph() + G.add_node("a") + G.add_edge("b", "c") + H = nx.Graph() + H.add_edge("d", "e") + + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"ga", "gb", "gc", "hd", "he"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # DiGraphs + G = nx.DiGraph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.DiGraph() + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 + + # DiGraphs Rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 + + +def 
test_full_join_multigraph(): + # MultiGraphs + G = nx.MultiGraph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.MultiGraph() + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # MultiGraphs rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) + + # MultiDiGraphs + G = nx.MultiDiGraph() + G.add_node(0) + G.add_edge(1, 2) + H = nx.MultiDiGraph() + H.add_edge(3, 4) + + U = nx.full_join(G, H) + assert set(U) == set(G) | set(H) + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 + + # MultiDiGraphs rename + U = nx.full_join(G, H, rename=("g", "h")) + assert set(U) == {"g0", "g1", "g2", "h3", "h4"} + assert len(U) == len(G) + len(H) + assert len(U.edges()) == len(G.edges()) + len(H.edges()) + len(G) * len(H) * 2 + + +def test_mixed_type_union(): + G = nx.Graph() + H = nx.MultiGraph() + pytest.raises(nx.NetworkXError, nx.union, G, H) + pytest.raises(nx.NetworkXError, nx.disjoint_union, G, H) + pytest.raises(nx.NetworkXError, nx.intersection, G, H) + pytest.raises(nx.NetworkXError, nx.difference, G, H) + pytest.raises(nx.NetworkXError, nx.symmetric_difference, G, H) + pytest.raises(nx.NetworkXError, nx.compose, G, H) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py new file mode 100644 index 0000000..8ee54b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_product.py @@ -0,0 +1,491 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +def test_tensor_product_raises(): + with pytest.raises(nx.NetworkXError): + P = nx.tensor_product(nx.DiGraph(), nx.Graph()) + + +def test_tensor_product_null(): + null = nx.null_graph() + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K10 = nx.complete_graph(10) + P3 = nx.path_graph(3) + P10 = nx.path_graph(10) + # null graph + G = nx.tensor_product(null, null) + assert nx.is_isomorphic(G, null) + # null_graph X anything = null_graph and v.v. 
+ G = nx.tensor_product(null, empty10) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(null, K3) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(null, K10) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(null, P3) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(null, P10) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(empty10, null) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(K3, null) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(K10, null) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(P3, null) + assert nx.is_isomorphic(G, null) + G = nx.tensor_product(P10, null) + assert nx.is_isomorphic(G, null) + + +def test_tensor_product_size(): + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + + G = nx.tensor_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.tensor_product(K3, K5) + assert nx.number_of_nodes(G) == 3 * 5 + + +def test_tensor_product_combinations(): + # basic smoke test, more realistic tests would be useful + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.tensor_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.tensor_product(P5, nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.tensor_product(nx.MultiGraph(P5), K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.tensor_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + + G = nx.tensor_product(nx.DiGraph(P5), nx.DiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + + +def test_tensor_product_classic_result(): + K2 = nx.complete_graph(2) + G = nx.petersen_graph() + G = nx.tensor_product(G, K2) + assert nx.is_isomorphic(G, nx.desargues_graph()) + + G = nx.cycle_graph(5) + G = nx.tensor_product(G, K2) + assert nx.is_isomorphic(G, nx.cycle_graph(10)) + + G = nx.tetrahedral_graph() + G = nx.tensor_product(G, K2) + assert nx.is_isomorphic(G, nx.cubical_graph()) + + +def test_tensor_product_random(): + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) + GH = nx.tensor_product(G, H) + + for u_G, u_H in GH.nodes(): + for v_G, v_H in GH.nodes(): + if H.has_edge(u_H, v_H) and G.has_edge(u_G, v_G): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) + else: + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) + + +def test_cartesian_product_multigraph(): + G = nx.MultiGraph() + G.add_edge(1, 2, key=0) + G.add_edge(1, 2, key=1) + H = nx.MultiGraph() + H.add_edge(3, 4, key=0) + H.add_edge(3, 4, key=1) + GH = nx.cartesian_product(G, H) + assert set(GH) == {(1, 3), (2, 3), (2, 4), (1, 4)} + assert {(frozenset([u, v]), k) for u, v, k in GH.edges(keys=True)} == { + (frozenset([u, v]), k) + for u, v, k in [ + ((1, 3), (2, 3), 0), + ((1, 3), (2, 3), 1), + ((1, 3), (1, 4), 0), + ((1, 3), (1, 4), 1), + ((2, 3), (2, 4), 0), + ((2, 3), (2, 4), 1), + ((2, 4), (1, 4), 0), + ((2, 4), (1, 4), 1), + ] + } + + +def test_cartesian_product_raises(): + with pytest.raises(nx.NetworkXError): + P = nx.cartesian_product(nx.DiGraph(), nx.Graph()) + + +def test_cartesian_product_null(): + null = nx.null_graph() + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K10 = nx.complete_graph(10) + P3 = nx.path_graph(3) + P10 = nx.path_graph(10) + # null graph + G = nx.cartesian_product(null, null) + assert nx.is_isomorphic(G, null) + # null_graph X anything = null_graph and v.v. 
+ G = nx.cartesian_product(null, empty10) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(null, K3) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(null, K10) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(null, P3) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(null, P10) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(empty10, null) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(K3, null) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(K10, null) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(P3, null) + assert nx.is_isomorphic(G, null) + G = nx.cartesian_product(P10, null) + assert nx.is_isomorphic(G, null) + + +def test_cartesian_product_size(): + # order(GXH)=order(G)*order(H) + K5 = nx.complete_graph(5) + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.cartesian_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + assert nx.number_of_edges(G) == nx.number_of_edges(P5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(P5) + G = nx.cartesian_product(K3, K5) + assert nx.number_of_nodes(G) == 3 * 5 + assert nx.number_of_edges(G) == nx.number_of_edges(K5) * nx.number_of_nodes( + K3 + ) + nx.number_of_edges(K3) * nx.number_of_nodes(K5) + + +def test_cartesian_product_classic(): + # test some classic product graphs + P2 = nx.path_graph(2) + P3 = nx.path_graph(3) + # cube = 2-path X 2-path + G = nx.cartesian_product(P2, P2) + G = nx.cartesian_product(P2, G) + assert nx.is_isomorphic(G, nx.cubical_graph()) + + # 3x3 grid + G = nx.cartesian_product(P3, P3) + assert nx.is_isomorphic(G, nx.grid_2d_graph(3, 3)) + + +def test_cartesian_product_random(): + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) + GH = nx.cartesian_product(G, H) + + for u_G, u_H in GH.nodes(): + for v_G, v_H in GH.nodes(): + if (u_G == v_G and H.has_edge(u_H, v_H)) or ( + u_H == v_H and G.has_edge(u_G, v_G) + ): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) + else: + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) + + +def test_lexicographic_product_raises(): + with pytest.raises(nx.NetworkXError): + P = nx.lexicographic_product(nx.DiGraph(), nx.Graph()) + + +def test_lexicographic_product_null(): + null = nx.null_graph() + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K10 = nx.complete_graph(10) + P3 = nx.path_graph(3) + P10 = nx.path_graph(10) + # null graph + G = nx.lexicographic_product(null, null) + assert nx.is_isomorphic(G, null) + # null_graph X anything = null_graph and v.v. 
+ G = nx.lexicographic_product(null, empty10) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(null, K3) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(null, K10) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(null, P3) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(null, P10) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(empty10, null) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(K3, null) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(K10, null) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(P3, null) + assert nx.is_isomorphic(G, null) + G = nx.lexicographic_product(P10, null) + assert nx.is_isomorphic(G, null) + + +def test_lexicographic_product_size(): + K5 = nx.complete_graph(5) + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.lexicographic_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.lexicographic_product(K3, K5) + assert nx.number_of_nodes(G) == 3 * 5 + + +def test_lexicographic_product_combinations(): + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.lexicographic_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.lexicographic_product(nx.MultiGraph(P5), K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.lexicographic_product(P5, nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.lexicographic_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + + # No classic easily found classic results for lexicographic product + + +def test_lexicographic_product_random(): + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) + GH = nx.lexicographic_product(G, H) + + for u_G, u_H in GH.nodes(): + for v_G, v_H in GH.nodes(): + if G.has_edge(u_G, v_G) or (u_G == v_G and H.has_edge(u_H, v_H)): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) + else: + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) + + +def test_strong_product_raises(): + with pytest.raises(nx.NetworkXError): + P = nx.strong_product(nx.DiGraph(), nx.Graph()) + + +def test_strong_product_null(): + null = nx.null_graph() + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K10 = nx.complete_graph(10) + P3 = nx.path_graph(3) + P10 = nx.path_graph(10) + # null graph + G = nx.strong_product(null, null) + assert nx.is_isomorphic(G, null) + # null_graph X anything = null_graph and v.v. 
+ G = nx.strong_product(null, empty10) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(null, K3) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(null, K10) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(null, P3) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(null, P10) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(empty10, null) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(K3, null) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(K10, null) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(P3, null) + assert nx.is_isomorphic(G, null) + G = nx.strong_product(P10, null) + assert nx.is_isomorphic(G, null) + + +def test_strong_product_size(): + K5 = nx.complete_graph(5) + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.strong_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.strong_product(K3, K5) + assert nx.number_of_nodes(G) == 3 * 5 + + +def test_strong_product_combinations(): + P5 = nx.path_graph(5) + K3 = nx.complete_graph(3) + G = nx.strong_product(P5, K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.strong_product(nx.MultiGraph(P5), K3) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.strong_product(P5, nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + G = nx.strong_product(nx.MultiGraph(P5), nx.MultiGraph(K3)) + assert nx.number_of_nodes(G) == 5 * 3 + + # No classic easily found classic results for strong product + + +def test_strong_product_random(): + G = nx.erdos_renyi_graph(10, 2 / 10.0) + H = nx.erdos_renyi_graph(10, 2 / 10.0) + GH = nx.strong_product(G, H) + + for u_G, u_H in GH.nodes(): + for v_G, v_H in GH.nodes(): + if ( + (u_G == v_G and H.has_edge(u_H, v_H)) + or (u_H == v_H and G.has_edge(u_G, v_G)) + or (G.has_edge(u_G, v_G) and H.has_edge(u_H, v_H)) + ): + assert GH.has_edge((u_G, u_H), (v_G, v_H)) + else: + assert not GH.has_edge((u_G, u_H), (v_G, v_H)) + + +def test_graph_power_raises(): + with pytest.raises(nx.NetworkXNotImplemented): + nx.power(nx.MultiDiGraph(), 2) + + +def test_graph_power(): + # wikipedia example for graph power + G = nx.cycle_graph(7) + G.add_edge(6, 7) + G.add_edge(7, 8) + G.add_edge(8, 9) + G.add_edge(9, 2) + H = nx.power(G, 2) + + assert edges_equal( + list(H.edges()), + [ + (0, 1), + (0, 2), + (0, 5), + (0, 6), + (0, 7), + (1, 9), + (1, 2), + (1, 3), + (1, 6), + (2, 3), + (2, 4), + (2, 8), + (2, 9), + (3, 4), + (3, 5), + (3, 9), + (4, 5), + (4, 6), + (5, 6), + (5, 7), + (6, 7), + (6, 8), + (7, 8), + (7, 9), + (8, 9), + ], + ) + + +def test_graph_power_negative(): + with pytest.raises(ValueError): + nx.power(nx.Graph(), -1) + + +def test_rooted_product_raises(): + with pytest.raises(nx.NodeNotFound): + nx.rooted_product(nx.Graph(), nx.path_graph(2), 10) + + +def test_rooted_product(): + G = nx.cycle_graph(5) + H = nx.Graph() + H.add_edges_from([("a", "b"), ("b", "c"), ("b", "d")]) + R = nx.rooted_product(G, H, "a") + assert len(R) == len(G) * len(H) + assert R.size() == G.size() + len(G) * H.size() + + +def test_corona_product(): + G = nx.cycle_graph(3) + H = nx.path_graph(2) + C = nx.corona_product(G, H) + assert len(C) == (len(G) * len(H)) + len(G) + assert C.size() == G.size() + len(G) * H.size() + len(G) * len(H) + + +def test_modular_product(): + G = nx.path_graph(3) + H = nx.path_graph(4) + M = nx.modular_product(G, H) + assert len(M) == len(G) * len(H) + + assert edges_equal( + list(M.edges()), + [ + ((0, 0), (1, 1)), + ((0, 0), (2, 2)), + ((0, 0), (2, 3)), + ((0, 1), (1, 0)), + ((0, 1), (1, 2)), + ((0, 
1), (2, 3)), + ((0, 2), (1, 1)), + ((0, 2), (1, 3)), + ((0, 2), (2, 0)), + ((0, 3), (1, 2)), + ((0, 3), (2, 0)), + ((0, 3), (2, 1)), + ((1, 0), (2, 1)), + ((1, 1), (2, 0)), + ((1, 1), (2, 2)), + ((1, 2), (2, 1)), + ((1, 2), (2, 3)), + ((1, 3), (2, 2)), + ], + ) + + +def test_modular_product_raises(): + G = nx.Graph([(0, 1), (1, 2), (2, 0)]) + H = nx.Graph([(0, 1), (1, 2), (2, 0)]) + DG = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + DH = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, DH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(DG, DH) + + MG = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + MH = nx.MultiGraph([(0, 1), (1, 2), (2, 0), (0, 1)]) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(G, MH) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.modular_product(MG, MH) + with pytest.raises(nx.NetworkXNotImplemented): + # check multigraph with no multiedges + nx.modular_product(nx.MultiGraph(G), H) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py new file mode 100644 index 0000000..d68e55c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/tests/test_unary.py @@ -0,0 +1,55 @@ +import pytest + +import networkx as nx + + +def test_complement(): + null = nx.null_graph() + empty1 = nx.empty_graph(1) + empty10 = nx.empty_graph(10) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + K10 = nx.complete_graph(10) + P2 = nx.path_graph(2) + P3 = nx.path_graph(3) + P5 = nx.path_graph(5) + P10 = nx.path_graph(10) + # complement of the complete graph is empty + + G = nx.complement(K3) + assert nx.is_isomorphic(G, nx.empty_graph(3)) + G = nx.complement(K5) + assert nx.is_isomorphic(G, nx.empty_graph(5)) + # for any G, G=complement(complement(G)) + P3cc = nx.complement(nx.complement(P3)) + assert nx.is_isomorphic(P3, P3cc) + nullcc = nx.complement(nx.complement(null)) + assert nx.is_isomorphic(null, nullcc) + b = nx.bull_graph() + bcc = nx.complement(nx.complement(b)) + assert nx.is_isomorphic(b, bcc) + + +def test_complement_2(): + G1 = nx.DiGraph() + G1.add_edge("A", "B") + G1.add_edge("A", "C") + G1.add_edge("A", "D") + G1C = nx.complement(G1) + assert sorted(G1C.edges()) == [ + ("B", "A"), + ("B", "C"), + ("B", "D"), + ("C", "A"), + ("C", "B"), + ("C", "D"), + ("D", "A"), + ("D", "B"), + ("D", "C"), + ] + + +def test_reverse1(): + # Other tests for reverse are done by the DiGraph and MultiDigraph. + G1 = nx.Graph() + pytest.raises(nx.NetworkXError, nx.reverse, G1) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py new file mode 100644 index 0000000..79e44d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/operators/unary.py @@ -0,0 +1,77 @@ +"""Unary operations on graphs""" + +import networkx as nx + +__all__ = ["complement", "reverse"] + + +@nx._dispatchable(returns_graph=True) +def complement(G): + """Returns the graph complement of G. + + Parameters + ---------- + G : graph + A NetworkX graph + + Returns + ------- + GC : A new graph. 
+ + Notes + ----- + Note that `complement` does not create self-loops and also + does not produce parallel edges for MultiGraphs. + + Graph, node, and edge data are not propagated to the new graph. + + Examples + -------- + >>> G = nx.Graph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)]) + >>> G_complement = nx.complement(G) + >>> G_complement.edges() # This shows the edges of the complemented graph + EdgeView([(1, 4), (1, 5), (2, 4), (2, 5), (4, 5)]) + + """ + R = G.__class__() + R.add_nodes_from(G) + R.add_edges_from( + ((n, n2) for n, nbrs in G.adjacency() for n2 in G if n2 not in nbrs if n != n2) + ) + return R + + +@nx._dispatchable(returns_graph=True) +def reverse(G, copy=True): + """Returns the reverse directed graph of G. + + Parameters + ---------- + G : directed graph + A NetworkX directed graph + copy : bool + If True, then a new graph is returned. If False, then the graph is + reversed in place. + + Returns + ------- + H : directed graph + The reversed G. + + Raises + ------ + NetworkXError + If graph is undirected. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 4), (3, 5)]) + >>> G_reversed = nx.reverse(G) + >>> G_reversed.edges() + OutEdgeView([(2, 1), (3, 1), (3, 2), (4, 3), (5, 3)]) + + """ + if not G.is_directed(): + raise nx.NetworkXError("Cannot reverse an undirected graph.") + else: + return G.reverse(copy=copy) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py new file mode 100644 index 0000000..ea25809 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/planar_drawing.py @@ -0,0 +1,464 @@ +from collections import defaultdict + +import networkx as nx + +__all__ = ["combinatorial_embedding_to_pos"] + + +def combinatorial_embedding_to_pos(embedding, fully_triangulate=False): + """Assigns every node a (x, y) position based on the given embedding + + The algorithm iteratively inserts nodes of the input graph in a certain + order and rearranges previously inserted nodes so that the planar drawing + stays valid. This is done efficiently by only maintaining relative + positions during the node placements and calculating the absolute positions + at the end. For more information see [1]_. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + This defines the order of the edges + + fully_triangulate : bool + If set to True the algorithm adds edges to a copy of the input + embedding and makes it chordal. + + Returns + ------- + pos : dict + Maps each node to a tuple that defines the (x, y) position + + References + ---------- + .. [1] M. Chrobak and T.H. Payne: + A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677 + + """ + if len(embedding.nodes()) < 4: + # Position the node in any triangle + default_positions = [(0, 0), (2, 0), (1, 1)] + pos = {} + for i, v in enumerate(embedding.nodes()): + pos[v] = default_positions[i] + return pos + + embedding, outer_face = triangulate_embedding(embedding, fully_triangulate) + + # The following dicts map a node to another node + # If a node is not in the key set it means that the node is not yet in G_k + # If a node maps to None then the corresponding subtree does not exist + left_t_child = {} + right_t_child = {} + + # The following dicts map a node to an integer + delta_x = {} + y_coordinate = {} + + node_list = get_canonical_ordering(embedding, outer_face) + + # 1. 
Phase: Compute relative positions + + # Initialization + v1, v2, v3 = node_list[0][0], node_list[1][0], node_list[2][0] + + delta_x[v1] = 0 + y_coordinate[v1] = 0 + right_t_child[v1] = v3 + left_t_child[v1] = None + + delta_x[v2] = 1 + y_coordinate[v2] = 0 + right_t_child[v2] = None + left_t_child[v2] = None + + delta_x[v3] = 1 + y_coordinate[v3] = 1 + right_t_child[v3] = v2 + left_t_child[v3] = None + + for k in range(3, len(node_list)): + vk, contour_nbrs = node_list[k] + wp = contour_nbrs[0] + wp1 = contour_nbrs[1] + wq = contour_nbrs[-1] + wq1 = contour_nbrs[-2] + adds_mult_tri = len(contour_nbrs) > 2 + + # Stretch gaps: + delta_x[wp1] += 1 + delta_x[wq] += 1 + + delta_x_wp_wq = sum(delta_x[x] for x in contour_nbrs[1:]) + + # Adjust offsets + delta_x[vk] = (-y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + y_coordinate[vk] = (y_coordinate[wp] + delta_x_wp_wq + y_coordinate[wq]) // 2 + delta_x[wq] = delta_x_wp_wq - delta_x[vk] + if adds_mult_tri: + delta_x[wp1] -= delta_x[vk] + + # Install v_k: + right_t_child[wp] = vk + right_t_child[vk] = wq + if adds_mult_tri: + left_t_child[vk] = wp1 + right_t_child[wq1] = None + else: + left_t_child[vk] = None + + # 2. Phase: Set absolute positions + pos = {} + pos[v1] = (0, y_coordinate[v1]) + remaining_nodes = [v1] + while remaining_nodes: + parent_node = remaining_nodes.pop() + + # Calculate position for left child + set_position( + parent_node, left_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + # Calculate position for right child + set_position( + parent_node, right_t_child, remaining_nodes, delta_x, y_coordinate, pos + ) + return pos + + +def set_position(parent, tree, remaining_nodes, delta_x, y_coordinate, pos): + """Helper method to calculate the absolute position of nodes.""" + child = tree[parent] + parent_node_x = pos[parent][0] + if child is not None: + # Calculate pos of child + child_x = parent_node_x + delta_x[child] + pos[child] = (child_x, y_coordinate[child]) + # Remember to calculate pos of its children + remaining_nodes.append(child) + + +def get_canonical_ordering(embedding, outer_face): + """Returns a canonical ordering of the nodes + + The canonical ordering of nodes (v1, ..., vn) must fulfill the following + conditions: + (See Lemma 1 in [2]_) + + - For the subgraph G_k of the input graph induced by v1, ..., vk it holds: + - 2-connected + - internally triangulated + - the edge (v1, v2) is part of the outer face + - For a node v(k+1) the following holds: + - The node v(k+1) is part of the outer face of G_k + - It has at least two neighbors in G_k + - All neighbors of v(k+1) in G_k lie consecutively on the outer face of + G_k (excluding the edge (v1, v2)). + + The algorithm used here starts with G_n (containing all nodes). It first + selects the nodes v1 and v2. And then tries to find the order of the other + nodes by checking which node can be removed in order to fulfill the + conditions mentioned above. This is done by calculating the number of + chords of nodes on the outer face. For more information see [1]_. + + Parameters + ---------- + embedding : nx.PlanarEmbedding + The embedding must be triangulated + outer_face : list + The nodes on the outer face of the graph + + Returns + ------- + ordering : list + A list of tuples `(vk, wp_wq)`. Here `vk` is the node at this position + in the canonical ordering. The element `wp_wq` is a list of nodes that + make up the outer face of G_k. + + References + ---------- + .. [1] Steven Chaplick. 
+ Canonical Orders of Planar Graphs and (some of) Their Applications 2015 + https://wuecampus2.uni-wuerzburg.de/moodle/pluginfile.php/545727/mod_resource/content/0/vg-ss15-vl03-canonical-orders-druckversion.pdf + .. [2] M. Chrobak and T.H. Payne: + A Linear-time Algorithm for Drawing a Planar Graph on a Grid 1989 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.51.6677 + + """ + v1 = outer_face[0] + v2 = outer_face[1] + chords = defaultdict(int) # Maps nodes to the number of their chords + marked_nodes = set() + ready_to_pick = set(outer_face) + + # Initialize outer_face_ccw_nbr (do not include v1 -> v2) + outer_face_ccw_nbr = {} + prev_nbr = v2 + for idx in range(2, len(outer_face)): + outer_face_ccw_nbr[prev_nbr] = outer_face[idx] + prev_nbr = outer_face[idx] + outer_face_ccw_nbr[prev_nbr] = v1 + + # Initialize outer_face_cw_nbr (do not include v2 -> v1) + outer_face_cw_nbr = {} + prev_nbr = v1 + for idx in range(len(outer_face) - 1, 0, -1): + outer_face_cw_nbr[prev_nbr] = outer_face[idx] + prev_nbr = outer_face[idx] + + def is_outer_face_nbr(x, y): + if x not in outer_face_ccw_nbr: + return outer_face_cw_nbr[x] == y + if x not in outer_face_cw_nbr: + return outer_face_ccw_nbr[x] == y + return outer_face_ccw_nbr[x] == y or outer_face_cw_nbr[x] == y + + def is_on_outer_face(x): + return x not in marked_nodes and (x in outer_face_ccw_nbr or x == v1) + + # Initialize number of chords + for v in outer_face: + for nbr in embedding.neighbors_cw_order(v): + if is_on_outer_face(nbr) and not is_outer_face_nbr(v, nbr): + chords[v] += 1 + ready_to_pick.discard(v) + + # Initialize canonical_ordering + canonical_ordering = [None] * len(embedding.nodes()) + canonical_ordering[0] = (v1, []) + canonical_ordering[1] = (v2, []) + ready_to_pick.discard(v1) + ready_to_pick.discard(v2) + + for k in range(len(embedding.nodes()) - 1, 1, -1): + # 1. 
Pick v from ready_to_pick
+        v = ready_to_pick.pop()
+        marked_nodes.add(v)
+
+        # v has exactly two neighbors on the outer face (wp and wq)
+        wp = None
+        wq = None
+        # Iterate over neighbors of v to find wp and wq
+        nbr_iterator = iter(embedding.neighbors_cw_order(v))
+        while True:
+            nbr = next(nbr_iterator)
+            if nbr in marked_nodes:
+                # Only consider nodes that are not yet removed
+                continue
+            if is_on_outer_face(nbr):
+                # nbr is either wp or wq
+                if nbr == v1:
+                    wp = v1
+                elif nbr == v2:
+                    wq = v2
+                else:
+                    if outer_face_cw_nbr[nbr] == v:
+                        # nbr is wp
+                        wp = nbr
+                    else:
+                        # nbr is wq
+                        wq = nbr
+            if wp is not None and wq is not None:
+                # We don't need to iterate any further
+                break
+
+        # Obtain new nodes on outer face (neighbors of v from wp to wq)
+        wp_wq = [wp]
+        nbr = wp
+        while nbr != wq:
+            # Get next neighbor (clockwise on the outer face)
+            next_nbr = embedding[v][nbr]["ccw"]
+            wp_wq.append(next_nbr)
+            # Update outer face
+            outer_face_cw_nbr[nbr] = next_nbr
+            outer_face_ccw_nbr[next_nbr] = nbr
+            # Move to next neighbor of v
+            nbr = next_nbr
+
+        if len(wp_wq) == 2:
+            # There was a chord between wp and wq, decrease number of chords
+            chords[wp] -= 1
+            if chords[wp] == 0:
+                ready_to_pick.add(wp)
+            chords[wq] -= 1
+            if chords[wq] == 0:
+                ready_to_pick.add(wq)
+        else:
+            # Update all chords involving w_(p+1) to w_(q-1)
+            new_face_nodes = set(wp_wq[1:-1])
+            for w in new_face_nodes:
+                # If we do not find a chord for w later we can pick it next
+                ready_to_pick.add(w)
+                for nbr in embedding.neighbors_cw_order(w):
+                    if is_on_outer_face(nbr) and not is_outer_face_nbr(w, nbr):
+                        # There is a chord involving w
+                        chords[w] += 1
+                        ready_to_pick.discard(w)
+                        if nbr not in new_face_nodes:
+                            # Also increase chord for the neighbor
+                            # We only iterate over new_face_nodes
+                            chords[nbr] += 1
+                            ready_to_pick.discard(nbr)
+        # Set the canonical ordering node and the list of contour neighbors
+        canonical_ordering[k] = (v, wp_wq)
+
+    return canonical_ordering
+
+
+def triangulate_face(embedding, v1, v2):
+    """Triangulates the face given by half edge (v1, v2)
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+    v1 : node
+        The half-edge (v1, v2) belongs to the face that gets triangulated
+    v2 : node
+    """
+    _, v3 = embedding.next_face_half_edge(v1, v2)
+    _, v4 = embedding.next_face_half_edge(v2, v3)
+    if v1 in (v2, v3):
+        # The component has fewer than 3 nodes
+        return
+    while v1 != v4:
+        # Add edge if not already present on other side
+        if embedding.has_edge(v1, v3):
+            # Cannot triangulate at this position
+            v1, v2, v3 = v2, v3, v4
+        else:
+            # Add edge for triangulation
+            embedding.add_half_edge(v1, v3, ccw=v2)
+            embedding.add_half_edge(v3, v1, cw=v2)
+            v1, v2, v3 = v1, v3, v4
+        # Get next node
+        _, v4 = embedding.next_face_half_edge(v2, v3)
+
+
+def triangulate_embedding(embedding, fully_triangulate=True):
+    """Triangulates the embedding.
+
+    Traverses faces of the embedding and adds edges to a copy of the
+    embedding to triangulate it.
+    The method also ensures that the resulting graph is 2-connected by adding
+    edges if the same vertex is contained twice on a path around a face.
+
+    Parameters
+    ----------
+    embedding : nx.PlanarEmbedding
+        The input graph must contain at least 3 nodes.
+
+    fully_triangulate : bool
+        If set to False the face with the most nodes is chosen as the outer
+        face. This outer face does not get triangulated.
+ + Returns + ------- + (embedding, outer_face) : (nx.PlanarEmbedding, list) tuple + The element `embedding` is a new embedding containing all edges from + the input embedding and the additional edges to triangulate the graph. + The element `outer_face` is a list of nodes that lie on the outer face. + If the graph is fully triangulated these are three arbitrary connected + nodes. + + """ + if len(embedding.nodes) <= 1: + return embedding, list(embedding.nodes) + embedding = nx.PlanarEmbedding(embedding) + + # Get a list with a node for each connected component + component_nodes = [next(iter(x)) for x in nx.connected_components(embedding)] + + # 1. Make graph a single component (add edge between components) + for i in range(len(component_nodes) - 1): + v1 = component_nodes[i] + v2 = component_nodes[i + 1] + embedding.connect_components(v1, v2) + + # 2. Calculate faces, ensure 2-connectedness and determine outer face + outer_face = [] # A face with the most number of nodes + face_list = [] + edges_visited = set() # Used to keep track of already visited faces + for v in embedding.nodes(): + for w in embedding.neighbors_cw_order(v): + new_face = make_bi_connected(embedding, v, w, edges_visited) + if new_face: + # Found a new face + face_list.append(new_face) + if len(new_face) > len(outer_face): + # The face is a candidate to be the outer face + outer_face = new_face + + # 3. Triangulate (internal) faces + for face in face_list: + if face is not outer_face or fully_triangulate: + # Triangulate this face + triangulate_face(embedding, face[0], face[1]) + + if fully_triangulate: + v1 = outer_face[0] + v2 = outer_face[1] + v3 = embedding[v2][v1]["ccw"] + outer_face = [v1, v2, v3] + + return embedding, outer_face + + +def make_bi_connected(embedding, starting_node, outgoing_node, edges_counted): + """Triangulate a face and make it 2-connected + + This method also adds all edges on the face to `edges_counted`. 
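+
+    A rough usage sketch (this is an internal helper; the triangle embedding
+    below is assumed to be a valid input):
+
+    >>> emb = nx.PlanarEmbedding()
+    >>> emb.set_data({0: [1, 2], 1: [2, 0], 2: [0, 1]})
+    >>> make_bi_connected(emb, 0, 1, set())
+    [0, 1, 2]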
+
+    Parameters
+    ----------
+    embedding: nx.PlanarEmbedding
+        The embedding that defines the faces
+    starting_node : node
+        A node on the face
+    outgoing_node : node
+        A node such that the half edge (starting_node, outgoing_node) belongs
+        to the face
+    edges_counted: set
+        Set of all half-edges that belong to a face that has already been
+        visited
+
+    Returns
+    -------
+    face_nodes: list
+        A list of all nodes at the border of this face
+    """
+
+    # Check if the face has already been calculated
+    if (starting_node, outgoing_node) in edges_counted:
+        # This face was already counted
+        return []
+    edges_counted.add((starting_node, outgoing_node))
+
+    # Add all edges to edges_counted which have this face to their left
+    v1 = starting_node
+    v2 = outgoing_node
+    face_list = [starting_node]  # List of nodes around the face
+    face_set = set(face_list)  # Set for faster queries
+    _, v3 = embedding.next_face_half_edge(v1, v2)
+
+    # Move the nodes v1, v2, v3 around the face:
+    while v2 != starting_node or v3 != outgoing_node:
+        if v1 == v2:
+            raise nx.NetworkXException("Invalid half-edge")
+        # cycle is not completed yet
+        if v2 in face_set:
+            # v2 encountered twice: Add edge to ensure 2-connectedness
+            embedding.add_half_edge(v1, v3, ccw=v2)
+            embedding.add_half_edge(v3, v1, cw=v2)
+            edges_counted.add((v2, v3))
+            edges_counted.add((v3, v1))
+            v2 = v1
+        else:
+            face_set.add(v2)
+            face_list.append(v2)
+
+        # set next edge
+        v1 = v2
+        v2, v3 = embedding.next_face_half_edge(v2, v3)
+
+        # remember that this edge has been counted
+        edges_counted.add((v1, v2))
+
+    return face_list
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py
new file mode 100644
index 0000000..41778f8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/planarity.py
@@ -0,0 +1,1463 @@
+from collections import defaultdict
+from copy import deepcopy
+
+import networkx as nx
+
+__all__ = ["check_planarity", "is_planar", "PlanarEmbedding"]
+
+
+@nx._dispatchable
+def is_planar(G):
+    """Returns True if and only if `G` is planar.
+
+    A graph is *planar* iff it can be drawn in a plane without
+    any edge intersections.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    bool
+        Whether the graph is planar.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2)])
+    >>> nx.is_planar(G)
+    True
+    >>> nx.is_planar(nx.complete_graph(5))
+    False
+
+    See Also
+    --------
+    check_planarity :
+        Check if graph is planar *and* return a `PlanarEmbedding` instance if True.
+    """
+
+    return check_planarity(G, counterexample=False)[0]
+
+
+@nx._dispatchable(returns_graph=True)
+def check_planarity(G, counterexample=False):
+    """Check if a graph is planar and return a counterexample or an embedding.
+
+    A graph is planar iff it can be drawn in a plane without
+    any edge intersections.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    counterexample : bool
+        A Kuratowski subgraph (to prove non-planarity) is only returned if set
+        to True.
+
+    Returns
+    -------
+    (is_planar, certificate) : (bool, NetworkX graph) tuple
+        is_planar is True if the graph is planar.
+        If the graph is planar `certificate` is a PlanarEmbedding
+        otherwise it is a Kuratowski subgraph.
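+
+    A minimal sketch of the non-planar branch (the Kuratowski certificate is
+    only built when requested):
+
+    >>> ok, cert = nx.check_planarity(nx.complete_graph(5), counterexample=True)
+    >>> ok
+    False
+    >>> sorted(cert.nodes())
+    [0, 1, 2, 3, 4]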
+ + Examples + -------- + >>> G = nx.Graph([(0, 1), (0, 2)]) + >>> is_planar, P = nx.check_planarity(G) + >>> print(is_planar) + True + + When `G` is planar, a `PlanarEmbedding` instance is returned: + + >>> P.get_data() + {0: [1, 2], 1: [0], 2: [0]} + + Notes + ----- + A (combinatorial) embedding consists of cyclic orderings of the incident + edges at each vertex. Given such an embedding there are multiple approaches + discussed in literature to drawing the graph (subject to various + constraints, e.g. integer coordinates), see e.g. [2]. + + The planarity check algorithm and extraction of the combinatorial embedding + is based on the Left-Right Planarity Test [1]. + + A counterexample is only generated if the corresponding parameter is set, + because the complexity of the counterexample generation is higher. + + See also + -------- + is_planar : + Check for planarity without creating a `PlanarEmbedding` or counterexample. + + References + ---------- + .. [1] Ulrik Brandes: + The Left-Right Planarity Test + 2009 + http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.217.9208 + .. [2] Takao Nishizeki, Md Saidur Rahman: + Planar graph drawing + Lecture Notes Series on Computing: Volume 12 + 2004 + """ + + planarity_state = LRPlanarity(G) + embedding = planarity_state.lr_planarity() + if embedding is None: + # graph is not planar + if counterexample: + return False, get_counterexample(G) + else: + return False, None + else: + # graph is planar + return True, embedding + + +@nx._dispatchable(returns_graph=True) +def check_planarity_recursive(G, counterexample=False): + """Recursive version of :meth:`check_planarity`.""" + planarity_state = LRPlanarity(G) + embedding = planarity_state.lr_planarity_recursive() + if embedding is None: + # graph is not planar + if counterexample: + return False, get_counterexample_recursive(G) + else: + return False, None + else: + # graph is planar + return True, embedding + + +@nx._dispatchable(returns_graph=True) +def get_counterexample(G): + """Obtains a Kuratowski subgraph. + + Raises nx.NetworkXException if G is planar. + + The function removes edges such that the graph is still not planar. + At some point the removal of any edge would make the graph planar. + This subgraph must be a Kuratowski subgraph. + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + subgraph : NetworkX graph + A Kuratowski subgraph that proves that G is not planar. + + """ + # copy graph + G = nx.Graph(G) + + if check_planarity(G)[0]: + raise nx.NetworkXException("G is planar - no counter example.") + + # find Kuratowski subgraph + subgraph = nx.Graph() + for u in G: + nbrs = list(G[u]) + for v in nbrs: + G.remove_edge(u, v) + if check_planarity(G)[0]: + G.add_edge(u, v) + subgraph.add_edge(u, v) + + return subgraph + + +@nx._dispatchable(returns_graph=True) +def get_counterexample_recursive(G): + """Recursive version of :meth:`get_counterexample`.""" + + # copy graph + G = nx.Graph(G) + + if check_planarity_recursive(G)[0]: + raise nx.NetworkXException("G is planar - no counter example.") + + # find Kuratowski subgraph + subgraph = nx.Graph() + for u in G: + nbrs = list(G[u]) + for v in nbrs: + G.remove_edge(u, v) + if check_planarity_recursive(G)[0]: + G.add_edge(u, v) + subgraph.add_edge(u, v) + + return subgraph + + +class Interval: + """Represents a set of return edges. 
+ + All return edges in an interval induce a same constraint on the contained + edges, which means that all edges must either have a left orientation or + all edges must have a right orientation. + """ + + def __init__(self, low=None, high=None): + self.low = low + self.high = high + + def empty(self): + """Check if the interval is empty""" + return self.low is None and self.high is None + + def copy(self): + """Returns a copy of this interval""" + return Interval(self.low, self.high) + + def conflicting(self, b, planarity_state): + """Returns True if interval I conflicts with edge b""" + return ( + not self.empty() + and planarity_state.lowpt[self.high] > planarity_state.lowpt[b] + ) + + +class ConflictPair: + """Represents a different constraint between two intervals. + + The edges in the left interval must have a different orientation than + the one in the right interval. + """ + + def __init__(self, left=Interval(), right=Interval()): + self.left = left + self.right = right + + def swap(self): + """Swap left and right intervals""" + temp = self.left + self.left = self.right + self.right = temp + + def lowest(self, planarity_state): + """Returns the lowest lowpoint of a conflict pair""" + if self.left.empty(): + return planarity_state.lowpt[self.right.low] + if self.right.empty(): + return planarity_state.lowpt[self.left.low] + return min( + planarity_state.lowpt[self.left.low], planarity_state.lowpt[self.right.low] + ) + + +def top_of_stack(l): + """Returns the element on top of the stack.""" + if not l: + return None + return l[-1] + + +class LRPlanarity: + """A class to maintain the state during planarity check.""" + + __slots__ = [ + "G", + "roots", + "height", + "lowpt", + "lowpt2", + "nesting_depth", + "parent_edge", + "DG", + "adjs", + "ordered_adjs", + "ref", + "side", + "S", + "stack_bottom", + "lowpt_edge", + "left_ref", + "right_ref", + "embedding", + ] + + def __init__(self, G): + # copy G without adding self-loops + self.G = nx.Graph() + self.G.add_nodes_from(G.nodes) + for e in G.edges: + if e[0] != e[1]: + self.G.add_edge(e[0], e[1]) + + self.roots = [] + + # distance from tree root + self.height = defaultdict(lambda: None) + + self.lowpt = {} # height of lowest return point of an edge + self.lowpt2 = {} # height of second lowest return point + self.nesting_depth = {} # for nesting order + + # None -> missing edge + self.parent_edge = defaultdict(lambda: None) + + # oriented DFS graph + self.DG = nx.DiGraph() + self.DG.add_nodes_from(G.nodes) + + self.adjs = {} + self.ordered_adjs = {} + + self.ref = defaultdict(lambda: None) + self.side = defaultdict(lambda: 1) + + # stack of conflict pairs + self.S = [] + self.stack_bottom = {} + self.lowpt_edge = {} + + self.left_ref = {} + self.right_ref = {} + + self.embedding = PlanarEmbedding() + + def lr_planarity(self): + """Execute the LR planarity test. + + Returns + ------- + embedding : dict + If the graph is planar an embedding is returned. Otherwise None. 
+ """ + if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6: + # graph is not planar + return None + + # make adjacency lists for dfs + for v in self.G: + self.adjs[v] = list(self.G[v]) + + # orientation of the graph by depth first search traversal + for v in self.G: + if self.height[v] is None: + self.height[v] = 0 + self.roots.append(v) + self.dfs_orientation(v) + + # Free no longer used variables + self.G = None + self.lowpt2 = None + self.adjs = None + + # testing + for v in self.DG: # sort the adjacency lists by nesting depth + # note: this sorting leads to non linear time + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + for v in self.roots: + if not self.dfs_testing(v): + return None + + # Free no longer used variables + self.height = None + self.lowpt = None + self.S = None + self.stack_bottom = None + self.lowpt_edge = None + + for e in self.DG.edges: + self.nesting_depth[e] = self.sign(e) * self.nesting_depth[e] + + self.embedding.add_nodes_from(self.DG.nodes) + for v in self.DG: + # sort the adjacency lists again + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + # initialize the embedding + previous_node = None + for w in self.ordered_adjs[v]: + self.embedding.add_half_edge(v, w, ccw=previous_node) + previous_node = w + + # Free no longer used variables + self.DG = None + self.nesting_depth = None + self.ref = None + + # compute the complete embedding + for v in self.roots: + self.dfs_embedding(v) + + # Free no longer used variables + self.roots = None + self.parent_edge = None + self.ordered_adjs = None + self.left_ref = None + self.right_ref = None + self.side = None + + return self.embedding + + def lr_planarity_recursive(self): + """Recursive version of :meth:`lr_planarity`.""" + if self.G.order() > 2 and self.G.size() > 3 * self.G.order() - 6: + # graph is not planar + return None + + # orientation of the graph by depth first search traversal + for v in self.G: + if self.height[v] is None: + self.height[v] = 0 + self.roots.append(v) + self.dfs_orientation_recursive(v) + + # Free no longer used variable + self.G = None + + # testing + for v in self.DG: # sort the adjacency lists by nesting depth + # note: this sorting leads to non linear time + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + for v in self.roots: + if not self.dfs_testing_recursive(v): + return None + + for e in self.DG.edges: + self.nesting_depth[e] = self.sign_recursive(e) * self.nesting_depth[e] + + self.embedding.add_nodes_from(self.DG.nodes) + for v in self.DG: + # sort the adjacency lists again + self.ordered_adjs[v] = sorted( + self.DG[v], key=lambda x: self.nesting_depth[(v, x)] + ) + # initialize the embedding + previous_node = None + for w in self.ordered_adjs[v]: + self.embedding.add_half_edge(v, w, ccw=previous_node) + previous_node = w + + # compute the complete embedding + for v in self.roots: + self.dfs_embedding_recursive(v) + + return self.embedding + + def dfs_orientation(self, v): + """Orient the graph by DFS, compute lowpoints and nesting order.""" + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + # boolean to indicate whether to skip the initial work for an edge + skip_init = defaultdict(lambda: False) + + while dfs_stack: + v = dfs_stack.pop() + e = self.parent_edge[v] + + for w in self.adjs[v][ind[v] :]: + vw = (v, w) + + if not skip_init[vw]: + if (v, w) 
in self.DG.edges or (w, v) in self.DG.edges: + ind[v] += 1 + continue # the edge was already oriented + + self.DG.add_edge(v, w) # orient the edge + + self.lowpt[vw] = self.height[v] + self.lowpt2[vw] = self.height[v] + if self.height[w] is None: # (v, w) is a tree edge + self.parent_edge[w] = vw + self.height[w] = self.height[v] + 1 + + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + skip_init[vw] = True # don't redo this block + break # handle next node in dfs_stack (i.e. w) + else: # (v, w) is a back edge + self.lowpt[vw] = self.height[w] + + # determine nesting graph + self.nesting_depth[vw] = 2 * self.lowpt[vw] + if self.lowpt2[vw] < self.height[v]: # chordal + self.nesting_depth[vw] += 1 + + # update lowpoints of parent edge e + if e is not None: + if self.lowpt[vw] < self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw]) + self.lowpt[e] = self.lowpt[vw] + elif self.lowpt[vw] > self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw]) + else: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw]) + + ind[v] += 1 + + def dfs_orientation_recursive(self, v): + """Recursive version of :meth:`dfs_orientation`.""" + e = self.parent_edge[v] + for w in self.G[v]: + if (v, w) in self.DG.edges or (w, v) in self.DG.edges: + continue # the edge was already oriented + vw = (v, w) + self.DG.add_edge(v, w) # orient the edge + + self.lowpt[vw] = self.height[v] + self.lowpt2[vw] = self.height[v] + if self.height[w] is None: # (v, w) is a tree edge + self.parent_edge[w] = vw + self.height[w] = self.height[v] + 1 + self.dfs_orientation_recursive(w) + else: # (v, w) is a back edge + self.lowpt[vw] = self.height[w] + + # determine nesting graph + self.nesting_depth[vw] = 2 * self.lowpt[vw] + if self.lowpt2[vw] < self.height[v]: # chordal + self.nesting_depth[vw] += 1 + + # update lowpoints of parent edge e + if e is not None: + if self.lowpt[vw] < self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt[e], self.lowpt2[vw]) + self.lowpt[e] = self.lowpt[vw] + elif self.lowpt[vw] > self.lowpt[e]: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt[vw]) + else: + self.lowpt2[e] = min(self.lowpt2[e], self.lowpt2[vw]) + + def dfs_testing(self, v): + """Test for LR partition.""" + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + # boolean to indicate whether to skip the initial work for an edge + skip_init = defaultdict(lambda: False) + + while dfs_stack: + v = dfs_stack.pop() + e = self.parent_edge[v] + # to indicate whether to skip the final block after the for loop + skip_final = False + + for w in self.ordered_adjs[v][ind[v] :]: + ei = (v, w) + + if not skip_init[ei]: + self.stack_bottom[ei] = top_of_stack(self.S) + + if ei == self.parent_edge[w]: # tree edge + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + skip_init[ei] = True # don't redo this block + skip_final = True # skip final work after breaking + break # handle next node in dfs_stack (i.e. 
w) + else: # back edge + self.lowpt_edge[ei] = ei + self.S.append(ConflictPair(right=Interval(ei, ei))) + + # integrate new return edges + if self.lowpt[ei] < self.height[v]: + if w == self.ordered_adjs[v][0]: # e_i has return edge + self.lowpt_edge[e] = self.lowpt_edge[ei] + else: # add constraints of e_i + if not self.add_constraints(ei, e): + # graph is not planar + return False + + ind[v] += 1 + + if not skip_final: + # remove back edges returning to parent + if e is not None: # v isn't root + self.remove_back_edges(e) + + return True + + def dfs_testing_recursive(self, v): + """Recursive version of :meth:`dfs_testing`.""" + e = self.parent_edge[v] + for w in self.ordered_adjs[v]: + ei = (v, w) + self.stack_bottom[ei] = top_of_stack(self.S) + if ei == self.parent_edge[w]: # tree edge + if not self.dfs_testing_recursive(w): + return False + else: # back edge + self.lowpt_edge[ei] = ei + self.S.append(ConflictPair(right=Interval(ei, ei))) + + # integrate new return edges + if self.lowpt[ei] < self.height[v]: + if w == self.ordered_adjs[v][0]: # e_i has return edge + self.lowpt_edge[e] = self.lowpt_edge[ei] + else: # add constraints of e_i + if not self.add_constraints(ei, e): + # graph is not planar + return False + + # remove back edges returning to parent + if e is not None: # v isn't root + self.remove_back_edges(e) + return True + + def add_constraints(self, ei, e): + P = ConflictPair() + # merge return edges of e_i into P.right + while True: + Q = self.S.pop() + if not Q.left.empty(): + Q.swap() + if not Q.left.empty(): # not planar + return False + if self.lowpt[Q.right.low] > self.lowpt[e]: + # merge intervals + if P.right.empty(): # topmost interval + P.right = Q.right.copy() + else: + self.ref[P.right.low] = Q.right.high + P.right.low = Q.right.low + else: # align + self.ref[Q.right.low] = self.lowpt_edge[e] + if top_of_stack(self.S) == self.stack_bottom[ei]: + break + # merge conflicting return edges of e_1,...,e_i-1 into P.L + while top_of_stack(self.S).left.conflicting(ei, self) or top_of_stack( + self.S + ).right.conflicting(ei, self): + Q = self.S.pop() + if Q.right.conflicting(ei, self): + Q.swap() + if Q.right.conflicting(ei, self): # not planar + return False + # merge interval below lowpt(e_i) into P.R + self.ref[P.right.low] = Q.right.high + if Q.right.low is not None: + P.right.low = Q.right.low + + if P.left.empty(): # topmost interval + P.left = Q.left.copy() + else: + self.ref[P.left.low] = Q.left.high + P.left.low = Q.left.low + + if not (P.left.empty() and P.right.empty()): + self.S.append(P) + return True + + def remove_back_edges(self, e): + u = e[0] + # trim back edges ending at parent u + # drop entire conflict pairs + while self.S and top_of_stack(self.S).lowest(self) == self.height[u]: + P = self.S.pop() + if P.left.low is not None: + self.side[P.left.low] = -1 + + if self.S: # one more conflict pair to consider + P = self.S.pop() + # trim left interval + while P.left.high is not None and P.left.high[1] == u: + P.left.high = self.ref[P.left.high] + if P.left.high is None and P.left.low is not None: + # just emptied + self.ref[P.left.low] = P.right.low + self.side[P.left.low] = -1 + P.left.low = None + # trim right interval + while P.right.high is not None and P.right.high[1] == u: + P.right.high = self.ref[P.right.high] + if P.right.high is None and P.right.low is not None: + # just emptied + self.ref[P.right.low] = P.left.low + self.side[P.right.low] = -1 + P.right.low = None + self.S.append(P) + + # side of e is side of a highest return edge + if 
self.lowpt[e] < self.height[u]: # e has return edge + hl = top_of_stack(self.S).left.high + hr = top_of_stack(self.S).right.high + + if hl is not None and (hr is None or self.lowpt[hl] > self.lowpt[hr]): + self.ref[e] = hl + else: + self.ref[e] = hr + + def dfs_embedding(self, v): + """Completes the embedding.""" + # the recursion stack + dfs_stack = [v] + # index of next edge to handle in adjacency list of each node + ind = defaultdict(lambda: 0) + + while dfs_stack: + v = dfs_stack.pop() + + for w in self.ordered_adjs[v][ind[v] :]: + ind[v] += 1 + ei = (v, w) + + if ei == self.parent_edge[w]: # tree edge + self.embedding.add_half_edge_first(w, v) + self.left_ref[v] = w + self.right_ref[v] = w + + dfs_stack.append(v) # revisit v after finishing w + dfs_stack.append(w) # visit w next + break # handle next node in dfs_stack (i.e. w) + else: # back edge + if self.side[ei] == 1: + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) + else: + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) + self.left_ref[w] = v + + def dfs_embedding_recursive(self, v): + """Recursive version of :meth:`dfs_embedding`.""" + for w in self.ordered_adjs[v]: + ei = (v, w) + if ei == self.parent_edge[w]: # tree edge + self.embedding.add_half_edge_first(w, v) + self.left_ref[v] = w + self.right_ref[v] = w + self.dfs_embedding_recursive(w) + else: # back edge + if self.side[ei] == 1: + # place v directly after right_ref[w] in embed. list of w + self.embedding.add_half_edge(w, v, ccw=self.right_ref[w]) + else: + # place v directly before left_ref[w] in embed. list of w + self.embedding.add_half_edge(w, v, cw=self.left_ref[w]) + self.left_ref[w] = v + + def sign(self, e): + """Resolve the relative side of an edge to the absolute side.""" + # the recursion stack + dfs_stack = [e] + # dict to remember reference edges + old_ref = defaultdict(lambda: None) + + while dfs_stack: + e = dfs_stack.pop() + + if self.ref[e] is not None: + dfs_stack.append(e) # revisit e after finishing self.ref[e] + dfs_stack.append(self.ref[e]) # visit self.ref[e] next + old_ref[e] = self.ref[e] # remember value of self.ref[e] + self.ref[e] = None + else: + self.side[e] *= self.side[old_ref[e]] + + return self.side[e] + + def sign_recursive(self, e): + """Recursive version of :meth:`sign`.""" + if self.ref[e] is not None: + self.side[e] = self.side[e] * self.sign_recursive(self.ref[e]) + self.ref[e] = None + return self.side[e] + + +class PlanarEmbedding(nx.DiGraph): + """Represents a planar graph with its planar embedding. + + The planar embedding is given by a `combinatorial embedding + `_. + + .. note:: `check_planarity` is the preferred way to check if a graph is planar. + + **Neighbor ordering:** + + In comparison to a usual graph structure, the embedding also stores the + order of all neighbors for every vertex. + The order of the neighbors can be given in clockwise (cw) direction or + counterclockwise (ccw) direction. This order is stored as edge attributes + in the underlying directed graph. For the edge (u, v) the edge attribute + 'cw' is set to the neighbor of u that follows immediately after v in + clockwise direction. + + In order for a PlanarEmbedding to be valid it must fulfill multiple + conditions. It is possible to check if these conditions are fulfilled with + the method :meth:`check_structure`. + The conditions are: + + * Edges must go in both directions (because the edge attributes differ) + * Every edge must have a 'cw' and 'ccw' attribute which corresponds to a + correct planar embedding. 
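+
+    For illustration (a minimal sketch), a half-edge without its opposite
+    violates the first condition and fails :meth:`check_structure`:
+
+    >>> G = nx.PlanarEmbedding()
+    >>> G.add_half_edge(0, 1)
+    >>> G.check_structure()
+    Traceback (most recent call last):
+        ...
+    networkx.exception.NetworkXException: Bad embedding. Opposite half-edge is missing.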
+ + As long as a PlanarEmbedding is invalid only the following methods should + be called: + + * :meth:`add_half_edge` + * :meth:`connect_components` + + Even though the graph is a subclass of nx.DiGraph, it can still be used + for algorithms that require undirected graphs, because the method + :meth:`is_directed` is overridden. This is possible, because a valid + PlanarGraph must have edges in both directions. + + **Half edges:** + + In methods like `add_half_edge` the term "half-edge" is used, which is + a term that is used in `doubly connected edge lists + `_. It is used + to emphasize that the edge is only in one direction and there exists + another half-edge in the opposite direction. + While conventional edges always have two faces (including outer face) next + to them, it is possible to assign each half-edge *exactly one* face. + For a half-edge (u, v) that is oriented such that u is below v then the + face that belongs to (u, v) is to the right of this half-edge. + + See Also + -------- + is_planar : + Preferred way to check if an existing graph is planar. + + check_planarity : + A convenient way to create a `PlanarEmbedding`. If not planar, + it returns a subgraph that shows this. + + Examples + -------- + + Create an embedding of a star graph (compare `nx.star_graph(3)`): + + >>> G = nx.PlanarEmbedding() + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 2, ccw=1) + >>> G.add_half_edge(0, 3, ccw=2) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) + + Alternatively the same embedding can also be defined in counterclockwise + orientation. The following results in exactly the same PlanarEmbedding: + + >>> G = nx.PlanarEmbedding() + >>> G.add_half_edge(0, 1) + >>> G.add_half_edge(0, 3, cw=1) + >>> G.add_half_edge(0, 2, cw=3) + >>> G.add_half_edge(1, 0) + >>> G.add_half_edge(2, 0) + >>> G.add_half_edge(3, 0) + + After creating a graph, it is possible to validate that the PlanarEmbedding + object is correct: + + >>> G.check_structure() + + """ + + def __init__(self, incoming_graph_data=None, **attr): + super().__init__(incoming_graph_data=incoming_graph_data, **attr) + self.add_edge = self.__forbidden + self.add_edges_from = self.__forbidden + self.add_weighted_edges_from = self.__forbidden + + def __forbidden(self, *args, **kwargs): + """Forbidden operation + + Any edge additions to a PlanarEmbedding should be done using + method `add_half_edge`. + """ + raise NotImplementedError( + "Use `add_half_edge` method to add edges to a PlanarEmbedding." + ) + + def get_data(self): + """Converts the adjacency structure into a better readable structure. + + Returns + ------- + embedding : dict + A dict mapping all nodes to a list of neighbors sorted in + clockwise order. + + See Also + -------- + set_data + + """ + embedding = {} + for v in self: + embedding[v] = list(self.neighbors_cw_order(v)) + return embedding + + def set_data(self, data): + """Inserts edges according to given sorted neighbor list. + + The input format is the same as the output format of get_data(). + + Parameters + ---------- + data : dict + A dict mapping all nodes to a list of neighbors sorted in + clockwise order. + + See Also + -------- + get_data + + """ + for v in data: + ref = None + for w in reversed(data[v]): + self.add_half_edge(v, w, cw=ref) + ref = w + + def remove_node(self, n): + """Remove node n. + + Removes the node n and all adjacent edges, updating the + PlanarEmbedding to account for any resulting edge removal. 
+ Attempting to remove a non-existent node will raise an exception. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. + + See Also + -------- + remove_nodes_from + + """ + try: + for u in self._pred[n]: + succs_u = self._succ[u] + un_cw = succs_u[n]["cw"] + un_ccw = succs_u[n]["ccw"] + del succs_u[n] + del self._pred[u][n] + if n != un_cw: + succs_u[un_cw]["ccw"] = un_ccw + succs_u[un_ccw]["cw"] = un_cw + del self._node[n] + del self._succ[n] + del self._pred[n] + except KeyError as err: # NetworkXError if n not in self + raise nx.NetworkXError( + f"The node {n} is not in the planar embedding." + ) from err + nx._clear_cache(self) + + def remove_nodes_from(self, nodes): + """Remove multiple nodes. + + Parameters + ---------- + nodes : iterable container + A container of nodes (list, dict, set, etc.). If a node + in the container is not in the graph it is silently ignored. + + See Also + -------- + remove_node + + Notes + ----- + When removing nodes from an iterator over the graph you are changing, + a `RuntimeError` will be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.remove_nodes_from`. + + """ + for n in nodes: + if n in self._node: + self.remove_node(n) + # silently skip non-existing nodes + + def neighbors_cw_order(self, v): + """Generator for the neighbors of v in clockwise order. + + Parameters + ---------- + v : node + + Yields + ------ + node + + """ + succs = self._succ[v] + if not succs: + # v has no neighbors + return + start_node = next(reversed(succs)) + yield start_node + current_node = succs[start_node]["cw"] + while start_node != current_node: + yield current_node + current_node = succs[current_node]["cw"] + + def add_half_edge(self, start_node, end_node, *, cw=None, ccw=None): + """Adds a half-edge from `start_node` to `end_node`. + + If the half-edge is not the first one out of `start_node`, a reference + node must be provided either in the clockwise (parameter `cw`) or in + the counterclockwise (parameter `ccw`) direction. Only one of `cw`/`ccw` + can be specified (or neither in the case of the first edge). + Note that specifying a reference in the clockwise (`cw`) direction means + inserting the new edge in the first counterclockwise position with + respect to the reference (and vice-versa). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + cw, ccw: node + End node of reference edge. + Omit or pass `None` if adding the first out-half-edge of `start_node`. + + + Raises + ------ + NetworkXException + If the `cw` or `ccw` node is not a successor of `start_node`. + If `start_node` has successors, but neither `cw` or `ccw` is provided. + If both `cw` and `ccw` are specified. 
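+
+        Example (a minimal sketch of the reference semantics):
+
+        >>> G = nx.PlanarEmbedding()
+        >>> G.add_half_edge(0, 1)
+        >>> G.add_half_edge(0, 2, ccw=1)
+        >>> list(G.neighbors_cw_order(0))
+        [1, 2]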
+ + See Also + -------- + connect_components + """ + + succs = self._succ.get(start_node) + if succs: + # there is already some edge out of start_node + leftmost_nbr = next(reversed(self._succ[start_node])) + if cw is not None: + if cw not in succs: + raise nx.NetworkXError("Invalid clockwise reference node.") + if ccw is not None: + raise nx.NetworkXError("Only one of cw/ccw can be specified.") + ref_ccw = succs[cw]["ccw"] + super().add_edge(start_node, end_node, cw=cw, ccw=ref_ccw) + succs[ref_ccw]["cw"] = end_node + succs[cw]["ccw"] = end_node + # when (cw == leftmost_nbr), the newly added neighbor is + # already at the end of dict self._succ[start_node] and + # takes the place of the former leftmost_nbr + move_leftmost_nbr_to_end = cw != leftmost_nbr + elif ccw is not None: + if ccw not in succs: + raise nx.NetworkXError("Invalid counterclockwise reference node.") + ref_cw = succs[ccw]["cw"] + super().add_edge(start_node, end_node, cw=ref_cw, ccw=ccw) + succs[ref_cw]["ccw"] = end_node + succs[ccw]["cw"] = end_node + move_leftmost_nbr_to_end = True + else: + raise nx.NetworkXError( + "Node already has out-half-edge(s), either cw or ccw reference node required." + ) + if move_leftmost_nbr_to_end: + # LRPlanarity (via self.add_half_edge_first()) requires that + # we keep track of the leftmost neighbor, which we accomplish + # by keeping it as the last key in dict self._succ[start_node] + succs[leftmost_nbr] = succs.pop(leftmost_nbr) + + else: + if cw is not None or ccw is not None: + raise nx.NetworkXError("Invalid reference node.") + # adding the first edge out of start_node + super().add_edge(start_node, end_node, ccw=end_node, cw=end_node) + + def check_structure(self): + """Runs without exceptions if this object is valid. + + Checks that the following properties are fulfilled: + + * Edges go in both directions (because the edge attributes differ). + * Every edge has a 'cw' and 'ccw' attribute which corresponds to a + correct planar embedding. + + Running this method verifies that the underlying Graph must be planar. + + Raises + ------ + NetworkXException + This exception is raised with a short explanation if the + PlanarEmbedding is invalid. + """ + # Check fundamental structure + for v in self: + try: + sorted_nbrs = set(self.neighbors_cw_order(v)) + except KeyError as err: + msg = f"Bad embedding. Missing orientation for a neighbor of {v}" + raise nx.NetworkXException(msg) from err + + unsorted_nbrs = set(self[v]) + if sorted_nbrs != unsorted_nbrs: + msg = "Bad embedding. Edge orientations not set correctly." + raise nx.NetworkXException(msg) + for w in self[v]: + # Check if opposite half-edge exists + if not self.has_edge(w, v): + msg = "Bad embedding. Opposite half-edge is missing." + raise nx.NetworkXException(msg) + + # Check planarity + counted_half_edges = set() + for component in nx.connected_components(self): + if len(component) == 1: + # Don't need to check single node component + continue + num_nodes = len(component) + num_half_edges = 0 + num_faces = 0 + for v in component: + for w in self.neighbors_cw_order(v): + num_half_edges += 1 + if (v, w) not in counted_half_edges: + # We encountered a new face + num_faces += 1 + # Mark all half-edges belonging to this face + self.traverse_face(v, w, counted_half_edges) + num_edges = num_half_edges // 2 # num_half_edges is even + if num_nodes - num_edges + num_faces != 2: + # The result does not match Euler's formula + msg = "Bad embedding. 
The graph does not match Euler's formula" + raise nx.NetworkXException(msg) + + def add_half_edge_ccw(self, start_node, end_node, reference_neighbor): + """Adds a half-edge from start_node to end_node. + + The half-edge is added counter clockwise next to the existing half-edge + (start_node, reference_neighbor). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + reference_neighbor: node + End node of reference edge. + + Raises + ------ + NetworkXException + If the reference_neighbor does not exist. + + See Also + -------- + add_half_edge + add_half_edge_cw + connect_components + + """ + self.add_half_edge(start_node, end_node, cw=reference_neighbor) + + def add_half_edge_cw(self, start_node, end_node, reference_neighbor): + """Adds a half-edge from start_node to end_node. + + The half-edge is added clockwise next to the existing half-edge + (start_node, reference_neighbor). + + Parameters + ---------- + start_node : node + Start node of inserted edge. + end_node : node + End node of inserted edge. + reference_neighbor: node + End node of reference edge. + + Raises + ------ + NetworkXException + If the reference_neighbor does not exist. + + See Also + -------- + add_half_edge + add_half_edge_ccw + connect_components + """ + self.add_half_edge(start_node, end_node, ccw=reference_neighbor) + + def remove_edge(self, u, v): + """Remove the edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove the half-edges (u, v) and (v, u) and update the + edge ordering around the removed edge. + + Raises + ------ + NetworkXError + If there is not an edge between u and v. + + See Also + -------- + remove_edges_from : remove a collection of edges + """ + try: + succs_u = self._succ[u] + succs_v = self._succ[v] + uv_cw = succs_u[v]["cw"] + uv_ccw = succs_u[v]["ccw"] + vu_cw = succs_v[u]["cw"] + vu_ccw = succs_v[u]["ccw"] + del succs_u[v] + del self._pred[v][u] + del succs_v[u] + del self._pred[u][v] + if v != uv_cw: + succs_u[uv_cw]["ccw"] = uv_ccw + succs_u[uv_ccw]["cw"] = uv_cw + if u != vu_cw: + succs_v[vu_cw]["ccw"] = vu_ccw + succs_v[vu_ccw]["cw"] = vu_cw + except KeyError as err: + raise nx.NetworkXError( + f"The edge {u}-{v} is not in the planar embedding." + ) from err + nx._clear_cache(self) + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each pair of half-edges between the nodes given in the tuples + will be removed from the graph. The nodes can be passed as: + + - 2-tuples (u, v) half-edges (u, v) and (v, u). + - 3-tuples (u, v, k) where k is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + """ + for e in ebunch: + u, v = e[:2] # ignore edge data + # assuming that the PlanarEmbedding is valid, if the half_edge + # (u, v) is in the graph, then so is half_edge (v, u) + if u in self._succ and v in self._succ[u]: + self.remove_edge(u, v) + + def connect_components(self, v, w): + """Adds half-edges for (v, w) and (w, v) at some position. + + This method should only be called if v and w are in different + components, or it might break the embedding. 
+ This especially means that if `connect_components(v, w)` + is called it is not allowed to call `connect_components(w, v)` + afterwards. The neighbor orientations in both directions are + all set correctly after the first call. + + Parameters + ---------- + v : node + w : node + + See Also + -------- + add_half_edge + """ + if v in self._succ and self._succ[v]: + ref = next(reversed(self._succ[v])) + else: + ref = None + self.add_half_edge(v, w, cw=ref) + if w in self._succ and self._succ[w]: + ref = next(reversed(self._succ[w])) + else: + ref = None + self.add_half_edge(w, v, cw=ref) + + def add_half_edge_first(self, start_node, end_node): + """Add a half-edge and set end_node as start_node's leftmost neighbor. + + The new edge is inserted counterclockwise with respect to the current + leftmost neighbor, if there is one. + + Parameters + ---------- + start_node : node + end_node : node + + See Also + -------- + add_half_edge + connect_components + """ + succs = self._succ.get(start_node) + # the leftmost neighbor is the last entry in the + # self._succ[start_node] dict + leftmost_nbr = next(reversed(succs)) if succs else None + self.add_half_edge(start_node, end_node, cw=leftmost_nbr) + + def next_face_half_edge(self, v, w): + """Returns the following half-edge left of a face. + + Parameters + ---------- + v : node + w : node + + Returns + ------- + half-edge : tuple + """ + new_node = self[w][v]["ccw"] + return w, new_node + + def traverse_face(self, v, w, mark_half_edges=None): + """Returns nodes on the face that belong to the half-edge (v, w). + + The face that is traversed lies to the right of the half-edge (in an + orientation where v is below w). + + Optionally it is possible to pass a set to which all encountered half + edges are added. Before calling this method, this set must not include + any half-edges that belong to the face. + + Parameters + ---------- + v : node + Start node of half-edge. + w : node + End node of half-edge. + mark_half_edges: set, optional + Set to which all encountered half-edges are added. + + Returns + ------- + face : list + A list of nodes that lie on this face. + """ + if mark_half_edges is None: + mark_half_edges = set() + + face_nodes = [v] + mark_half_edges.add((v, w)) + prev_node = v + cur_node = w + # Last half-edge is (incoming_node, v) + incoming_node = self[v][w]["cw"] + + while cur_node != v or prev_node != incoming_node: + face_nodes.append(cur_node) + prev_node, cur_node = self.next_face_half_edge(prev_node, cur_node) + if (prev_node, cur_node) in mark_half_edges: + raise nx.NetworkXException("Bad planar embedding. Impossible face.") + mark_half_edges.add((prev_node, cur_node)) + + return face_nodes + + def is_directed(self): + """A valid PlanarEmbedding is undirected. + + All reverse edges are contained, i.e. for every existing + half-edge (v, w) the half-edge in the opposite direction (w, v) is also + contained. + """ + return False + + def copy(self, as_view=False): + if as_view is True: + return nx.graphviews.generic_graph_view(self) + G = self.__class__() + G.graph.update(self.graph) + G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) + super(self.__class__, G).add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) + return G + + def to_undirected(self, reciprocal=False, as_view=False): + """ + Returns a non-embedding undirected representation of the graph. 
+ + This method strips the planar embedding information and provides + a simple undirected graph representation. While creating the undirected graph, + all edge attributes are retained except the ``"cw"`` and ``"ccw"`` attributes + which are removed from the edge data. Those attributes are specific to + the requirements of planar embeddings. + + Parameters + ---------- + reciprocal : bool (optional) + Not supported for PlanarEmbedding. This parameter raises an exception + if used. All valid embeddings include reciprocal half-edges by definition, + making this parameter unnecessary. + as_view : bool (optional, default=False) + Not supported for PlanarEmbedding. This parameter raises an exception + if used. + + Returns + ------- + G : Graph + An undirected graph with the same name and nodes as the PlanarEmbedding. + Edges are included with their data, except for the ``"cw"`` and ``"ccw"`` + attributes, which are omitted. + + + Notes + ----- + - If edges exist in both directions ``(u, v)`` and ``(v, u)`` in the PlanarEmbedding, + attributes for the resulting undirected edge will be combined, excluding ``"cw"`` + and ``"ccw"``. + - A deep copy is made of the other edge attributes as well as the + node and graph attributes, ensuring independence of the resulting graph. + - Subclass-specific data structures used in the original graph may not transfer + to the undirected graph. The resulting graph will be of type ``nx.Graph``. + """ + + if reciprocal: + raise ValueError( + "'reciprocal=True' is not supported for PlanarEmbedding.\n" + "All valid embeddings include reciprocal half-edges by definition,\n" + "making this parameter unnecessary." + ) + + if as_view: + raise ValueError("'as_view=True' is not supported for PlanarEmbedding.") + + graph_class = self.to_undirected_class() + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + G.add_edges_from( + (u, v, {k: deepcopy(v) for k, v in d.items() if k not in {"cw", "ccw"}}) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py new file mode 100644 index 0000000..7ebc755 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/polynomials.py @@ -0,0 +1,306 @@ +"""Provides algorithms supporting the computation of graph polynomials. + +Graph polynomials are polynomial-valued graph invariants that encode a wide +variety of structural information. Examples include the Tutte polynomial, +chromatic polynomial, characteristic polynomial, and matching polynomial. An +extensive treatment is provided in [1]_. + +For a simple example, the `~sympy.matrices.matrices.MatrixDeterminant.charpoly` +method can be used to compute the characteristic polynomial from the adjacency +matrix of a graph. Consider the complete graph ``K_4``: + +>>> import sympy +>>> x = sympy.Symbol("x") +>>> G = nx.complete_graph(4) +>>> A = nx.to_numpy_array(G, dtype=int) +>>> M = sympy.SparseMatrix(A) +>>> M.charpoly(x).as_expr() +x**4 - 6*x**2 - 8*x - 3 + + +.. [1] Y. Shi, M. Dehmer, X. Li, I. 
Gutman, + "Graph Polynomials" +""" + +from collections import deque + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["tutte_polynomial", "chromatic_polynomial"] + + +@not_implemented_for("directed") +@nx._dispatchable +def tutte_polynomial(G): + r"""Returns the Tutte polynomial of `G` + + This function computes the Tutte polynomial via an iterative version of + the deletion-contraction algorithm. + + The Tutte polynomial `T_G(x, y)` is a fundamental graph polynomial invariant in + two variables. It encodes a wide array of information related to the + edge-connectivity of a graph; "Many problems about graphs can be reduced to + problems of finding and evaluating the Tutte polynomial at certain values" [1]_. + In fact, every deletion-contraction-expressible feature of a graph is a + specialization of the Tutte polynomial [2]_ (see Notes for examples). + + There are several equivalent definitions; here are three: + + Def 1 (rank-nullity expansion): For `G` an undirected graph, `n(G)` the + number of vertices of `G`, `E` the edge set of `G`, `V` the vertex set of + `G`, and `c(A)` the number of connected components of the graph with vertex + set `V` and edge set `A` [3]_: + + .. math:: + + T_G(x, y) = \sum_{A \in E} (x-1)^{c(A) - c(E)} (y-1)^{c(A) + |A| - n(G)} + + Def 2 (spanning tree expansion): Let `G` be an undirected graph, `T` a spanning + tree of `G`, and `E` the edge set of `G`. Let `E` have an arbitrary strict + linear order `L`. Let `B_e` be the unique minimal nonempty edge cut of + $E \setminus T \cup {e}$. An edge `e` is internally active with respect to + `T` and `L` if `e` is the least edge in `B_e` according to the linear order + `L`. The internal activity of `T` (denoted `i(T)`) is the number of edges + in $E \setminus T$ that are internally active with respect to `T` and `L`. + Let `P_e` be the unique path in $T \cup {e}$ whose source and target vertex + are the same. An edge `e` is externally active with respect to `T` and `L` + if `e` is the least edge in `P_e` according to the linear order `L`. The + external activity of `T` (denoted `e(T)`) is the number of edges in + $E \setminus T$ that are externally active with respect to `T` and `L`. + Then [4]_ [5]_: + + .. math:: + + T_G(x, y) = \sum_{T \text{ a spanning tree of } G} x^{i(T)} y^{e(T)} + + Def 3 (deletion-contraction recurrence): For `G` an undirected graph, `G-e` + the graph obtained from `G` by deleting edge `e`, `G/e` the graph obtained + from `G` by contracting edge `e`, `k(G)` the number of cut-edges of `G`, + and `l(G)` the number of self-loops of `G`: + + .. math:: + T_G(x, y) = \begin{cases} + x^{k(G)} y^{l(G)}, & \text{if all edges are cut-edges or self-loops} \\ + T_{G-e}(x, y) + T_{G/e}(x, y), & \text{otherwise, for an arbitrary edge $e$ not a cut-edge or loop} + \end{cases} + + Parameters + ---------- + G : NetworkX graph + + Returns + ------- + instance of `sympy.core.add.Add` + A Sympy expression representing the Tutte polynomial for `G`. 
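+
+    As a quick sanity check (sketch): evaluating at ``(1, 1)`` counts spanning
+    trees, and the 5-cycle has exactly five of them:
+
+    >>> T = nx.tutte_polynomial(nx.cycle_graph(5))
+    >>> T.subs({"x": 1, "y": 1})
+    5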
+
+    Examples
+    --------
+    >>> C = nx.cycle_graph(5)
+    >>> nx.tutte_polynomial(C)
+    x**4 + x**3 + x**2 + x + y
+
+    >>> D = nx.diamond_graph()
+    >>> nx.tutte_polynomial(D)
+    x**3 + 2*x**2 + 2*x*y + x + y**2 + y
+
+    Notes
+    -----
+    Some specializations of the Tutte polynomial:
+
+    - `T_G(1, 1)` counts the number of spanning trees of `G`
+    - `T_G(1, 2)` counts the number of connected spanning subgraphs of `G`
+    - `T_G(2, 1)` counts the number of spanning forests in `G`
+    - `T_G(0, 2)` counts the number of strong orientations of `G`
+    - `T_G(2, 0)` counts the number of acyclic orientations of `G`
+
+    Edge contraction is defined and deletion-contraction is introduced in [6]_.
+    Combinatorial meaning of the coefficients is introduced in [7]_.
+    Universality, properties, and applications are discussed in [8]_.
+
+    Practically, up-front computation of the Tutte polynomial may be useful when
+    users wish to repeatedly calculate edge-connectivity-related information
+    about one or more graphs.
+
+    References
+    ----------
+    .. [1] M. Brandt,
+       "The Tutte Polynomial."
+       Talking About Combinatorial Objects Seminar, 2015
+       https://math.berkeley.edu/~brandtm/talks/tutte.pdf
+    .. [2] A. Björklund, T. Husfeldt, P. Kaski, M. Koivisto,
+       "Computing the Tutte polynomial in vertex-exponential time"
+       49th Annual IEEE Symposium on Foundations of Computer Science, 2008
+       https://ieeexplore.ieee.org/abstract/document/4691000
+    .. [3] Y. Shi, M. Dehmer, X. Li, I. Gutman,
+       "Graph Polynomials," p. 14
+    .. [4] Y. Shi, M. Dehmer, X. Li, I. Gutman,
+       "Graph Polynomials," p. 46
+    .. [5] A. Nešetril, J. Goodall,
+       "Graph invariants, homomorphisms, and the Tutte polynomial"
+       https://iuuk.mff.cuni.cz/~andrew/Tutte.pdf
+    .. [6] D. B. West,
+       "Introduction to Graph Theory," p. 84
+    .. [7] G. Coutinho,
+       "A brief introduction to the Tutte polynomial"
+       Structural Analysis of Complex Networks, 2011
+       https://homepages.dcc.ufmg.br/~gabriel/seminars/coutinho_tuttepolynomial_seminar.pdf
+    .. [8] J. A. Ellis-Monaghan, C. Merino,
+       "Graph polynomials and their applications I: The Tutte polynomial"
+       Structural Analysis of Complex Networks, 2011
+       https://arxiv.org/pdf/0803.3079.pdf
+    """
+    import sympy
+
+    x = sympy.Symbol("x")
+    y = sympy.Symbol("y")
+    stack = deque()
+    stack.append(nx.MultiGraph(G))
+
+    polynomial = 0
+    while stack:
+        G = stack.pop()
+        bridges = set(nx.bridges(G))
+
+        e = None
+        for i in G.edges:
+            if (i[0], i[1]) not in bridges and i[0] != i[1]:
+                e = i
+                break
+        if not e:
+            loops = list(nx.selfloop_edges(G, keys=True))
+            polynomial += x ** len(bridges) * y ** len(loops)
+        else:
+            # deletion-contraction
+            C = nx.contracted_edge(G, e, self_loops=True)
+            C.remove_edge(e[0], e[0])
+            G.remove_edge(*e)
+            stack.append(G)
+            stack.append(C)
+    return sympy.simplify(polynomial)
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def chromatic_polynomial(G):
+    r"""Returns the chromatic polynomial of `G`
+
+    This function computes the chromatic polynomial via an iterative version of
+    the deletion-contraction algorithm.
+
+    The chromatic polynomial `X_G(x)` is a fundamental graph polynomial
+    invariant in one variable. Evaluating `X_G(k)` for a natural number `k`
+    enumerates the proper k-colorings of `G`.
+
+    There are several equivalent definitions; here are three:
+
+    Def 1 (explicit formula):
+    For `G` an undirected graph, `c(G)` the number of connected components of
+    `G`, `E` the edge set of `G`, and `G(S)` the spanning subgraph of `G` with
+    edge set `S` [1]_:
+
+    .. 
+
+        X_G(x) = \sum_{S \subseteq E} (-1)^{|S|} x^{c(G(S))}
+
+
+    Def 2 (interpolating polynomial):
+    For `G` an undirected graph, `n(G)` the number of vertices of `G`, `k_0 = 0`,
+    and `k_i` the number of distinct ways to color the vertices of `G` with `i`
+    unique colors (for `i` a natural number at most `n(G)`), `X_G(x)` is the
+    unique Lagrange interpolating polynomial of degree `n(G)` through the points
+    `(0, k_0), (1, k_1), \dots, (n(G), k_{n(G)})` [2]_.
+
+
+    Def 3 (chromatic recurrence):
+    For `G` an undirected graph, `G-e` the graph obtained from `G` by deleting
+    edge `e`, `G/e` the graph obtained from `G` by contracting edge `e`, `n(G)`
+    the number of vertices of `G`, and `e(G)` the number of edges of `G` [3]_:
+
+    .. math::
+        X_G(x) = \begin{cases}
+            x^{n(G)}, & \text{if $e(G)=0$} \\
+            X_{G-e}(x) - X_{G/e}(x), & \text{otherwise, for an arbitrary edge $e$}
+        \end{cases}
+
+    This formulation is also known as the Fundamental Reduction Theorem [4]_.
+
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    instance of `sympy.core.add.Add`
+        A Sympy expression representing the chromatic polynomial for `G`.
+
+    Examples
+    --------
+    >>> C = nx.cycle_graph(5)
+    >>> nx.chromatic_polynomial(C)
+    x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x
+
+    >>> G = nx.complete_graph(4)
+    >>> nx.chromatic_polynomial(G)
+    x**4 - 6*x**3 + 11*x**2 - 6*x
+
+    Notes
+    -----
+    Interpretation of the coefficients is discussed in [5]_. Several special
+    cases are listed in [2]_.
+
+    The chromatic polynomial is a specialization of the Tutte polynomial; in
+    particular, ``X_G(x) = (-1)^{n(G) - c(G)} x^{c(G)} T_G(1 - x, 0)`` [6]_.
+
+    The chromatic polynomial may take negative arguments, though evaluations
+    may not have chromatic interpretations. For instance,
+    ``(-1)^{n(G)} X_G(-1)`` counts the acyclic orientations of `G` [7]_.
+
+    References
+    ----------
+    .. [1] D. B. West,
+       "Introduction to Graph Theory," p. 222
+    .. [2] E. W. Weisstein
+       "Chromatic Polynomial"
+       MathWorld--A Wolfram Web Resource
+       https://mathworld.wolfram.com/ChromaticPolynomial.html
+    .. [3] D. B. West,
+       "Introduction to Graph Theory," p. 221
+    .. [4] J. Zhang, J. Goodall,
+       "An Introduction to Chromatic Polynomials"
+       https://math.mit.edu/~apost/courses/18.204_2018/Julie_Zhang_paper.pdf
+    .. [5] R. C. Read,
+       "An Introduction to Chromatic Polynomials"
+       Journal of Combinatorial Theory, 1968
+       https://math.berkeley.edu/~mrklug/ReadChromatic.pdf
+    .. [6] W. T. Tutte,
+       "Graph-polynomials"
+       Advances in Applied Mathematics, 2004
+       https://www.sciencedirect.com/science/article/pii/S0196885803000411
+    .. [7] R. P. Stanley,
+       "Acyclic orientations of graphs"
+       Discrete Mathematics, 2006
+       https://math.mit.edu/~rstan/pubs/pubfiles/18.pdf
+    """
+    import sympy
+
+    x = sympy.Symbol("x")
+    stack = deque()
+    stack.append(nx.MultiGraph(G, contraction_idx=0))
+
+    polynomial = 0
+    while stack:
+        G = stack.pop()
+        edges = list(G.edges)
+        if not edges:
+            # Each contraction flips the sign, per the chromatic recurrence
+            polynomial += (-1) ** G.graph["contraction_idx"] * x ** len(G)
+        else:
+            e = edges[0]
+            C = nx.contracted_edge(G, e, self_loops=True)
+            C.graph["contraction_idx"] = G.graph["contraction_idx"] + 1
+            C.remove_edge(e[0], e[0])
+            G.remove_edge(*e)
+            stack.append(G)
+            stack.append(C)
+    return polynomial
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py
new file mode 100644
index 0000000..5ea7ed2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/reciprocity.py
@@ -0,0 +1,98 @@
+"""Algorithms to calculate reciprocity in a directed graph."""
+
+import networkx as nx
+from networkx import NetworkXError
+
+from ..utils import not_implemented_for
+
+__all__ = ["reciprocity", "overall_reciprocity"]
+
+
+@not_implemented_for("undirected", "multigraph")
+@nx._dispatchable
+def reciprocity(G, nodes=None):
+    r"""Compute the reciprocity in a directed graph.
+
+    The reciprocity of a directed graph is defined as the ratio
+    of the number of edges pointing in both directions to the total
+    number of edges in the graph.
+    Formally, $r = |\{(u, v) \in E \mid (v, u) \in E\}| / |\{(u, v) \in E\}|$,
+    where `E` is the edge set of `G`.
+
+    The reciprocity of a single node `u` is defined similarly: it is the
+    ratio of the number of edges in both directions to the total number
+    of edges attached to node `u`.
+
+    Parameters
+    ----------
+    G : graph
+       A networkx directed graph
+    nodes : container of nodes, optional (default=whole graph)
+       Compute reciprocity for nodes in this container.
+
+    Returns
+    -------
+    out : dictionary
+       Reciprocity keyed by node label.
+
+    Notes
+    -----
+    The reciprocity is not defined for isolated nodes.
+    In such cases this function will return None.
+
+    """
+    # If `nodes` is not specified, calculate the reciprocity of the graph.
+    if nodes is None:
+        return overall_reciprocity(G)
+
+    # If `nodes` represents a single node in the graph, return only its
+    # reciprocity.
+    if nodes in G:
+        reciprocity = next(_reciprocity_iter(G, nodes))[1]
+        if reciprocity is None:
+            raise NetworkXError("Not defined for isolated nodes.")
+        else:
+            return reciprocity
+
+    # Otherwise, `nodes` represents an iterable of nodes, so return a
+    # dictionary mapping node to its reciprocity.
+    return dict(_reciprocity_iter(G, nodes))
+
+
+def _reciprocity_iter(G, nodes):
+    """Return an iterator of (node, reciprocity)."""
+    n = G.nbunch_iter(nodes)
+    for node in n:
+        pred = set(G.predecessors(node))
+        succ = set(G.successors(node))
+        overlap = pred & succ
+        n_total = len(pred) + len(succ)
+
+        # Reciprocity is not defined for isolated nodes.
+        # Return None.
+        if n_total == 0:
+            yield (node, None)
+        else:
+            reciprocity = 2 * len(overlap) / n_total
+            yield (node, reciprocity)
+
+
+@not_implemented_for("undirected", "multigraph")
+@nx._dispatchable
+def overall_reciprocity(G):
+    """Compute the reciprocity for the whole graph.
+
+    See the documentation of `reciprocity` for the definition.
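+
+    As an illustrative sketch, a three-edge digraph with one mutual pair of
+    edges should have reciprocity 2/3:
+
+    >>> G = nx.DiGraph([(0, 1), (1, 0), (1, 2)])
+    >>> nx.overall_reciprocity(G)
+    0.6666666666666666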
+
+    Parameters
+    ----------
+    G : graph
+       A networkx graph
+
+    """
+    n_all_edge = G.number_of_edges()
+    n_overlap_edge = (n_all_edge - G.to_undirected().number_of_edges()) * 2
+
+    if n_all_edge == 0:
+        raise NetworkXError("Not defined for empty graphs")
+
+    return n_overlap_edge / n_all_edge
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py
new file mode 100644
index 0000000..f483ef3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/regular.py
@@ -0,0 +1,215 @@
+"""Functions for computing and verifying regular graphs."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["is_regular", "is_k_regular", "k_factor"]
+
+
+@nx._dispatchable
+def is_regular(G):
+    """Determines whether the graph ``G`` is a regular graph.
+
+    A regular graph is a graph where each vertex has the same degree. A
+    regular digraph is a graph where the indegree and outdegree of each
+    vertex are equal.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    Returns
+    -------
+    bool
+        Whether the given graph or digraph is regular.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> nx.is_regular(G)
+    True
+
+    """
+    if len(G) == 0:
+        raise nx.NetworkXPointlessConcept("Graph has no nodes.")
+    n1 = nx.utils.arbitrary_element(G)
+    if not G.is_directed():
+        d1 = G.degree(n1)
+        return all(d1 == d for _, d in G.degree)
+    else:
+        d_in = G.in_degree(n1)
+        in_regular = all(d_in == d for _, d in G.in_degree)
+        d_out = G.out_degree(n1)
+        out_regular = all(d_out == d for _, d in G.out_degree)
+        return in_regular and out_regular
+
+
+@not_implemented_for("directed")
+@nx._dispatchable
+def is_k_regular(G, k):
+    """Determines whether the graph ``G`` is a k-regular graph.
+
+    A k-regular graph is a graph where each vertex has degree k.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    k : int
+        The degree to check for.
+
+    Returns
+    -------
+    bool
+        Whether the given graph is k-regular.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> nx.is_k_regular(G, k=3)
+    False
+
+    """
+    return all(d == k for n, d in G.degree)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True)
+def k_factor(G, k, matching_weight="weight"):
+    """Compute a k-factor of G
+
+    A k-factor of a graph is a spanning k-regular subgraph.
+    A spanning k-regular subgraph of G is a subgraph that contains
+    each vertex of G and a subset of the edges of G such that each
+    vertex has degree k.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+      Undirected graph
+
+    k : int
+      The degree of each vertex in the returned spanning subgraph.
+
+    matching_weight: string, optional (default='weight')
+       Edge data key corresponding to the edge weight.
+       Used for finding the max-weighted perfect matching.
+       If key not found, uses 1 as weight.
+
+    Returns
+    -------
+    G2 : NetworkX graph
+        A k-factor of G
+
+    Examples
+    --------
+    >>> G = nx.Graph([(1, 2), (2, 3), (3, 4), (4, 1)])
+    >>> G2 = nx.k_factor(G, k=1)
+    >>> G2.edges()
+    EdgeView([(1, 2), (3, 4)])
+
+    References
+    ----------
+    .. [1] "An algorithm for computing simple k-factors.",
+       Meijer, Henk, Yurai Núñez-Rodríguez, and David Rappaport,
+       Information processing letters, 2009.
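+
+    As a further illustrative sketch, a 2-factor of a 2-regular graph ought
+    to be the graph itself:
+
+    >>> C = nx.cycle_graph(4)
+    >>> F = nx.k_factor(C, k=2)
+    >>> nx.is_k_regular(F, k=2)
+    True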
+ """ + + from networkx.algorithms.matching import is_perfect_matching, max_weight_matching + + class LargeKGadget: + def __init__(self, k, degree, node, g): + self.original = node + self.g = g + self.k = k + self.degree = degree + + self.outer_vertices = [(node, x) for x in range(degree)] + self.core_vertices = [(node, x + degree) for x in range(degree - k)] + + def replace_node(self): + adj_view = self.g[self.original] + neighbors = list(adj_view.keys()) + edge_attrs = list(adj_view.values()) + for outer, neighbor, edge_attrs in zip( + self.outer_vertices, neighbors, edge_attrs + ): + self.g.add_edge(outer, neighbor, **edge_attrs) + for core in self.core_vertices: + for outer in self.outer_vertices: + self.g.add_edge(core, outer) + self.g.remove_node(self.original) + + def restore_node(self): + self.g.add_node(self.original) + for outer in self.outer_vertices: + adj_view = self.g[outer] + for neighbor, edge_attrs in list(adj_view.items()): + if neighbor not in self.core_vertices: + self.g.add_edge(self.original, neighbor, **edge_attrs) + break + g.remove_nodes_from(self.outer_vertices) + g.remove_nodes_from(self.core_vertices) + + class SmallKGadget: + def __init__(self, k, degree, node, g): + self.original = node + self.k = k + self.degree = degree + self.g = g + + self.outer_vertices = [(node, x) for x in range(degree)] + self.inner_vertices = [(node, x + degree) for x in range(degree)] + self.core_vertices = [(node, x + 2 * degree) for x in range(k)] + + def replace_node(self): + adj_view = self.g[self.original] + for outer, inner, (neighbor, edge_attrs) in zip( + self.outer_vertices, self.inner_vertices, list(adj_view.items()) + ): + self.g.add_edge(outer, inner) + self.g.add_edge(outer, neighbor, **edge_attrs) + for core in self.core_vertices: + for inner in self.inner_vertices: + self.g.add_edge(core, inner) + self.g.remove_node(self.original) + + def restore_node(self): + self.g.add_node(self.original) + for outer in self.outer_vertices: + adj_view = self.g[outer] + for neighbor, edge_attrs in adj_view.items(): + if neighbor not in self.core_vertices: + self.g.add_edge(self.original, neighbor, **edge_attrs) + break + self.g.remove_nodes_from(self.outer_vertices) + self.g.remove_nodes_from(self.inner_vertices) + self.g.remove_nodes_from(self.core_vertices) + + # Step 1 + if any(d < k for _, d in G.degree): + raise nx.NetworkXUnfeasible("Graph contains a vertex with degree less than k") + g = G.copy() + + # Step 2 + gadgets = [] + for node, degree in list(g.degree): + if k < degree / 2.0: + gadget = SmallKGadget(k, degree, node, g) + else: + gadget = LargeKGadget(k, degree, node, g) + gadget.replace_node() + gadgets.append(gadget) + + # Step 3 + matching = max_weight_matching(g, maxcardinality=True, weight=matching_weight) + + # Step 4 + if not is_perfect_matching(g, matching): + raise nx.NetworkXUnfeasible( + "Cannot find k-factor because no perfect matching exists" + ) + + for edge in g.edges(): + if edge not in matching and (edge[1], edge[0]) not in matching: + g.remove_edge(edge[0], edge[1]) + + for gadget in gadgets: + gadget.restore_node() + + return g diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py new file mode 100644 index 0000000..445b27d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/richclub.py @@ -0,0 +1,138 @@ +"""Functions for computing rich-club coefficients.""" + +from itertools import accumulate + +import networkx as nx +from 
+
+__all__ = ["rich_club_coefficient"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def rich_club_coefficient(G, normalized=True, Q=100, seed=None):
+    r"""Returns the rich-club coefficient of the graph `G`.
+
+    For each degree *k*, the *rich-club coefficient* is the ratio of the
+    number of actual to the number of potential edges for nodes with
+    degree greater than *k*:
+
+    .. math::
+
+        \phi(k) = \frac{2 E_k}{N_k (N_k - 1)}
+
+    where `N_k` is the number of nodes with degree larger than *k*, and
+    `E_k` is the number of edges among those nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        Undirected graph with neither parallel edges nor self-loops.
+    normalized : bool (optional, default=True)
+        Normalize using randomized network as in [1]_
+    Q : float (optional, default=100)
+        If `normalized` is True, perform `Q * m` double-edge
+        swaps, where `m` is the number of edges in `G`, to use as a
+        null-model for normalization.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    rc : dictionary
+       A dictionary, keyed by degree, with rich-club coefficient values.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` has fewer than four nodes and ``normalized=True``.
+        A randomly sampled graph for normalization cannot be generated in this case.
+
+    Examples
+    --------
+    >>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    >>> rc = nx.rich_club_coefficient(G, normalized=False, seed=42)
+    >>> rc[0]
+    0.4
+
+    Notes
+    -----
+    The rich club definition and algorithm are found in [1]_. This
+    algorithm ignores any edge weights and is not defined for directed
+    graphs or graphs with parallel edges or self loops.
+
+    Normalization is done by computing the rich club coefficient for a randomly
+    sampled graph with the same degree distribution as `G` by
+    repeatedly swapping the endpoints of existing edges. For graphs with fewer than 4
+    nodes, it is not possible to generate a random graph with a prescribed
+    degree distribution, as the degree distribution fully determines the graph
+    (hence making the coefficients trivially normalized to 1).
+    This function raises an exception in this case.
+
+    Estimates for appropriate values of `Q` are found in [2]_.
+
+    References
+    ----------
+    .. [1] Julian J. McAuley, Luciano da Fontoura Costa,
+       and Tibério S. Caetano,
+       "The rich-club phenomenon across complex network hierarchies",
+       Applied Physics Letters Vol 91 Issue 8, August 2007.
+       https://arxiv.org/abs/physics/0701290
+    .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon,
+       "Uniform generation of random graphs with arbitrary degree
+       sequences", 2006. https://arxiv.org/abs/cond-mat/0312028
+    """
+    if nx.number_of_selfloops(G) > 0:
+        raise nx.NetworkXError(
+            "rich_club_coefficient is not implemented for graphs with self loops."
+        )
+    rc = _compute_rc(G)
+    if normalized:
+        # make R a copy of G, randomize with Q*|E| double edge swaps
+        # and use rich_club coefficient of R to normalize
+        R = G.copy()
+        E = R.number_of_edges()
+        nx.double_edge_swap(R, Q * E, max_tries=Q * E * 10, seed=seed)
+        rcran = _compute_rc(R)
+        rc = {k: v / rcran[k] for k, v in rc.items()}
+    return rc
+
+
+def _compute_rc(G):
+    """Returns the rich-club coefficient for each degree in the graph
+    `G`.
+
+    `G` is an undirected graph without multiedges.
+
+    Returns a dictionary mapping degree to rich-club coefficient for
+    that degree.
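+
+    A small illustrative sketch: for the star graph `K_{1,3}`, the four
+    nodes of degree greater than 0 share 3 of the 6 possible edges, giving
+    a coefficient of 0.5 at degree 0:
+
+    >>> _compute_rc(nx.star_graph(3))
+    {0: 0.5}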
+ + """ + deghist = nx.degree_histogram(G) + total = sum(deghist) + # Compute the number of nodes with degree greater than `k`, for each + # degree `k` (omitting the last entry, which is zero). + nks = (total - cs for cs in accumulate(deghist) if total - cs > 1) + # Create a sorted list of pairs of edge endpoint degrees. + # + # The list is sorted in reverse order so that we can pop from the + # right side of the list later, instead of popping from the left + # side of the list, which would have a linear time cost. + edge_degrees = sorted((sorted(map(G.degree, e)) for e in G.edges()), reverse=True) + ek = G.number_of_edges() + if ek == 0: + return {} + + k1, k2 = edge_degrees.pop() + rc = {} + for d, nk in enumerate(nks): + while k1 <= d: + if len(edge_degrees) == 0: + ek = 0 + break + k1, k2 = edge_degrees.pop() + ek -= 1 + rc[d] = 2 * ek / (nk * (nk - 1)) + return rc diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py new file mode 100644 index 0000000..eb0d91c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__init__.py @@ -0,0 +1,5 @@ +from networkx.algorithms.shortest_paths.generic import * +from networkx.algorithms.shortest_paths.unweighted import * +from networkx.algorithms.shortest_paths.weighted import * +from networkx.algorithms.shortest_paths.astar import * +from networkx.algorithms.shortest_paths.dense import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c0968b2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-312.pyc new file mode 100644 index 0000000..56b6d08 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/astar.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-312.pyc new file mode 100644 index 0000000..d7a3f44 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/dense.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-312.pyc new file mode 100644 index 0000000..941e6fa Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/generic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-312.pyc new file mode 100644 index 0000000..76c0471 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/unweighted.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-312.pyc
new file mode 100644
index 0000000..e8b002b
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/__pycache__/weighted.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py
new file mode 100644
index 0000000..1182297
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/astar.py
@@ -0,0 +1,239 @@
+"""Shortest paths and path lengths using the A* ("A star") algorithm."""
+
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+from networkx.algorithms.shortest_paths.weighted import _weight_function
+
+__all__ = ["astar_path", "astar_path_length"]
+
+
+@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic")
+def astar_path(G, source, target, heuristic=None, weight="weight", *, cutoff=None):
+    """Returns a list of nodes in a shortest path between source and target
+    using the A* ("A-star") algorithm.
+
+    There may be more than one shortest path. This returns only one.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+       Starting node for path
+
+    target : node
+       Ending node for path
+
+    heuristic : function
+       A function to evaluate the estimate of the distance
+       from a node to the target. The function takes
+       two node arguments and must return a number.
+       If the heuristic is inadmissible (if it might
+       overestimate the cost of reaching the goal from a node),
+       the result may not be a shortest path.
+       The algorithm does not support updating heuristic
+       values for the same node due to caching the first
+       heuristic calculation per node.
+
+    weight : string or function
+       If this is a string, then edge weights will be accessed via the
+       edge attribute with this key (that is, the weight of the edge
+       joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+       such edge attribute exists, the weight of the edge is assumed to
+       be one.
+       If this is a function, the weight of an edge is the value
+       returned by the function. The function must accept exactly three
+       positional arguments: the two endpoints of an edge and the
+       dictionary of edge attributes for that edge. The function must
+       return a number or None to indicate a hidden edge.
+
+    cutoff : float, optional
+       If this is provided, the search will be bounded to this value. That is,
+       if the evaluation function surpasses this value for a node n, the node
+       will not be expanded further and will be ignored. More formally, let
+       h'(n) be the heuristic function, and g(n) be the cost of reaching n from
+       the source node. Then, if g(n) + h'(n) > cutoff, the node will not be
+       explored further.
+       Note that if the heuristic is inadmissible, it is possible that paths
+       are ignored even though they satisfy the cutoff.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If no path exists between source and target.
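+
+    As an illustrative sketch of the bound, with the default zero heuristic
+    a cutoff equal to the true path length still admits the path:
+
+    >>> G = nx.path_graph(5)
+    >>> nx.astar_path(G, 0, 4, cutoff=4)
+    [0, 1, 2, 3, 4]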
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> print(nx.astar_path(G, 0, 4))
+    [0, 1, 2, 3, 4]
+    >>> G = nx.grid_graph(dim=[3, 3])  # nodes are two-tuples (x,y)
+    >>> nx.set_edge_attributes(G, {e: e[1][0] * 2 for e in G.edges()}, "cost")
+    >>> def dist(a, b):
+    ...     (x1, y1) = a
+    ...     (x2, y2) = b
+    ...     return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
+    >>> print(nx.astar_path(G, (0, 0), (2, 2), heuristic=dist, weight="cost"))
+    [(0, 0), (0, 1), (0, 2), (1, 2), (2, 2)]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+    will find the shortest red path.
+
+    See Also
+    --------
+    shortest_path, dijkstra_path
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} is not in G")
+
+    if target not in G:
+        raise nx.NodeNotFound(f"Target {target} is not in G")
+
+    if heuristic is None:
+        # The default heuristic is h=0 - same as Dijkstra's algorithm
+        def heuristic(u, v):
+            return 0
+
+    weight = _weight_function(G, weight)
+
+    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)
+
+    # The queue stores priority, node, cost to reach, and parent.
+    # Uses Python heapq to keep in priority order.
+    # Add a counter to the queue to prevent the underlying heap from
+    # attempting to compare the nodes themselves. The counter breaks ties in
+    # the priority and is guaranteed unique for all nodes in the graph.
+    c = count()
+    queue = [(0, next(c), source, 0, None)]
+
+    # Maps enqueued nodes to distance of discovered paths and the
+    # computed heuristics to target. We avoid computing the heuristics
+    # more than once and inserting the node into the queue too many times.
+    enqueued = {}
+    # Maps explored nodes to parent closest to the source.
+    explored = {}
+
+    while queue:
+        # Pop the smallest item from queue.
+        _, __, curnode, dist, parent = heappop(queue)
+
+        if curnode == target:
+            path = [curnode]
+            node = parent
+            while node is not None:
+                path.append(node)
+                node = explored[node]
+            path.reverse()
+            return path
+
+        if curnode in explored:
+            # Do not override the parent of starting node
+            if explored[curnode] is None:
+                continue
+
+            # Skip bad paths that were enqueued before finding a better one
+            qcost, h = enqueued[curnode]
+            if qcost < dist:
+                continue
+
+        explored[curnode] = parent
+
+        for neighbor, w in G_succ[curnode].items():
+            cost = weight(curnode, neighbor, w)
+            if cost is None:
+                continue
+            ncost = dist + cost
+            if neighbor in enqueued:
+                qcost, h = enqueued[neighbor]
+                # if qcost <= ncost, a less costly path from the
+                # neighbor to the source was already determined.
+                # Therefore, we won't attempt to push this neighbor
+                # to the queue
+                if qcost <= ncost:
+                    continue
+            else:
+                h = heuristic(neighbor, target)
+
+            if cutoff is not None and ncost + h > cutoff:
+                continue
+
+            enqueued[neighbor] = ncost, h
+            heappush(queue, (ncost + h, next(c), neighbor, ncost, curnode))
+
+    raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}")
+
+
+@nx._dispatchable(edge_attrs="weight", preserve_node_attrs="heuristic")
+def astar_path_length(
+    G, source, target, heuristic=None, weight="weight", *, cutoff=None
+):
+    """Returns the length of the shortest path between source and target using
+    the A* ("A-star") algorithm.
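+
+    A minimal illustrative sketch; on an unweighted path graph every edge
+    contributes weight 1:
+
+    >>> nx.astar_path_length(nx.path_graph(5), 0, 4)
+    4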
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+       Starting node for path
+
+    target : node
+       Ending node for path
+
+    heuristic : function
+       A function to evaluate the estimate of the distance
+       from a node to the target. The function takes
+       two node arguments and must return a number.
+       If the heuristic is inadmissible (if it might
+       overestimate the cost of reaching the goal from a node),
+       the result may not be a shortest path.
+       The algorithm does not support updating heuristic
+       values for the same node due to caching the first
+       heuristic calculation per node.
+
+    weight : string or function
+       If this is a string, then edge weights will be accessed via the
+       edge attribute with this key (that is, the weight of the edge
+       joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+       such edge attribute exists, the weight of the edge is assumed to
+       be one.
+       If this is a function, the weight of an edge is the value
+       returned by the function. The function must accept exactly three
+       positional arguments: the two endpoints of an edge and the
+       dictionary of edge attributes for that edge. The function must
+       return a number or None to indicate a hidden edge.
+
+    cutoff : float, optional
+       If this is provided, the search will be bounded to this value. That is,
+       if the evaluation function surpasses this value for a node n, the node
+       will not be expanded further and will be ignored. More formally, let
+       h'(n) be the heuristic function, and g(n) be the cost of reaching n from
+       the source node. Then, if g(n) + h'(n) > cutoff, the node will not be
+       explored further.
+       Note that if the heuristic is inadmissible, it is possible that paths
+       are ignored even though they satisfy the cutoff.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    See Also
+    --------
+    astar_path
+
+    """
+    if source not in G or target not in G:
+        msg = f"Either source {source} or target {target} is not in G"
+        raise nx.NodeNotFound(msg)
+
+    weight = _weight_function(G, weight)
+    path = astar_path(G, source, target, heuristic, weight, cutoff=cutoff)
+    return sum(weight(u, v, G[u][v]) for u, v in zip(path[:-1], path[1:]))
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py
new file mode 100644
index 0000000..d259d8c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/dense.py
@@ -0,0 +1,264 @@
+"""Floyd-Warshall algorithm for shortest paths."""
+
+import networkx as nx
+
+__all__ = [
+    "floyd_warshall",
+    "floyd_warshall_predecessor_and_distance",
+    "reconstruct_path",
+    "floyd_warshall_numpy",
+]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def floyd_warshall_numpy(G, nodelist=None, weight="weight"):
+    """Find all-pairs shortest path lengths using Floyd's algorithm.
+
+    This algorithm for finding shortest paths takes advantage of
+    matrix representations of a graph and works well for dense
+    graphs where all-pairs shortest path lengths are desired.
+    The results are returned as a NumPy array, distance[i, j],
+    where i and j are the indexes of two nodes in nodelist.
+    The entry distance[i, j] is the distance along a shortest
+    path from i to j. If no path exists the distance is Inf.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    nodelist : list, optional (default=G.nodes)
+       The rows and columns are ordered by the nodes in nodelist.
+       If nodelist is None then the ordering is produced by G.nodes.
+ Nodelist should include all nodes in G. + + weight: string, optional (default='weight') + Edge data key corresponding to the edge weight. + + Returns + ------- + distance : 2D numpy.ndarray + A numpy array of shortest path distances between nodes. + If there is no path between two nodes the value is Inf. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... [(0, 1, 5), (1, 2, 2), (2, 3, -3), (1, 3, 10), (3, 2, 8)] + ... ) + >>> nx.floyd_warshall_numpy(G) + array([[ 0., 5., 7., 4.], + [inf, 0., 2., -1.], + [inf, inf, 0., -3.], + [inf, inf, 8., 0.]]) + + Notes + ----- + Floyd's algorithm is appropriate for finding shortest paths in + dense graphs or graphs with negative weights when Dijkstra's + algorithm fails. This algorithm can still fail if there are negative + cycles. It has running time $O(n^3)$ with running space of $O(n^2)$. + + Raises + ------ + NetworkXError + If nodelist is not a list of the nodes in G. + """ + import numpy as np + + if nodelist is not None: + if not (len(nodelist) == len(G) == len(set(nodelist))): + raise nx.NetworkXError( + "nodelist must contain every node in G with no repeats." + "If you wanted a subgraph of G use G.subgraph(nodelist)" + ) + + # To handle cases when an edge has weight=0, we must make sure that + # nonedges are not given the value 0 as well. + A = nx.to_numpy_array( + G, nodelist, multigraph_weight=min, weight=weight, nonedge=np.inf + ) + n, m = A.shape + np.fill_diagonal(A, 0) # diagonal elements should be zero + for i in range(n): + # The second term has the same shape as A due to broadcasting + A = np.minimum(A, A[i, :][np.newaxis, :] + A[:, i][:, np.newaxis]) + return A + + +@nx._dispatchable(edge_attrs="weight") +def floyd_warshall_predecessor_and_distance(G, weight="weight"): + """Find all-pairs shortest path lengths using Floyd's algorithm. + + Parameters + ---------- + G : NetworkX graph + + weight: string, optional (default= 'weight') + Edge data key corresponding to the edge weight. + + Returns + ------- + predecessor,distance : dictionaries + Dictionaries, keyed by source and target, of predecessors and distances + in the shortest path. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... [ + ... ("s", "u", 10), + ... ("s", "x", 5), + ... ("u", "v", 1), + ... ("u", "x", 2), + ... ("v", "y", 1), + ... ("x", "u", 3), + ... ("x", "v", 5), + ... ("x", "y", 2), + ... ("y", "s", 7), + ... ("y", "v", 6), + ... ] + ... ) + >>> predecessors, _ = nx.floyd_warshall_predecessor_and_distance(G) + >>> print(nx.reconstruct_path("s", "v", predecessors)) + ['s', 'x', 'u', 'v'] + + Notes + ----- + Floyd's algorithm is appropriate for finding shortest paths + in dense graphs or graphs with negative weights when Dijkstra's algorithm + fails. This algorithm can still fail if there are negative cycles. + It has running time $O(n^3)$ with running space of $O(n^2)$. 
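+
+    As an illustrative sketch, the distance dictionary returned alongside
+    the predecessors gives the corresponding shortest-path length for the
+    graph built above:
+
+    >>> _, dist = nx.floyd_warshall_predecessor_and_distance(G)
+    >>> dist["s"]["v"]
+    9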
+ + See Also + -------- + floyd_warshall + floyd_warshall_numpy + all_pairs_shortest_path + all_pairs_shortest_path_length + """ + from collections import defaultdict + + # dictionary-of-dictionaries representation for dist and pred + # use some defaultdict magick here + # for dist the default is the floating point inf value + dist = defaultdict(lambda: defaultdict(lambda: float("inf"))) + for u in G: + dist[u][u] = 0 + pred = defaultdict(dict) + # initialize path distance dictionary to be the adjacency matrix + # also set the distance to self to 0 (zero diagonal) + undirected = not G.is_directed() + for u, v, d in G.edges(data=True): + e_weight = d.get(weight, 1.0) + dist[u][v] = min(e_weight, dist[u][v]) + pred[u][v] = u + if undirected: + dist[v][u] = min(e_weight, dist[v][u]) + pred[v][u] = v + for w in G: + dist_w = dist[w] # save recomputation + for u in G: + dist_u = dist[u] # save recomputation + for v in G: + d = dist_u[w] + dist_w[v] + if dist_u[v] > d: + dist_u[v] = d + pred[u][v] = pred[w][v] + return dict(pred), dict(dist) + + +@nx._dispatchable(graphs=None) +def reconstruct_path(source, target, predecessors): + """Reconstruct a path from source to target using the predecessors + dict as returned by floyd_warshall_predecessor_and_distance + + Parameters + ---------- + source : node + Starting node for path + + target : node + Ending node for path + + predecessors: dictionary + Dictionary, keyed by source and target, of predecessors in the + shortest path, as returned by floyd_warshall_predecessor_and_distance + + Returns + ------- + path : list + A list of nodes containing the shortest path from source to target + + If source and target are the same, an empty list is returned + + Notes + ----- + This function is meant to give more applicability to the + floyd_warshall_predecessor_and_distance function + + See Also + -------- + floyd_warshall_predecessor_and_distance + """ + if source == target: + return [] + prev = predecessors[source] + curr = prev[target] + path = [target, curr] + while curr != source: + curr = prev[curr] + path.append(curr) + return list(reversed(path)) + + +@nx._dispatchable(edge_attrs="weight") +def floyd_warshall(G, weight="weight"): + """Find all-pairs shortest path lengths using Floyd's algorithm. + + Parameters + ---------- + G : NetworkX graph + + weight: string, optional (default= 'weight') + Edge data key corresponding to the edge weight. + + + Returns + ------- + distance : dict + A dictionary, keyed by source and target, of shortest paths distances + between nodes. + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from( + ... [(0, 1, 5), (1, 2, 2), (2, 3, -3), (1, 3, 10), (3, 2, 8)] + ... ) + >>> fw = nx.floyd_warshall(G, weight="weight") + >>> results = {a: dict(b) for a, b in fw.items()} + >>> pprint(results) + {0: {0: 0, 1: 5, 2: 7, 3: 4}, + 1: {0: inf, 1: 0, 2: 2, 3: -1}, + 2: {0: inf, 1: inf, 2: 0, 3: -3}, + 3: {0: inf, 1: inf, 2: 8, 3: 0}} + + Notes + ----- + Floyd's algorithm is appropriate for finding shortest paths + in dense graphs or graphs with negative weights when Dijkstra's algorithm + fails. This algorithm can still fail if there are negative cycles. + It has running time $O(n^3)$ with running space of $O(n^2)$. 
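+
+    As an illustrative sketch, individual entries of the ``results`` mapping
+    computed above can be read off directly:
+
+    >>> results[0][3]
+    4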
+ + See Also + -------- + floyd_warshall_predecessor_and_distance + floyd_warshall_numpy + all_pairs_shortest_path + all_pairs_shortest_path_length + """ + # could make this its own function to reduce memory costs + return floyd_warshall_predecessor_and_distance(G, weight=weight)[1] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py new file mode 100644 index 0000000..660032f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/generic.py @@ -0,0 +1,713 @@ +""" +Compute the shortest paths and path lengths between nodes in the graph. + +These algorithms work with undirected and directed graphs. + +""" + +import networkx as nx + +__all__ = [ + "shortest_path", + "all_shortest_paths", + "single_source_all_shortest_paths", + "all_pairs_all_shortest_paths", + "shortest_path_length", + "average_shortest_path_length", + "has_path", +] + + +@nx._dispatchable +def has_path(G, source, target): + """Returns *True* if *G* has a path from *source* to *target*. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path + + target : node + Ending node for path + """ + try: + nx.shortest_path(G, source, target) + except nx.NetworkXNoPath: + return False + return True + + +@nx._dispatchable(edge_attrs="weight") +def shortest_path(G, source=None, target=None, weight=None, method="dijkstra"): + """Compute shortest paths in the graph. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Starting node for path. If not specified, compute shortest + paths for each possible starting node. + + target : node, optional + Ending node for path. If not specified, compute shortest + paths to all possible nodes. + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + + Returns + ------- + path: list or dictionary or iterator + All returned paths include both the source and target in the path. + + If the source and target are both specified, return a single list + of nodes in a shortest path from the source to the target. + + If only the source is specified, return a dictionary keyed by + targets with a list of nodes in a shortest path from the source + to one of the targets. + + If only the target is specified, return a dictionary keyed by + sources with a list of nodes in a shortest path from one of the + sources to the target. + + If neither the source nor target are specified, return an iterator + over (source, dictionary) where dictionary is keyed by target to + list of nodes in a shortest path from the source to the target. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + ValueError + If `method` is not among the supported options. 
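+
+    As an illustrative sketch, the `method` keyword selects the underlying
+    algorithm when a weight key is supplied:
+
+    >>> nx.shortest_path(nx.path_graph(3), 0, 2, weight="weight", method="bellman-ford")
+    [0, 1, 2]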
+ + Examples + -------- + >>> G = nx.path_graph(5) + >>> print(nx.shortest_path(G, source=0, target=4)) + [0, 1, 2, 3, 4] + >>> p = nx.shortest_path(G, source=0) # target not specified + >>> p[3] # shortest path from source=0 to target=3 + [0, 1, 2, 3] + >>> p = nx.shortest_path(G, target=4) # source not specified + >>> p[1] # shortest path from source=1 to target=4 + [1, 2, 3, 4] + >>> p = dict(nx.shortest_path(G)) # source, target not specified + >>> p[2][4] # shortest path from source=2 to target=4 + [2, 3, 4] + + Notes + ----- + There may be more than one shortest path between a source and target. + This returns only one of them. + + See Also + -------- + all_pairs_shortest_path + all_pairs_dijkstra_path + all_pairs_bellman_ford_path + single_source_shortest_path + single_source_dijkstra_path + single_source_bellman_ford_path + """ + if method not in ("dijkstra", "bellman-ford"): + # so we don't need to check in each branch later + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method + if source is None: + if target is None: + # Find paths between all pairs. Iterator of dicts. + if method == "unweighted": + paths = nx.all_pairs_shortest_path(G) + elif method == "dijkstra": + paths = nx.all_pairs_dijkstra_path(G, weight=weight) + else: # method == 'bellman-ford': + paths = nx.all_pairs_bellman_ford_path(G, weight=weight) + else: + # Find paths from all nodes co-accessible to the target. + if G.is_directed(): + G = G.reverse(copy=False) + if method == "unweighted": + paths = nx.single_source_shortest_path(G, target) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, target, weight=weight) + else: # method == 'bellman-ford': + paths = nx.single_source_bellman_ford_path(G, target, weight=weight) + # Now flip the paths so they go from a source to the target. + for target in paths: + paths[target] = list(reversed(paths[target])) + else: + if target is None: + # Find paths to all nodes accessible from the source. + if method == "unweighted": + paths = nx.single_source_shortest_path(G, source) + elif method == "dijkstra": + paths = nx.single_source_dijkstra_path(G, source, weight=weight) + else: # method == 'bellman-ford': + paths = nx.single_source_bellman_ford_path(G, source, weight=weight) + else: + # Find shortest source-target path. + if method == "unweighted": + paths = nx.bidirectional_shortest_path(G, source, target) + elif method == "dijkstra": + _, paths = nx.bidirectional_dijkstra(G, source, target, weight) + else: # method == 'bellman-ford': + paths = nx.bellman_ford_path(G, source, target, weight) + return paths + + +@nx._dispatchable(edge_attrs="weight") +def shortest_path_length(G, source=None, target=None, weight=None, method="dijkstra"): + """Compute shortest path lengths in the graph. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Starting node for path. + If not specified, compute shortest path lengths using all nodes as + source nodes. + + target : node, optional + Ending node for path. + If not specified, compute shortest path lengths using all nodes as + target nodes. + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. 
The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path length. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + + Returns + ------- + length: number or iterator + If the source and target are both specified, return the length of + the shortest path from the source to the target. + + If only the source is specified, return a dict keyed by target + to the shortest path length from the source to that target. + + If only the target is specified, return a dict keyed by source + to the shortest path length from that source to the target. + + If neither the source nor target are specified, return an iterator + over (source, dictionary) where dictionary is keyed by target to + shortest path length from source to that target. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + NetworkXNoPath + If no path exists between source and target. + + ValueError + If `method` is not among the supported options. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.shortest_path_length(G, source=0, target=4) + 4 + >>> p = nx.shortest_path_length(G, source=0) # target not specified + >>> p[4] + 4 + >>> p = nx.shortest_path_length(G, target=4) # source not specified + >>> p[0] + 4 + >>> p = dict(nx.shortest_path_length(G)) # source,target not specified + >>> p[0][4] + 4 + + Notes + ----- + The length of the path is always 1 less than the number of nodes involved + in the path since the length measures the number of edges followed. + + For digraphs this returns the shortest directed path length. To find path + lengths in the reverse direction use G.reverse(copy=False) first to flip + the edge orientation. + + See Also + -------- + all_pairs_shortest_path_length + all_pairs_dijkstra_path_length + all_pairs_bellman_ford_path_length + single_source_shortest_path_length + single_source_dijkstra_path_length + single_source_bellman_ford_path_length + """ + if method not in ("dijkstra", "bellman-ford"): + # so we don't need to check in each branch later + raise ValueError(f"method not supported: {method}") + method = "unweighted" if weight is None else method + if source is None: + if target is None: + # Find paths between all pairs. + if method == "unweighted": + paths = nx.all_pairs_shortest_path_length(G) + elif method == "dijkstra": + paths = nx.all_pairs_dijkstra_path_length(G, weight=weight) + else: # method == 'bellman-ford': + paths = nx.all_pairs_bellman_ford_path_length(G, weight=weight) + else: + # Find paths from all nodes co-accessible to the target. + if G.is_directed(): + G = G.reverse(copy=False) + if method == "unweighted": + path_length = nx.single_source_shortest_path_length + paths = path_length(G, target) + elif method == "dijkstra": + path_length = nx.single_source_dijkstra_path_length + paths = path_length(G, target, weight=weight) + else: # method == 'bellman-ford': + path_length = nx.single_source_bellman_ford_path_length + paths = path_length(G, target, weight=weight) + else: + if target is None: + # Find paths to all nodes accessible from the source. 
+ if method == "unweighted": + paths = nx.single_source_shortest_path_length(G, source) + elif method == "dijkstra": + path_length = nx.single_source_dijkstra_path_length + paths = path_length(G, source, weight=weight) + else: # method == 'bellman-ford': + path_length = nx.single_source_bellman_ford_path_length + paths = path_length(G, source, weight=weight) + else: + # Find shortest source-target path. + if method == "unweighted": + p = nx.bidirectional_shortest_path(G, source, target) + paths = len(p) - 1 + elif method == "dijkstra": + paths = nx.dijkstra_path_length(G, source, target, weight) + else: # method == 'bellman-ford': + paths = nx.bellman_ford_path_length(G, source, target, weight) + return paths + + +@nx._dispatchable(edge_attrs="weight") +def average_shortest_path_length(G, weight=None, method=None): + r"""Returns the average shortest path length. + + The average shortest path length is + + .. math:: + + a =\sum_{\substack{s,t \in V \\ s\neq t}} \frac{d(s, t)}{n(n-1)} + + where `V` is the set of nodes in `G`, + `d(s, t)` is the shortest path from `s` to `t`, + and `n` is the number of nodes in `G`. + + .. versionchanged:: 3.0 + An exception is raised for directed graphs that are not strongly + connected. + + Parameters + ---------- + G : NetworkX graph + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'unweighted' or 'dijkstra') + The algorithm to use to compute the path lengths. + Supported options are 'unweighted', 'dijkstra', 'bellman-ford', + 'floyd-warshall' and 'floyd-warshall-numpy'. + Other method values produce a ValueError. + The default method is 'unweighted' if `weight` is None, + otherwise the default method is 'dijkstra'. + + Raises + ------ + NetworkXPointlessConcept + If `G` is the null graph (that is, the graph on zero nodes). + + NetworkXError + If `G` is not connected (or not strongly connected, in the case + of a directed graph). + + ValueError + If `method` is not among the supported options. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.average_shortest_path_length(G) + 2.0 + + For disconnected graphs, you can compute the average shortest path + length for each component + + >>> G = nx.Graph([(1, 2), (3, 4)]) + >>> for C in (G.subgraph(c).copy() for c in nx.connected_components(G)): + ... print(nx.average_shortest_path_length(C)) + 1.0 + 1.0 + + """ + single_source_methods = ["unweighted", "dijkstra", "bellman-ford"] + all_pairs_methods = ["floyd-warshall", "floyd-warshall-numpy"] + supported_methods = single_source_methods + all_pairs_methods + + if method is None: + method = "unweighted" if weight is None else "dijkstra" + if method not in supported_methods: + raise ValueError(f"method not supported: {method}") + + n = len(G) + # For the special case of the null graph, raise an exception, since + # there are no paths in the null graph. + if n == 0: + msg = ( + "the null graph has no paths, thus there is no average shortest path length" + ) + raise nx.NetworkXPointlessConcept(msg) + # For the special case of the trivial graph, return zero immediately. 
+ if n == 1: + return 0 + # Shortest path length is undefined if the graph is not strongly connected. + if G.is_directed() and not nx.is_strongly_connected(G): + raise nx.NetworkXError("Graph is not strongly connected.") + # Shortest path length is undefined if the graph is not connected. + if not G.is_directed() and not nx.is_connected(G): + raise nx.NetworkXError("Graph is not connected.") + + # Compute all-pairs shortest paths. + def path_length(v): + if method == "unweighted": + return nx.single_source_shortest_path_length(G, v) + elif method == "dijkstra": + return nx.single_source_dijkstra_path_length(G, v, weight=weight) + elif method == "bellman-ford": + return nx.single_source_bellman_ford_path_length(G, v, weight=weight) + + if method in single_source_methods: + # Sum the distances for each (ordered) pair of source and target node. + s = sum(l for u in G for l in path_length(u).values()) + else: + if method == "floyd-warshall": + all_pairs = nx.floyd_warshall(G, weight=weight) + s = sum(sum(t.values()) for t in all_pairs.values()) + elif method == "floyd-warshall-numpy": + s = float(nx.floyd_warshall_numpy(G, weight=weight).sum()) + return s / (n * (n - 1)) + + +@nx._dispatchable(edge_attrs="weight") +def all_shortest_paths(G, source, target, weight=None, method="dijkstra"): + """Compute all shortest simple paths in the graph. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path. + + target : node + Ending node for path. + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path lengths. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + + Returns + ------- + paths : generator of lists + A generator of all paths between source and target. + + Raises + ------ + ValueError + If `method` is not among the supported options. + + NetworkXNoPath + If `target` cannot be reached from `source`. + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_path(G, [0, 1, 2]) + >>> nx.add_path(G, [0, 10, 2]) + >>> print([p for p in nx.all_shortest_paths(G, source=0, target=2)]) + [[0, 1, 2], [0, 10, 2]] + + Notes + ----- + There may be many shortest paths between the source and target. If G + contains zero-weight cycles, this function will not produce all shortest + paths because doing so would produce infinitely many paths of unbounded + length -- instead, we only produce the shortest simple paths. 
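+
+    As an illustrative sketch, the same enumeration works for weighted
+    graphs via Dijkstra:
+
+    >>> H = nx.Graph()
+    >>> nx.add_path(H, [0, 1, 2], weight=1)
+    >>> nx.add_path(H, [0, 10, 2], weight=1)
+    >>> sorted(nx.all_shortest_paths(H, 0, 2, weight="weight"))
+    [[0, 1, 2], [0, 10, 2]]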
+ + See Also + -------- + shortest_path + single_source_shortest_path + all_pairs_shortest_path + """ + method = "unweighted" if weight is None else method + if method == "unweighted": + pred = nx.predecessor(G, source) + elif method == "dijkstra": + pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight) + elif method == "bellman-ford": + pred, dist = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight) + else: + raise ValueError(f"method not supported: {method}") + + return _build_paths_from_predecessors({source}, target, pred) + + +@nx._dispatchable(edge_attrs="weight") +def single_source_all_shortest_paths(G, source, weight=None, method="dijkstra"): + """Compute all shortest simple paths from the given source in the graph. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path. + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path lengths. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + + Returns + ------- + paths : generator of dictionary + A generator of all paths between source and all nodes in the graph. + + Raises + ------ + ValueError + If `method` is not among the supported options. + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_path(G, [0, 1, 2, 3, 0]) + >>> dict(nx.single_source_all_shortest_paths(G, source=0)) + {0: [[0]], 1: [[0, 1]], 3: [[0, 3]], 2: [[0, 1, 2], [0, 3, 2]]} + + Notes + ----- + There may be many shortest paths between the source and target. If G + contains zero-weight cycles, this function will not produce all shortest + paths because doing so would produce infinitely many paths of unbounded + length -- instead, we only produce the shortest simple paths. + + See Also + -------- + shortest_path + all_shortest_paths + single_source_shortest_path + all_pairs_shortest_path + all_pairs_all_shortest_paths + """ + method = "unweighted" if weight is None else method + if method == "unweighted": + pred = nx.predecessor(G, source) + elif method == "dijkstra": + pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight) + elif method == "bellman-ford": + pred, dist = nx.bellman_ford_predecessor_and_distance(G, source, weight=weight) + else: + raise ValueError(f"method not supported: {method}") + for n in pred: + yield n, list(_build_paths_from_predecessors({source}, n, pred)) + + +@nx._dispatchable(edge_attrs="weight") +def all_pairs_all_shortest_paths(G, weight=None, method="dijkstra"): + """Compute all shortest paths between all nodes. + + Parameters + ---------- + G : NetworkX graph + + weight : None, string or function, optional (default = None) + If None, every edge has weight/distance/cost 1. + If a string, use this edge attribute as the edge weight. + Any edge attribute not present defaults to 1. + If this is a function, the weight of an edge is the value + returned by the function. 
The function must accept exactly + three positional arguments: the two endpoints of an edge and + the dictionary of edge attributes for that edge. + The function must return a number. + + method : string, optional (default = 'dijkstra') + The algorithm to use to compute the path lengths. + Supported options: 'dijkstra', 'bellman-ford'. + Other inputs produce a ValueError. + If `weight` is None, unweighted graph methods are used, and this + suggestion is ignored. + + Returns + ------- + paths : generator of dictionary + Dictionary of arrays, keyed by source and target, of all shortest paths. + + Raises + ------ + ValueError + If `method` is not among the supported options. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> dict(nx.all_pairs_all_shortest_paths(G))[0][2] + [[0, 1, 2], [0, 3, 2]] + >>> dict(nx.all_pairs_all_shortest_paths(G))[0][3] + [[0, 3]] + + Notes + ----- + There may be multiple shortest paths with equal lengths. Unlike + all_pairs_shortest_path, this method returns all shortest paths. + + See Also + -------- + all_pairs_shortest_path + single_source_all_shortest_paths + """ + for n in G: + yield ( + n, + dict(single_source_all_shortest_paths(G, n, weight=weight, method=method)), + ) + + +def _build_paths_from_predecessors(sources, target, pred): + """Compute all simple paths to target, given the predecessors found in + pred, terminating when any source in sources is found. + + Parameters + ---------- + sources : set + Starting nodes for path. + + target : node + Ending node for path. + + pred : dict + A dictionary of predecessor lists, keyed by node + + Returns + ------- + paths : generator of lists + A generator of all paths between source and target. + + Raises + ------ + NetworkXNoPath + If `target` cannot be reached from `source`. + + Notes + ----- + There may be many paths between the sources and target. If there are + cycles among the predecessors, this function will not produce all + possible paths because doing so would produce infinitely many paths + of unbounded length -- instead, we only produce simple paths. 
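+
+    A hypothetical doctest sketch, using a predecessor dict as produced by
+    ``nx.predecessor``:
+
+    >>> G = nx.cycle_graph(4)
+    >>> pred = nx.predecessor(G, 0)
+    >>> sorted(_build_paths_from_predecessors({0}, 2, pred))
+    [[0, 1, 2], [0, 3, 2]]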
+ + See Also + -------- + shortest_path + single_source_shortest_path + all_pairs_shortest_path + all_shortest_paths + bellman_ford_path + """ + if target not in pred: + raise nx.NetworkXNoPath(f"Target {target} cannot be reached from given sources") + + seen = {target} + stack = [[target, 0]] + top = 0 + while top >= 0: + node, i = stack[top] + if node in sources: + yield [p for p, n in reversed(stack[: top + 1])] + if len(pred[node]) > i: + stack[top][1] = i + 1 + next = pred[node][i] + if next in seen: + continue + else: + seen.add(next) + top += 1 + if top == len(stack): + stack.append([next, 0]) + else: + stack[top][:] = [next, 0] + else: + seen.discard(node) + top -= 1 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..e0440a4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_astar.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_astar.cpython-312.pyc new file mode 100644 index 0000000..4207308 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_astar.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense.cpython-312.pyc new file mode 100644 index 0000000..11b1b04 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense_numpy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense_numpy.cpython-312.pyc new file mode 100644 index 0000000..7140d46 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_dense_numpy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_generic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_generic.cpython-312.pyc new file mode 100644 index 0000000..ea4f258 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_generic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_unweighted.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_unweighted.cpython-312.pyc new file mode 100644 index 0000000..03635e9 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_unweighted.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_weighted.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_weighted.cpython-312.pyc new file mode 100644 index 0000000..a940c65 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/__pycache__/test_weighted.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py new file mode 100644 index 0000000..3abf211 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_astar.py @@ -0,0 +1,254 @@ +import pytest + +import networkx as nx +from networkx.utils import pairwise + + +class TestAStar: + @classmethod + def setup_class(cls): + edges = [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + cls.XG = nx.DiGraph() + cls.XG.add_weighted_edges_from(edges) + + def test_multiple_optimal_paths(self): + """Tests that A* algorithm finds any of multiple optimal paths""" + heuristic_values = {"a": 1.35, "b": 1.18, "c": 0.67, "d": 0} + + def h(u, v): + return heuristic_values[u] + + graph = nx.Graph() + points = ["a", "b", "c", "d"] + edges = [("a", "b", 0.18), ("a", "c", 0.68), ("b", "c", 0.50), ("c", "d", 0.67)] + + graph.add_nodes_from(points) + graph.add_weighted_edges_from(edges) + + path1 = ["a", "c", "d"] + path2 = ["a", "b", "c", "d"] + assert nx.astar_path(graph, "a", "d", h) in (path1, path2) + + def test_astar_directed(self): + assert nx.astar_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(self.XG, "s", "v") == 9 + + def test_astar_directed_weight_function(self): + def w1(u, v, d): + return d["weight"] + + assert nx.astar_path(self.XG, "x", "u", weight=w1) == ["x", "u"] + assert nx.astar_path_length(self.XG, "x", "u", weight=w1) == 3 + assert nx.astar_path(self.XG, "s", "v", weight=w1) == ["s", "x", "u", "v"] + assert nx.astar_path_length(self.XG, "s", "v", weight=w1) == 9 + + def w2(u, v, d): + return None if (u, v) == ("x", "u") else d["weight"] + + assert nx.astar_path(self.XG, "x", "u", weight=w2) == ["x", "y", "s", "u"] + assert nx.astar_path_length(self.XG, "x", "u", weight=w2) == 19 + assert nx.astar_path(self.XG, "s", "v", weight=w2) == ["s", "x", "v"] + assert nx.astar_path_length(self.XG, "s", "v", weight=w2) == 10 + + def w3(u, v, d): + return d["weight"] + 10 + + assert nx.astar_path(self.XG, "x", "u", weight=w3) == ["x", "u"] + assert nx.astar_path_length(self.XG, "x", "u", weight=w3) == 13 + assert nx.astar_path(self.XG, "s", "v", weight=w3) == ["s", "x", "v"] + assert nx.astar_path_length(self.XG, "s", "v", weight=w3) == 30 + + def test_astar_multigraph(self): + G = nx.MultiDiGraph(self.XG) + G.add_weighted_edges_from((u, v, 1000) for (u, v) in list(G.edges())) + assert nx.astar_path(G, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(G, "s", "v") == 9 + + def test_astar_undirected(self): + GG = self.XG.to_undirected() + # make sure we get lower weight + # to_undirected might choose either edge with weight 2 or weight 3 + GG["u"]["x"]["weight"] = 2 + 
GG["y"]["v"]["weight"] = 2 + assert nx.astar_path(GG, "s", "v") == ["s", "x", "u", "v"] + assert nx.astar_path_length(GG, "s", "v") == 8 + + def test_astar_directed2(self): + XG2 = nx.DiGraph() + edges = [ + (1, 4, 1), + (4, 5, 1), + (5, 6, 1), + (6, 3, 1), + (1, 3, 50), + (1, 2, 100), + (2, 3, 100), + ] + XG2.add_weighted_edges_from(edges) + assert nx.astar_path(XG2, 1, 3) == [1, 4, 5, 6, 3] + + def test_astar_undirected2(self): + XG3 = nx.Graph() + edges = [(0, 1, 2), (1, 2, 12), (2, 3, 1), (3, 4, 5), (4, 5, 1), (5, 0, 10)] + XG3.add_weighted_edges_from(edges) + assert nx.astar_path(XG3, 0, 3) == [0, 1, 2, 3] + assert nx.astar_path_length(XG3, 0, 3) == 15 + + def test_astar_undirected3(self): + XG4 = nx.Graph() + edges = [ + (0, 1, 2), + (1, 2, 2), + (2, 3, 1), + (3, 4, 1), + (4, 5, 1), + (5, 6, 1), + (6, 7, 1), + (7, 0, 1), + ] + XG4.add_weighted_edges_from(edges) + assert nx.astar_path(XG4, 0, 2) == [0, 1, 2] + assert nx.astar_path_length(XG4, 0, 2) == 4 + + """ Tests that A* finds correct path when multiple paths exist + and the best one is not expanded first (GH issue #3464) + """ + + def test_astar_directed3(self): + heuristic_values = {"n5": 36, "n2": 4, "n1": 0, "n0": 0} + + def h(u, v): + return heuristic_values[u] + + edges = [("n5", "n1", 11), ("n5", "n2", 9), ("n2", "n1", 1), ("n1", "n0", 32)] + graph = nx.DiGraph() + graph.add_weighted_edges_from(edges) + answer = ["n5", "n2", "n1", "n0"] + assert nx.astar_path(graph, "n5", "n0", h) == answer + + """ Tests that parent is not wrongly overridden when a node + is re-explored multiple times. + """ + + def test_astar_directed4(self): + edges = [ + ("a", "b", 1), + ("a", "c", 1), + ("b", "d", 2), + ("c", "d", 1), + ("d", "e", 1), + ] + graph = nx.DiGraph() + graph.add_weighted_edges_from(edges) + assert nx.astar_path(graph, "a", "e") == ["a", "c", "d", "e"] + + # >>> MXG4=NX.MultiGraph(XG4) + # >>> MXG4.add_edge(0,1,3) + # >>> NX.dijkstra_path(MXG4,0,2) + # [0, 1, 2] + + def test_astar_w1(self): + G = nx.DiGraph() + G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "w"), + ("w", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) + assert nx.astar_path(G, "s", "v") == ["s", "u", "v"] + assert nx.astar_path_length(G, "s", "v") == 2 + + def test_astar_nopath(self): + with pytest.raises(nx.NodeNotFound): + nx.astar_path(self.XG, "s", "moon") + + def test_astar_cutoff(self): + with pytest.raises(nx.NetworkXNoPath): + # optimal path_length in XG is 9 + nx.astar_path(self.XG, "s", "v", cutoff=8.0) + with pytest.raises(nx.NetworkXNoPath): + nx.astar_path_length(self.XG, "s", "v", cutoff=8.0) + + def test_astar_admissible_heuristic_with_cutoff(self): + heuristic_values = {"s": 36, "y": 4, "x": 0, "u": 0, "v": 0} + + def h(u, v): + return heuristic_values[u] + + assert nx.astar_path_length(self.XG, "s", "v") == 9 + assert nx.astar_path_length(self.XG, "s", "v", heuristic=h) == 9 + assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=12) == 9 + assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=9) == 9 + with pytest.raises(nx.NetworkXNoPath): + nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=8) + + def test_astar_inadmissible_heuristic_with_cutoff(self): + heuristic_values = {"s": 36, "y": 14, "x": 10, "u": 10, "v": 0} + + def h(u, v): + return heuristic_values[u] + + # optimal path_length in XG is 9. This heuristic gives over-estimate. 
+ assert nx.astar_path_length(self.XG, "s", "v", heuristic=h) == 10 + assert nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=15) == 10 + with pytest.raises(nx.NetworkXNoPath): + nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=9) + with pytest.raises(nx.NetworkXNoPath): + nx.astar_path_length(self.XG, "s", "v", heuristic=h, cutoff=12) + + def test_astar_cutoff2(self): + assert nx.astar_path(self.XG, "s", "v", cutoff=10.0) == ["s", "x", "u", "v"] + assert nx.astar_path_length(self.XG, "s", "v") == 9 + + def test_cycle(self): + C = nx.cycle_graph(7) + assert nx.astar_path(C, 0, 3) == [0, 1, 2, 3] + assert nx.dijkstra_path(C, 0, 4) == [0, 6, 5, 4] + + def test_unorderable_nodes(self): + """Tests that A* accommodates nodes that are not orderable. + + For more information, see issue #554. + + """ + # Create the cycle graph on four nodes, with nodes represented + # as (unorderable) Python objects. + nodes = [object() for n in range(4)] + G = nx.Graph() + G.add_edges_from(pairwise(nodes, cyclic=True)) + path = nx.astar_path(G, nodes[0], nodes[2]) + assert len(path) == 3 + + def test_astar_NetworkXNoPath(self): + """Tests that exception is raised when there exists no + path between source and target""" + G = nx.gnp_random_graph(10, 0.2, seed=10) + with pytest.raises(nx.NetworkXNoPath): + nx.astar_path(G, 4, 9) + + def test_astar_NodeNotFound(self): + """Tests that exception is raised when either + source or target is not in graph""" + G = nx.gnp_random_graph(10, 0.2, seed=10) + with pytest.raises(nx.NodeNotFound): + nx.astar_path_length(G, 11, 9) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py new file mode 100644 index 0000000..6923bfe --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense.py @@ -0,0 +1,212 @@ +import pytest + +import networkx as nx + + +class TestFloyd: + @classmethod + def setup_class(cls): + pass + + def test_floyd_warshall_predecessor_and_distance(self): + XG = nx.DiGraph() + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + path, dist = nx.floyd_warshall_predecessor_and_distance(XG) + assert dist["s"]["v"] == 9 + assert path["s"]["v"] == "u" + assert dist == { + "y": {"y": 0, "x": 12, "s": 7, "u": 15, "v": 6}, + "x": {"y": 2, "x": 0, "s": 9, "u": 3, "v": 4}, + "s": {"y": 7, "x": 5, "s": 0, "u": 8, "v": 9}, + "u": {"y": 2, "x": 2, "s": 9, "u": 0, "v": 1}, + "v": {"y": 1, "x": 13, "s": 8, "u": 16, "v": 0}, + } + + GG = XG.to_undirected() + # make sure we get lower weight + # to_undirected might choose either edge with weight 2 or weight 3 + GG["u"]["x"]["weight"] = 2 + path, dist = nx.floyd_warshall_predecessor_and_distance(GG) + assert dist["s"]["v"] == 8 + # skip this test, could be alternate path s-u-v + # assert_equal(path['s']['v'],'y') + + G = nx.DiGraph() # no weights + G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) + path, dist = nx.floyd_warshall_predecessor_and_distance(G) + assert dist["s"]["v"] == 2 + # skip this test, could be alternate path s-u-v + # assert_equal(path['s']['v'],'x') + + # alternate interface + dist = nx.floyd_warshall(G) + assert dist["s"]["v"] == 
2
+
+        # floyd_warshall_predecessor_and_distance returns
+        # dicts-of-defaultdicts
+        # make sure we don't get empty dictionary
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [("v", "x", 5.0), ("y", "x", 5.0), ("v", "y", 6.0), ("x", "u", 2.0)]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+        inf = float("inf")
+        assert dist == {
+            "v": {"v": 0, "x": 5.0, "y": 6.0, "u": 7.0},
+            "x": {"x": 0, "u": 2.0, "v": inf, "y": inf},
+            "y": {"y": 0, "x": 5.0, "v": inf, "u": 7.0},
+            "u": {"u": 0, "v": inf, "x": inf, "y": inf},
+        }
+        assert path == {
+            "v": {"x": "v", "y": "v", "u": "x"},
+            "x": {"u": "x"},
+            "y": {"x": "y", "u": "x"},
+        }
+
+    def test_reconstruct_path(self):
+        with pytest.raises(KeyError):
+            XG = nx.DiGraph()
+            XG.add_weighted_edges_from(
+                [
+                    ("s", "u", 10),
+                    ("s", "x", 5),
+                    ("u", "v", 1),
+                    ("u", "x", 2),
+                    ("v", "y", 1),
+                    ("x", "u", 3),
+                    ("x", "v", 5),
+                    ("x", "y", 2),
+                    ("y", "s", 7),
+                    ("y", "v", 6),
+                ]
+            )
+            predecessors, _ = nx.floyd_warshall_predecessor_and_distance(XG)
+
+            path = nx.reconstruct_path("s", "v", predecessors)
+            assert path == ["s", "x", "u", "v"]
+
+            path = nx.reconstruct_path("s", "s", predecessors)
+            assert path == []
+
+            # this part raises the keyError
+            nx.reconstruct_path("1", "2", predecessors)
+
+    def test_cycle(self):
+        path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7))
+        assert dist[0][3] == 3
+        assert path[0][3] == 2
+        assert dist[0][4] == 3
+
+    def test_weighted(self):
+        XG3 = nx.Graph()
+        XG3.add_weighted_edges_from(
+            [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG3)
+        assert dist[0][3] == 15
+        assert path[0][3] == 2
+
+    def test_weighted2(self):
+        XG4 = nx.Graph()
+        XG4.add_weighted_edges_from(
+            [
+                [0, 1, 2],
+                [1, 2, 2],
+                [2, 3, 1],
+                [3, 4, 1],
+                [4, 5, 1],
+                [5, 6, 1],
+                [6, 7, 1],
+                [7, 0, 1],
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG4)
+        assert dist[0][2] == 4
+        assert path[0][2] == 1
+
+    def test_weight_parameter(self):
+        XG4 = nx.Graph()
+        XG4.add_edges_from(
+            [
+                (0, 1, {"heavy": 2}),
+                (1, 2, {"heavy": 2}),
+                (2, 3, {"heavy": 1}),
+                (3, 4, {"heavy": 1}),
+                (4, 5, {"heavy": 1}),
+                (5, 6, {"heavy": 1}),
+                (6, 7, {"heavy": 1}),
+                (7, 0, {"heavy": 1}),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, weight="heavy")
+        assert dist[0][2] == 4
+        assert path[0][2] == 1
+
+    def test_zero_distance(self):
+        XG = nx.DiGraph()
+        XG.add_weighted_edges_from(
+            [
+                ("s", "u", 10),
+                ("s", "x", 5),
+                ("u", "v", 1),
+                ("u", "x", 2),
+                ("v", "y", 1),
+                ("x", "u", 3),
+                ("x", "v", 5),
+                ("x", "y", 2),
+                ("y", "s", 7),
+                ("y", "v", 6),
+            ]
+        )
+        path, dist = nx.floyd_warshall_predecessor_and_distance(XG)
+
+        for u in XG:
+            assert dist[u][u] == 0
+
+        GG = XG.to_undirected()
+        # make sure we get lower weight
+        # to_undirected might choose either edge with weight 2 or weight 3
+        GG["u"]["x"]["weight"] = 2
+        path, dist = nx.floyd_warshall_predecessor_and_distance(GG)
+
+        for u in GG:
+            assert dist[u][u] == 0
+
+    def test_zero_weight(self):
+        G = nx.DiGraph()
+        edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)]
+        G.add_weighted_edges_from(edges)
+        dist = nx.floyd_warshall(G)
+        assert dist[1][3] == -14
+
+        G = nx.MultiDiGraph()
+        edges.append((2, 5, -7))
+        G.add_weighted_edges_from(edges)
+        dist = nx.floyd_warshall(G)
+        assert dist[1][3] == -14
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py
b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py new file mode 100644 index 0000000..6eb95a5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py @@ -0,0 +1,88 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") + + +def test_cycle_numpy(): + dist = nx.floyd_warshall_numpy(nx.cycle_graph(7)) + assert dist[0, 3] == 3 + assert dist[0, 4] == 3 + + +def test_weighted_numpy_three_edges(): + XG3 = nx.Graph() + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) + dist = nx.floyd_warshall_numpy(XG3) + assert dist[0, 3] == 15 + + +def test_weighted_numpy_two_edges(): + XG4 = nx.Graph() + XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) + dist = nx.floyd_warshall_numpy(XG4) + assert dist[0, 2] == 4 + + +def test_weight_parameter_numpy(): + XG4 = nx.Graph() + XG4.add_edges_from( + [ + (0, 1, {"heavy": 2}), + (1, 2, {"heavy": 2}), + (2, 3, {"heavy": 1}), + (3, 4, {"heavy": 1}), + (4, 5, {"heavy": 1}), + (5, 6, {"heavy": 1}), + (6, 7, {"heavy": 1}), + (7, 0, {"heavy": 1}), + ] + ) + dist = nx.floyd_warshall_numpy(XG4, weight="heavy") + assert dist[0, 2] == 4 + + +def test_directed_cycle_numpy(): + G = nx.DiGraph() + nx.add_cycle(G, [0, 1, 2, 3]) + pred, dist = nx.floyd_warshall_predecessor_and_distance(G) + D = nx.utils.dict_to_numpy_array(dist) + np.testing.assert_equal(nx.floyd_warshall_numpy(G), D) + + +def test_zero_weight(): + G = nx.DiGraph() + edges = [(1, 2, -2), (2, 3, -4), (1, 5, 1), (5, 4, 0), (4, 3, -5), (2, 5, -7)] + G.add_weighted_edges_from(edges) + dist = nx.floyd_warshall_numpy(G) + assert int(np.min(dist)) == -14 + + G = nx.MultiDiGraph() + edges.append((2, 5, -7)) + G.add_weighted_edges_from(edges) + dist = nx.floyd_warshall_numpy(G) + assert int(np.min(dist)) == -14 + + +def test_nodelist(): + G = nx.path_graph(7) + dist = nx.floyd_warshall_numpy(G, nodelist=[3, 5, 4, 6, 2, 1, 0]) + assert dist[0, 3] == 3 + assert dist[0, 1] == 2 + assert dist[6, 2] == 4 + pytest.raises(nx.NetworkXError, nx.floyd_warshall_numpy, G, [1, 3]) + pytest.raises(nx.NetworkXError, nx.floyd_warshall_numpy, G, list(range(9))) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py new file mode 100644 index 0000000..578c83b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_generic.py @@ -0,0 +1,450 @@ +import pytest + +import networkx as nx + + +def validate_grid_path(r, c, s, t, p): + assert isinstance(p, list) + assert p[0] == s + assert p[-1] == t + s = ((s - 1) // c, (s - 1) % c) + t = ((t - 1) // c, (t - 1) % c) + assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1 + p = [((u - 1) // c, (u - 1) % c) for u in p] + for u in p: + assert 0 <= u[0] < r + assert 0 <= u[1] < c + for u, v in zip(p[:-1], p[1:]): + assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)] + + +class TestGenericPath: + @classmethod + def setup_class(cls): + from networkx import convert_node_labels_to_integers as cnlti + + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + cls.neg_weights = nx.DiGraph() + 
cls.neg_weights.add_edge(0, 1, weight=1) + cls.neg_weights.add_edge(0, 2, weight=3) + cls.neg_weights.add_edge(1, 3, weight=1) + cls.neg_weights.add_edge(2, 3, weight=-2) + + def test_shortest_path(self): + assert nx.shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3] + assert nx.shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4] + validate_grid_path(4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12)) + assert nx.shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] + # now with weights + assert nx.shortest_path(self.cycle, 0, 3, weight="weight") == [0, 1, 2, 3] + assert nx.shortest_path(self.cycle, 0, 4, weight="weight") == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.shortest_path(self.grid, 1, 12, weight="weight") + ) + assert nx.shortest_path(self.directed_cycle, 0, 3, weight="weight") == [ + 0, + 1, + 2, + 3, + ] + # weights and method specified + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="dijkstra" + ) == [0, 1, 2, 3] + assert nx.shortest_path( + self.directed_cycle, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 1, 2, 3] + # when Dijkstra's will probably (depending on precise implementation) + # incorrectly return [0, 1, 3] instead + assert nx.shortest_path( + self.neg_weights, 0, 3, weight="weight", method="bellman-ford" + ) == [0, 2, 3] + # confirm bad method rejection + pytest.raises(ValueError, nx.shortest_path, self.cycle, method="SPAM") + # confirm absent source rejection + pytest.raises(nx.NodeNotFound, nx.shortest_path, self.cycle, 8) + + def test_shortest_path_target(self): + answer = {0: [0, 1], 1: [1], 2: [2, 1]} + sp = nx.shortest_path(nx.path_graph(3), target=1) + assert sp == answer + # with weights + sp = nx.shortest_path(nx.path_graph(3), target=1, weight="weight") + assert sp == answer + # weights and method specified + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) + assert sp == answer + sp = nx.shortest_path( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) + assert sp == answer + + def test_shortest_path_length(self): + assert nx.shortest_path_length(self.cycle, 0, 3) == 3 + assert nx.shortest_path_length(self.grid, 1, 12) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4 + # now with weights + assert nx.shortest_path_length(self.cycle, 0, 3, weight="weight") == 3 + assert nx.shortest_path_length(self.grid, 1, 12, weight="weight") == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight="weight") == 4 + # weights and method specified + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="dijkstra" + ) + == 3 + ) + assert ( + nx.shortest_path_length( + self.cycle, 0, 3, weight="weight", method="bellman-ford" + ) + == 3 + ) + # confirm bad method rejection + pytest.raises(ValueError, nx.shortest_path_length, self.cycle, method="SPAM") + # confirm absent source rejection + pytest.raises(nx.NodeNotFound, nx.shortest_path_length, self.cycle, 8) + + def test_shortest_path_length_target(self): + answer = {0: 1, 1: 0, 2: 1} + sp = nx.shortest_path_length(nx.path_graph(3), target=1) + assert sp == answer + # with weights + sp = nx.shortest_path_length(nx.path_graph(3), target=1, weight="weight") + assert sp == answer + # weights and method specified + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="dijkstra" + ) + assert sp == answer + sp = nx.shortest_path_length( + nx.path_graph(3), target=1, weight="weight", method="bellman-ford" + ) + assert sp == answer + 
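+    def test_shortest_path_target_orientation(self):
+        # Illustrative sketch added in this revision (not from the upstream
+        # networkx test suite): with only `target` given, each value is a
+        # path from its key *to* the target, mirroring the source-only form.
+        sp = nx.shortest_path(nx.path_graph(4), target=2)
+        assert sp == {0: [0, 1, 2], 1: [1, 2], 2: [2], 3: [3, 2]}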
+ def test_single_source_shortest_path(self): + p = nx.shortest_path(self.cycle, 0) + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) + p = nx.shortest_path(self.grid, 1) + validate_grid_path(4, 4, 1, 12, p[12]) + # now with weights + p = nx.shortest_path(self.cycle, 0, weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_dijkstra_path(self.cycle, 0) + p = nx.shortest_path(self.grid, 1, weight="weight") + validate_grid_path(4, 4, 1, 12, p[12]) + # weights and method specified + p = nx.shortest_path(self.cycle, 0, method="dijkstra", weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) + p = nx.shortest_path(self.cycle, 0, method="bellman-ford", weight="weight") + assert p[3] == [0, 1, 2, 3] + assert p == nx.single_source_shortest_path(self.cycle, 0) + + def test_single_source_shortest_path_length(self): + ans = nx.shortest_path_length(self.cycle, 0) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == nx.single_source_shortest_path_length(self.cycle, 0) + ans = nx.shortest_path_length(self.grid, 1) + assert ans[16] == 6 + # now with weights + ans = nx.shortest_path_length(self.cycle, 0, weight="weight") + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == nx.single_source_dijkstra_path_length(self.cycle, 0) + ans = nx.shortest_path_length(self.grid, 1, weight="weight") + assert ans[16] == 6 + # weights and method specified + ans = dict( + nx.shortest_path_length(self.cycle, 0, weight="weight", method="dijkstra") + ) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == nx.single_source_dijkstra_path_length(self.cycle, 0) + ans = dict( + nx.shortest_path_length( + self.cycle, 0, weight="weight", method="bellman-ford" + ) + ) + assert ans == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert ans == nx.single_source_bellman_ford_path_length(self.cycle, 0) + + def test_single_source_all_shortest_paths(self): + cycle_ans = {0: [[0]], 1: [[0, 1]], 2: [[0, 1, 2], [0, 3, 2]], 3: [[0, 3]]} + ans = dict(nx.single_source_all_shortest_paths(nx.cycle_graph(4), 0)) + assert sorted(ans[2]) == cycle_ans[2] + ans = dict(nx.single_source_all_shortest_paths(self.grid, 1)) + grid_ans = [ + [1, 2, 3, 7, 11], + [1, 2, 6, 7, 11], + [1, 2, 6, 10, 11], + [1, 5, 6, 7, 11], + [1, 5, 6, 10, 11], + [1, 5, 9, 10, 11], + ] + assert sorted(ans[11]) == grid_ans + ans = dict( + nx.single_source_all_shortest_paths(nx.cycle_graph(4), 0, weight="weight") + ) + assert sorted(ans[2]) == cycle_ans[2] + ans = dict( + nx.single_source_all_shortest_paths( + nx.cycle_graph(4), 0, method="bellman-ford", weight="weight" + ) + ) + assert sorted(ans[2]) == cycle_ans[2] + ans = dict(nx.single_source_all_shortest_paths(self.grid, 1, weight="weight")) + assert sorted(ans[11]) == grid_ans + ans = dict( + nx.single_source_all_shortest_paths( + self.grid, 1, method="bellman-ford", weight="weight" + ) + ) + assert sorted(ans[11]) == grid_ans + G = nx.cycle_graph(4) + G.add_node(4) + ans = dict(nx.single_source_all_shortest_paths(G, 0)) + assert sorted(ans[2]) == [[0, 1, 2], [0, 3, 2]] + ans = dict(nx.single_source_all_shortest_paths(G, 4)) + assert sorted(ans[4]) == [[4]] + + def test_all_pairs_shortest_path(self): + # shortest_path w/o source and target returns a generator instead of + # a dict beginning in version 3.5. Only the first call needed changed here. 
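+        # dict(...) materializes the yielded (source, paths) pairs so they
+        # can be compared against the all_pairs_* helpers, which are
+        # likewise generators.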
+        p = dict(nx.shortest_path(self.cycle))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_shortest_path(self.cycle))
+        p = dict(nx.shortest_path(self.grid))
+        validate_grid_path(4, 4, 1, 12, p[1][12])
+        # now with weights
+        p = dict(nx.shortest_path(self.cycle, weight="weight"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_dijkstra_path(self.cycle))
+        p = dict(nx.shortest_path(self.grid, weight="weight"))
+        validate_grid_path(4, 4, 1, 12, p[1][12])
+        # weights and method specified
+        p = dict(nx.shortest_path(self.cycle, weight="weight", method="dijkstra"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_dijkstra_path(self.cycle))
+        p = dict(nx.shortest_path(self.cycle, weight="weight", method="bellman-ford"))
+        assert p[0][3] == [0, 1, 2, 3]
+        assert p == dict(nx.all_pairs_bellman_ford_path(self.cycle))
+
+    def test_all_pairs_shortest_path_length(self):
+        ans = dict(nx.shortest_path_length(self.cycle))
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_shortest_path_length(self.cycle))
+        ans = dict(nx.shortest_path_length(self.grid))
+        assert ans[1][16] == 6
+        # now with weights
+        ans = dict(nx.shortest_path_length(self.cycle, weight="weight"))
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle))
+        ans = dict(nx.shortest_path_length(self.grid, weight="weight"))
+        assert ans[1][16] == 6
+        # weights and method specified
+        ans = dict(
+            nx.shortest_path_length(self.cycle, weight="weight", method="dijkstra")
+        )
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_dijkstra_path_length(self.cycle))
+        ans = dict(
+            nx.shortest_path_length(self.cycle, weight="weight", method="bellman-ford")
+        )
+        assert ans[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1}
+        assert ans == dict(nx.all_pairs_bellman_ford_path_length(self.cycle))
+
+    def test_all_pairs_all_shortest_paths(self):
+        ans = dict(nx.all_pairs_all_shortest_paths(nx.cycle_graph(4)))
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        ans = dict(nx.all_pairs_all_shortest_paths(nx.cycle_graph(4), weight="weight"))
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        ans = dict(
+            nx.all_pairs_all_shortest_paths(
+                nx.cycle_graph(4), method="bellman-ford", weight="weight"
+            )
+        )
+        assert sorted(ans[1][3]) == [[1, 0, 3], [1, 2, 3]]
+        G = nx.cycle_graph(4)
+        G.add_node(4)
+        ans = dict(nx.all_pairs_all_shortest_paths(G))
+        assert sorted(ans[4][4]) == [[4]]
+
+    def test_has_path(self):
+        G = nx.Graph()
+        nx.add_path(G, range(3))
+        nx.add_path(G, range(3, 5))
+        assert nx.has_path(G, 0, 2)
+        assert not nx.has_path(G, 0, 4)
+
+    def test_has_path_singleton(self):
+        G = nx.empty_graph(1)
+        assert nx.has_path(G, 0, 0)
+
+    def test_all_shortest_paths(self):
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(nx.all_shortest_paths(G, 0, 3))
+        # with weights
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(
+            nx.all_shortest_paths(G, 0, 3, weight="weight")
+        )
+        # weights and method specified
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+        assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted(
+            nx.all_shortest_paths(G, 0, 3, weight="weight", method="dijkstra")
+        )
+        G = nx.Graph()
+        nx.add_path(G, [0, 1, 2, 3])
+        nx.add_path(G, [0, 10, 20, 3])
+
assert [[0, 1, 2, 3], [0, 10, 20, 3]] == sorted( + nx.all_shortest_paths(G, 0, 3, weight="weight", method="bellman-ford") + ) + + def test_all_shortest_paths_raise(self): + with pytest.raises(nx.NetworkXNoPath): + G = nx.path_graph(4) + G.add_node(4) + list(nx.all_shortest_paths(G, 0, 4)) + + def test_bad_method(self): + with pytest.raises(ValueError): + G = nx.path_graph(2) + list(nx.all_shortest_paths(G, 0, 1, weight="weight", method="SPAM")) + + def test_single_source_all_shortest_paths_bad_method(self): + with pytest.raises(ValueError): + G = nx.path_graph(2) + dict( + nx.single_source_all_shortest_paths( + G, 0, weight="weight", method="SPAM" + ) + ) + + def test_all_shortest_paths_zero_weight_edge(self): + g = nx.Graph() + nx.add_path(g, [0, 1, 3]) + nx.add_path(g, [0, 1, 2, 3]) + g.edges[1, 2]["weight"] = 0 + paths30d = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="dijkstra") + ) + paths03d = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="dijkstra") + ) + paths30b = list( + nx.all_shortest_paths(g, 3, 0, weight="weight", method="bellman-ford") + ) + paths03b = list( + nx.all_shortest_paths(g, 0, 3, weight="weight", method="bellman-ford") + ) + assert sorted(paths03d) == sorted(p[::-1] for p in paths30d) + assert sorted(paths03d) == sorted(p[::-1] for p in paths30b) + assert sorted(paths03b) == sorted(p[::-1] for p in paths30b) + + +class TestAverageShortestPathLength: + def test_cycle_graph(self): + ans = nx.average_shortest_path_length(nx.cycle_graph(7)) + assert ans == pytest.approx(2, abs=1e-7) + + def test_path_graph(self): + ans = nx.average_shortest_path_length(nx.path_graph(5)) + assert ans == pytest.approx(2, abs=1e-7) + + def test_weighted(self): + G = nx.Graph() + nx.add_cycle(G, range(7), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight") + assert ans == pytest.approx(4, abs=1e-7) + G = nx.Graph() + nx.add_path(G, range(5), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight") + assert ans == pytest.approx(4, abs=1e-7) + + def test_specified_methods(self): + G = nx.Graph() + nx.add_cycle(G, range(7), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") + assert ans == pytest.approx(4, abs=1e-7) + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") + assert ans == pytest.approx(4, abs=1e-7) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) + assert ans == pytest.approx(4, abs=1e-7) + + G = nx.Graph() + nx.add_path(G, range(5), weight=2) + ans = nx.average_shortest_path_length(G, weight="weight", method="dijkstra") + assert ans == pytest.approx(4, abs=1e-7) + ans = nx.average_shortest_path_length(G, weight="weight", method="bellman-ford") + assert ans == pytest.approx(4, abs=1e-7) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall" + ) + assert ans == pytest.approx(4, abs=1e-7) + + def test_directed_not_strongly_connected(self): + G = nx.DiGraph([(0, 1)]) + with pytest.raises(nx.NetworkXError, match="Graph is not strongly connected"): + nx.average_shortest_path_length(G) + + def test_undirected_not_connected(self): + g = nx.Graph() + g.add_nodes_from(range(3)) + g.add_edge(0, 1) + pytest.raises(nx.NetworkXError, nx.average_shortest_path_length, g) + + def test_trivial_graph(self): + """Tests that the trivial graph has average path length zero, + since there is exactly one path of length zero in the trivial + graph. + + For more information, see issue #1960. 
+ + """ + G = nx.trivial_graph() + assert nx.average_shortest_path_length(G) == 0 + + def test_null_graph(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.average_shortest_path_length(nx.null_graph()) + + def test_bad_method(self): + with pytest.raises(ValueError): + G = nx.path_graph(2) + nx.average_shortest_path_length(G, weight="weight", method="SPAM") + + +class TestAverageShortestPathLengthNumpy: + @classmethod + def setup_class(cls): + global np + import pytest + + np = pytest.importorskip("numpy") + + def test_specified_methods_numpy(self): + G = nx.Graph() + nx.add_cycle(G, range(7), weight=2) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) + np.testing.assert_almost_equal(ans, 4) + + G = nx.Graph() + nx.add_path(G, range(5), weight=2) + ans = nx.average_shortest_path_length( + G, weight="weight", method="floyd-warshall-numpy" + ) + np.testing.assert_almost_equal(ans, 4) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py new file mode 100644 index 0000000..4adbd84 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_unweighted.py @@ -0,0 +1,149 @@ +import pytest + +import networkx as nx + + +def validate_grid_path(r, c, s, t, p): + assert isinstance(p, list) + assert p[0] == s + assert p[-1] == t + s = ((s - 1) // c, (s - 1) % c) + t = ((t - 1) // c, (t - 1) % c) + assert len(p) == abs(t[0] - s[0]) + abs(t[1] - s[1]) + 1 + p = [((u - 1) // c, (u - 1) % c) for u in p] + for u in p: + assert 0 <= u[0] < r + assert 0 <= u[1] < c + for u, v in zip(p[:-1], p[1:]): + assert (abs(v[0] - u[0]), abs(v[1] - u[1])) in [(0, 1), (1, 0)] + + +class TestUnweightedPath: + @classmethod + def setup_class(cls): + from networkx import convert_node_labels_to_integers as cnlti + + cls.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + cls.cycle = nx.cycle_graph(7) + cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + + def test_bidirectional_shortest_path(self): + assert nx.bidirectional_shortest_path(self.cycle, 0, 3) == [0, 1, 2, 3] + assert nx.bidirectional_shortest_path(self.cycle, 0, 4) == [0, 6, 5, 4] + validate_grid_path( + 4, 4, 1, 12, nx.bidirectional_shortest_path(self.grid, 1, 12) + ) + assert nx.bidirectional_shortest_path(self.directed_cycle, 0, 3) == [0, 1, 2, 3] + # test source = target + assert nx.bidirectional_shortest_path(self.cycle, 3, 3) == [3] + + @pytest.mark.parametrize( + ("src", "tgt"), + ( + (8, 3), # source not in graph + (3, 8), # target not in graph + (8, 10), # neither source nor target in graph + (8, 8), # src == tgt, neither in graph - tests order of input checks + ), + ) + def test_bidirectional_shortest_path_src_tgt_not_in_graph(self, src, tgt): + with pytest.raises( + nx.NodeNotFound, + match=f"(Source {src}|Target {tgt}) is not in G", + ): + nx.bidirectional_shortest_path(self.cycle, src, tgt) + + def test_shortest_path_length(self): + assert nx.shortest_path_length(self.cycle, 0, 3) == 3 + assert nx.shortest_path_length(self.grid, 1, 12) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4) == 4 + # now with weights + assert nx.shortest_path_length(self.cycle, 0, 3, weight=True) == 3 + assert nx.shortest_path_length(self.grid, 1, 12, weight=True) == 5 + assert nx.shortest_path_length(self.directed_cycle, 0, 4, weight=True) == 4 + + def 
test_single_source_shortest_path(self): + p = nx.single_source_shortest_path(self.directed_cycle, 3) + assert p[0] == [3, 4, 5, 6, 0] + p = nx.single_source_shortest_path(self.cycle, 0) + assert p[3] == [0, 1, 2, 3] + p = nx.single_source_shortest_path(self.cycle, 0, cutoff=0) + assert p == {0: [0]} + + def test_single_source_shortest_path_length(self): + pl = nx.single_source_shortest_path_length + lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert dict(pl(self.cycle, 0)) == lengths + lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6} + assert dict(pl(self.directed_cycle, 0)) == lengths + + def test_single_target_shortest_path(self): + p = nx.single_target_shortest_path(self.directed_cycle, 0) + assert p[3] == [3, 4, 5, 6, 0] + p = nx.single_target_shortest_path(self.cycle, 0) + assert p[3] == [3, 2, 1, 0] + p = nx.single_target_shortest_path(self.cycle, 0, cutoff=0) + assert p == {0: [0]} + # test missing targets + target = 8 + with pytest.raises(nx.NodeNotFound, match=f"Target {target} not in G"): + nx.single_target_shortest_path(self.cycle, target) + + def test_single_target_shortest_path_length(self): + pl = nx.single_target_shortest_path_length + lengths = {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert pl(self.cycle, 0) == lengths + lengths = {0: 0, 1: 6, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} + assert pl(self.directed_cycle, 0) == lengths + # test missing targets + target = 8 + with pytest.raises(nx.NodeNotFound, match=f"Target {target} is not in G"): + nx.single_target_shortest_path_length(self.cycle, target) + + def test_all_pairs_shortest_path(self): + p = dict(nx.all_pairs_shortest_path(self.cycle)) + assert p[0][3] == [0, 1, 2, 3] + p = dict(nx.all_pairs_shortest_path(self.grid)) + validate_grid_path(4, 4, 1, 12, p[1][12]) + + def test_all_pairs_shortest_path_length(self): + l = dict(nx.all_pairs_shortest_path_length(self.cycle)) + assert l[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + l = dict(nx.all_pairs_shortest_path_length(self.grid)) + assert l[1][16] == 6 + + def test_predecessor_path(self): + G = nx.path_graph(4) + assert nx.predecessor(G, 0) == {0: [], 1: [0], 2: [1], 3: [2]} + assert nx.predecessor(G, 0, 3) == [2] + + def test_predecessor_cycle(self): + G = nx.cycle_graph(4) + pred = nx.predecessor(G, 0) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == [0] + + def test_predecessor_cutoff(self): + G = nx.path_graph(4) + p = nx.predecessor(G, 0, 3) + assert 4 not in p + + def test_predecessor_target(self): + G = nx.path_graph(4) + p = nx.predecessor(G, 0, 3) + assert p == [2] + p = nx.predecessor(G, 0, 3, cutoff=2) + assert p == [] + p, s = nx.predecessor(G, 0, 3, return_seen=True) + assert p == [2] + assert s == 3 + p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True) + assert p == [] + assert s == -1 + + def test_predecessor_missing_source(self): + source = 8 + with pytest.raises(nx.NodeNotFound, match=f"Source {source} not in G"): + nx.predecessor(self.cycle, source) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py new file mode 100644 index 0000000..30bb07e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/tests/test_weighted.py @@ -0,0 +1,972 @@ +import pytest + +import networkx as nx +from networkx.utils import pairwise + + +def validate_path(G, s, t, soln_len, path, weight="weight"): + assert path[0] 
== s + assert path[-1] == t + + if callable(weight): + weight_f = weight + else: + if G.is_multigraph(): + + def weight_f(u, v, d): + return min(e.get(weight, 1) for e in d.values()) + + else: + + def weight_f(u, v, d): + return d.get(weight, 1) + + computed = sum(weight_f(u, v, G[u][v]) for u, v in pairwise(path)) + assert soln_len == computed + + +def validate_length_path(G, s, t, soln_len, length, path, weight="weight"): + assert soln_len == length + validate_path(G, s, t, length, path, weight=weight) + + +class WeightedTestBase: + """Base class for test classes that test functions for computing + shortest paths in weighted graphs. + + """ + + def setup_method(self): + """Creates some graphs for use in the unit tests.""" + cnlti = nx.convert_node_labels_to_integers + self.grid = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + self.cycle = nx.cycle_graph(7) + self.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + self.XG = nx.DiGraph() + self.XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + self.MXG = nx.MultiDiGraph(self.XG) + self.MXG.add_edge("s", "u", weight=15) + self.XG2 = nx.DiGraph() + self.XG2.add_weighted_edges_from( + [ + [1, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 3, 1], + [1, 3, 50], + [1, 2, 100], + [2, 3, 100], + ] + ) + + self.XG3 = nx.Graph() + self.XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) + + self.XG4 = nx.Graph() + self.XG4.add_weighted_edges_from( + [ + [0, 1, 2], + [1, 2, 2], + [2, 3, 1], + [3, 4, 1], + [4, 5, 1], + [5, 6, 1], + [6, 7, 1], + [7, 0, 1], + ] + ) + self.MXG4 = nx.MultiGraph(self.XG4) + self.MXG4.add_edge(0, 1, weight=3) + self.G = nx.DiGraph() # no weights + self.G.add_edges_from( + [ + ("s", "u"), + ("s", "x"), + ("u", "v"), + ("u", "x"), + ("v", "y"), + ("x", "u"), + ("x", "v"), + ("x", "y"), + ("y", "s"), + ("y", "v"), + ] + ) + + +class TestWeightedPath(WeightedTestBase): + def test_dijkstra(self): + (D, P) = nx.single_source_dijkstra(self.XG, "s") + validate_path(self.XG, "s", "v", 9, P["v"]) + assert D["v"] == 9 + + validate_path( + self.XG, "s", "v", 9, nx.single_source_dijkstra_path(self.XG, "s")["v"] + ) + assert nx.single_source_dijkstra_path_length(self.XG, "s")["v"] == 9 + + validate_path( + self.XG, "s", "v", 9, nx.single_source_dijkstra(self.XG, "s")[1]["v"] + ) + validate_path( + self.MXG, "s", "v", 9, nx.single_source_dijkstra_path(self.MXG, "s")["v"] + ) + + GG = self.XG.to_undirected() + # make sure we get lower weight + # to_undirected might choose either edge with weight 2 or weight 3 + GG["u"]["x"]["weight"] = 2 + (D, P) = nx.single_source_dijkstra(GG, "s") + validate_path(GG, "s", "v", 8, P["v"]) + assert D["v"] == 8 # uses lower weight of 2 on u<->x edge + validate_path(GG, "s", "v", 8, nx.dijkstra_path(GG, "s", "v")) + assert nx.dijkstra_path_length(GG, "s", "v") == 8 + + validate_path(self.XG2, 1, 3, 4, nx.dijkstra_path(self.XG2, 1, 3)) + validate_path(self.XG3, 0, 3, 15, nx.dijkstra_path(self.XG3, 0, 3)) + assert nx.dijkstra_path_length(self.XG3, 0, 3) == 15 + validate_path(self.XG4, 0, 2, 4, nx.dijkstra_path(self.XG4, 0, 2)) + assert nx.dijkstra_path_length(self.XG4, 0, 2) == 4 + validate_path(self.MXG4, 0, 2, 4, nx.dijkstra_path(self.MXG4, 0, 2)) + validate_path( + self.G, "s", "v", 2, nx.single_source_dijkstra(self.G, "s", "v")[1] + ) + validate_path( + self.G, "s", "v", 
2, nx.single_source_dijkstra(self.G, "s")[1]["v"] + ) + + validate_path(self.G, "s", "v", 2, nx.dijkstra_path(self.G, "s", "v")) + assert nx.dijkstra_path_length(self.G, "s", "v") == 2 + + # NetworkXError: node s not reachable from moon + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path, self.G, "s", "moon") + pytest.raises(nx.NetworkXNoPath, nx.dijkstra_path_length, self.G, "s", "moon") + + validate_path(self.cycle, 0, 3, 3, nx.dijkstra_path(self.cycle, 0, 3)) + validate_path(self.cycle, 0, 4, 3, nx.dijkstra_path(self.cycle, 0, 4)) + + assert nx.single_source_dijkstra(self.cycle, 0, 0) == (0, [0]) + + def test_bidirectional_dijkstra(self): + validate_length_path( + self.XG, "s", "v", 9, *nx.bidirectional_dijkstra(self.XG, "s", "v") + ) + validate_length_path( + self.G, "s", "v", 2, *nx.bidirectional_dijkstra(self.G, "s", "v") + ) + validate_length_path( + self.cycle, 0, 3, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 3) + ) + validate_length_path( + self.cycle, 0, 4, 3, *nx.bidirectional_dijkstra(self.cycle, 0, 4) + ) + validate_length_path( + self.XG3, 0, 3, 15, *nx.bidirectional_dijkstra(self.XG3, 0, 3) + ) + validate_length_path( + self.XG4, 0, 2, 4, *nx.bidirectional_dijkstra(self.XG4, 0, 2) + ) + + # need more tests here + P = nx.single_source_dijkstra_path(self.XG, "s")["v"] + validate_path( + self.XG, + "s", + "v", + sum(self.XG[u][v]["weight"] for u, v in zip(P[:-1], P[1:])), + nx.dijkstra_path(self.XG, "s", "v"), + ) + + # check absent source + G = nx.path_graph(2) + pytest.raises(nx.NodeNotFound, nx.bidirectional_dijkstra, G, 3, 0) + + def test_weight_functions(self): + def heuristic(*z): + return sum(val**2 for val in z) + + def getpath(pred, v, s): + return [v] if v == s else getpath(pred, pred[v], s) + [v] + + def goldberg_radzik(g, s, t, weight="weight"): + pred, dist = nx.goldberg_radzik(g, s, weight=weight) + dist = dist[t] + return dist, getpath(pred, t, s) + + def astar(g, s, t, weight="weight"): + path = nx.astar_path(g, s, t, heuristic, weight=weight) + dist = nx.astar_path_length(g, s, t, heuristic, weight=weight) + return dist, path + + def vlp(G, s, t, l, F, w): + res = F(G, s, t, weight=w) + validate_length_path(G, s, t, l, *res, weight=w) + + G = self.cycle + s = 6 + t = 4 + path = [6] + list(range(t + 1)) + + def weight(u, v, _): + return 1 + v**2 + + length = sum(weight(u, v, None) for u, v in pairwise(path)) + vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_bellman_ford, weight) + vlp(G, s, t, length, goldberg_radzik, weight) + vlp(G, s, t, length, astar, weight) + + def weight(u, v, _): + return 2 ** (u * v) + + length = sum(weight(u, v, None) for u, v in pairwise(path)) + vlp(G, s, t, length, nx.bidirectional_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_dijkstra, weight) + vlp(G, s, t, length, nx.single_source_bellman_ford, weight) + vlp(G, s, t, length, goldberg_radzik, weight) + vlp(G, s, t, length, astar, weight) + + def test_bidirectional_dijkstra_no_path(self): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5, 6]) + path = nx.bidirectional_dijkstra(G, 1, 6) + + @pytest.mark.parametrize( + "fn", + ( + nx.dijkstra_path, + nx.dijkstra_path_length, + nx.single_source_dijkstra_path, + nx.single_source_dijkstra_path_length, + nx.single_source_dijkstra, + nx.dijkstra_predecessor_and_distance, + ), + ) + def test_absent_source(self, fn): + G = nx.path_graph(2) + with 
pytest.raises(nx.NodeNotFound): + fn(G, 3, 0) + # Test when source == target, which is handled specially by some functions + with pytest.raises(nx.NodeNotFound): + fn(G, 3, 3) + + def test_dijkstra_predecessor1(self): + G = nx.path_graph(4) + assert nx.dijkstra_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + + def test_dijkstra_predecessor2(self): + # 4-cycle + G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)]) + pred, dist = nx.dijkstra_predecessor_and_distance(G, (0)) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == [0] + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + + def test_dijkstra_predecessor3(self): + XG = nx.DiGraph() + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.dijkstra_predecessor_and_distance(XG, "s", cutoff=8) + assert "v" not in D + + def test_single_source_dijkstra_path_length(self): + pl = nx.single_source_dijkstra_path_length + assert dict(pl(self.MXG4, 0))[2] == 4 + spl = pl(self.MXG4, 0, cutoff=2) + assert 2 not in spl + + def test_bidirectional_dijkstra_multigraph(self): + G = nx.MultiGraph() + G.add_edge("a", "b", weight=10) + G.add_edge("a", "b", weight=100) + dp = nx.bidirectional_dijkstra(G, "a", "b") + assert dp == (10, ["a", "b"]) + + def test_dijkstra_pred_distance_multigraph(self): + G = nx.MultiGraph() + G.add_edge("a", "b", key="short", foo=5, weight=100) + G.add_edge("a", "b", key="long", bar=1, weight=110) + p, d = nx.dijkstra_predecessor_and_distance(G, "a") + assert p == {"a": [], "b": ["a"]} + assert d == {"a": 0, "b": 100} + + def test_negative_edge_cycle(self): + G = nx.cycle_graph(5, create_using=nx.DiGraph()) + assert not nx.negative_edge_cycle(G) + G.add_edge(8, 9, weight=-7) + G.add_edge(9, 8, weight=3) + graph_size = len(G) + assert nx.negative_edge_cycle(G) + assert graph_size == len(G) + pytest.raises(ValueError, nx.single_source_dijkstra_path_length, G, 8) + pytest.raises(ValueError, nx.single_source_dijkstra, G, 8) + pytest.raises(ValueError, nx.dijkstra_predecessor_and_distance, G, 8) + G.add_edge(9, 10) + pytest.raises(ValueError, nx.bidirectional_dijkstra, G, 8, 10) + G = nx.MultiDiGraph() + G.add_edge(2, 2, weight=-1) + assert nx.negative_edge_cycle(G) + + def test_negative_edge_cycle_empty(self): + G = nx.DiGraph() + assert not nx.negative_edge_cycle(G) + + def test_negative_edge_cycle_custom_weight_key(self): + d = nx.DiGraph() + d.add_edge("a", "b", w=-2) + d.add_edge("b", "a", w=-1) + assert nx.negative_edge_cycle(d, weight="w") + + def test_weight_function(self): + """Tests that a callable weight is interpreted as a weight + function instead of an edge attribute. + + """ + # Create a triangle in which the edge from node 0 to node 2 has + # a large weight and the other two edges have a small weight. + G = nx.complete_graph(3) + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 + + # The weight function will take the multiplicative inverse of + # the weights on the edges. This way, weights that were large + # before now become small and vice versa. + + def weight(u, v, d): + return 1 / d["weight"] + + # The shortest path from 0 to 2 using the actual weights on the + # edges should be [0, 1, 2]. 
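+        # (With those weights, 0-1-2 costs 1 + 1 = 2 versus 10 for the
+        # direct edge; the inverse-weight function flips this, making the
+        # direct 0-2 edge cost 1/10 = 0.1 versus 1/1 + 1/1 = 2.)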
+ distance, path = nx.single_source_dijkstra(G, 0, 2) + assert distance == 2 + assert path == [0, 1, 2] + # However, with the above weight function, the shortest path + # should be [0, 2], since that has a very small weight. + distance, path = nx.single_source_dijkstra(G, 0, 2, weight=weight) + assert distance == 1 / 10 + assert path == [0, 2] + + def test_all_pairs_dijkstra_path(self): + cycle = nx.cycle_graph(7) + p = dict(nx.all_pairs_dijkstra_path(cycle)) + assert p[0][3] == [0, 1, 2, 3] + + cycle[1][2]["weight"] = 10 + p = dict(nx.all_pairs_dijkstra_path(cycle)) + assert p[0][3] == [0, 6, 5, 4, 3] + + def test_all_pairs_dijkstra_path_length(self): + cycle = nx.cycle_graph(7) + pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) + assert pl[0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + + cycle[1][2]["weight"] = 10 + pl = dict(nx.all_pairs_dijkstra_path_length(cycle)) + assert pl[0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} + + def test_all_pairs_dijkstra(self): + cycle = nx.cycle_graph(7) + out = dict(nx.all_pairs_dijkstra(cycle)) + assert out[0][0] == {0: 0, 1: 1, 2: 2, 3: 3, 4: 3, 5: 2, 6: 1} + assert out[0][1][3] == [0, 1, 2, 3] + + cycle[1][2]["weight"] = 10 + out = dict(nx.all_pairs_dijkstra(cycle)) + assert out[0][0] == {0: 0, 1: 1, 2: 5, 3: 4, 4: 3, 5: 2, 6: 1} + assert out[0][1][3] == [0, 6, 5, 4, 3] + + +class TestDijkstraPathLength: + """Unit tests for the :func:`networkx.dijkstra_path_length` + function. + + """ + + def test_weight_function(self): + """Tests for computing the length of the shortest path using + Dijkstra's algorithm with a user-defined weight function. + + """ + # Create a triangle in which the edge from node 0 to node 2 has + # a large weight and the other two edges have a small weight. + G = nx.complete_graph(3) + G.adj[0][2]["weight"] = 10 + G.adj[0][1]["weight"] = 1 + G.adj[1][2]["weight"] = 1 + + # The weight function will take the multiplicative inverse of + # the weights on the edges. This way, weights that were large + # before now become small and vice versa. + + def weight(u, v, d): + return 1 / d["weight"] + + # The shortest path from 0 to 2 using the actual weights on the + # edges should be [0, 1, 2]. However, with the above weight + # function, the shortest path should be [0, 2], since that has a + # very small weight. + length = nx.dijkstra_path_length(G, 0, 2, weight=weight) + assert length == 1 / 10 + + +class TestMultiSourceDijkstra: + """Unit tests for the multi-source dialect of Dijkstra's shortest + path algorithms. 
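+
+    Every node in ``sources`` is treated as a starting point at distance
+    zero, so each reported distance and path is measured from whichever
+    source is nearest (see ``test_two_sources``).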
+ + """ + + def test_no_sources(self): + with pytest.raises(ValueError): + nx.multi_source_dijkstra(nx.Graph(), {}) + + def test_path_no_sources(self): + with pytest.raises(ValueError): + nx.multi_source_dijkstra_path(nx.Graph(), {}) + + def test_path_length_no_sources(self): + with pytest.raises(ValueError): + nx.multi_source_dijkstra_path_length(nx.Graph(), {}) + + @pytest.mark.parametrize( + "fn", + ( + nx.multi_source_dijkstra_path, + nx.multi_source_dijkstra_path_length, + nx.multi_source_dijkstra, + ), + ) + def test_absent_source(self, fn): + G = nx.path_graph(2) + with pytest.raises(nx.NodeNotFound): + fn(G, [3], 0) + with pytest.raises(nx.NodeNotFound): + fn(G, [3], 3) + + def test_two_sources(self): + edges = [(0, 1, 1), (1, 2, 1), (2, 3, 10), (3, 4, 1)] + G = nx.Graph() + G.add_weighted_edges_from(edges) + sources = {0, 4} + distances, paths = nx.multi_source_dijkstra(G, sources) + expected_distances = {0: 0, 1: 1, 2: 2, 3: 1, 4: 0} + expected_paths = {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [4, 3], 4: [4]} + assert distances == expected_distances + assert paths == expected_paths + + def test_simple_paths(self): + G = nx.path_graph(4) + lengths = nx.multi_source_dijkstra_path_length(G, [0]) + assert lengths == {n: n for n in G} + paths = nx.multi_source_dijkstra_path(G, [0]) + assert paths == {n: list(range(n + 1)) for n in G} + + +class TestBellmanFordAndGoldbergRadzik(WeightedTestBase): + def test_single_node_graph(self): + G = nx.DiGraph() + G.add_node(0) + assert nx.single_source_bellman_ford_path(G, 0) == {0: [0]} + assert nx.single_source_bellman_ford_path_length(G, 0) == {0: 0} + assert nx.single_source_bellman_ford(G, 0) == ({0: 0}, {0: [0]}) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ({0: []}, {0: 0}) + assert nx.goldberg_radzik(G, 0) == ({0: None}, {0: 0}) + + def test_absent_source_bellman_ford(self): + # the check is in _bellman_ford; this provides regression testing + # against later changes to "client" Bellman-Ford functions + G = nx.path_graph(2) + for fn in ( + nx.bellman_ford_predecessor_and_distance, + nx.bellman_ford_path, + nx.bellman_ford_path_length, + nx.single_source_bellman_ford_path, + nx.single_source_bellman_ford_path_length, + nx.single_source_bellman_ford, + ): + pytest.raises(nx.NodeNotFound, fn, G, 3, 0) + pytest.raises(nx.NodeNotFound, fn, G, 3, 3) + + def test_absent_source_goldberg_radzik(self): + with pytest.raises(nx.NodeNotFound): + G = nx.path_graph(2) + nx.goldberg_radzik(G, 3, 0) + + def test_negative_cycle_heuristic(self): + G = nx.DiGraph() + G.add_edge(0, 1, weight=-1) + G.add_edge(1, 2, weight=-1) + G.add_edge(2, 3, weight=-1) + G.add_edge(3, 0, weight=3) + assert not nx.negative_edge_cycle(G, heuristic=True) + G.add_edge(2, 0, weight=1.999) + assert nx.negative_edge_cycle(G, heuristic=True) + G.edges[2, 0]["weight"] = 2 + assert not nx.negative_edge_cycle(G, heuristic=True) + + def test_negative_cycle_consistency(self): + import random + + unif = random.uniform + for random_seed in range(2): # range(20): + random.seed(random_seed) + for density in [0.1, 0.9]: # .3, .7, .9]: + for N in [1, 10, 20]: # range(1, 60 - int(30 * density)): + for max_cost in [1, 90]: # [1, 10, 40, 90]: + G = nx.binomial_graph(N, density, seed=4, directed=True) + edges = ((u, v, unif(-1, max_cost)) for u, v in G.edges) + G.add_weighted_edges_from(edges) + + no_heuristic = nx.negative_edge_cycle(G, heuristic=False) + with_heuristic = nx.negative_edge_cycle(G, heuristic=True) + assert no_heuristic == with_heuristic + + def test_negative_cycle(self): 
+ G = nx.cycle_graph(5, create_using=nx.DiGraph()) + G.add_edge(1, 2, weight=-7) + for i in range(5): + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) + G = nx.cycle_graph(5) # undirected Graph + G.add_edge(1, 2, weight=-3) + for i in range(5): + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, i + ) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, i) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, i + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, i) + G = nx.DiGraph([(1, 1, {"weight": -1})]) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) + G = nx.MultiDiGraph([(1, 1, {"weight": -1})]) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford_path, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.single_source_bellman_ford_path_length, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.single_source_bellman_ford, G, 1) + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) + + def test_zero_cycle(self): + G = nx.cycle_graph(5, create_using=nx.DiGraph()) + G.add_edge(2, 3, weight=-4) + # check that zero cycle doesn't raise + nx.goldberg_radzik(G, 1) + nx.bellman_ford_predecessor_and_distance(G, 1) + + G.add_edge(2, 3, weight=-4.0001) + # check that negative cycle does raise + pytest.raises( + nx.NetworkXUnbounded, nx.bellman_ford_predecessor_and_distance, G, 1 + ) + pytest.raises(nx.NetworkXUnbounded, nx.goldberg_radzik, G, 1) + + def test_find_negative_cycle_longer_cycle(self): + G = nx.cycle_graph(5, create_using=nx.DiGraph()) + nx.add_cycle(G, [3, 5, 6, 7, 8, 9]) + G.add_edge(1, 2, weight=-30) + assert nx.find_negative_cycle(G, 1) == [0, 1, 2, 3, 4, 0] + assert nx.find_negative_cycle(G, 7) == [2, 3, 4, 0, 1, 2] + + def test_find_negative_cycle_no_cycle(self): + G = nx.path_graph(5, create_using=nx.DiGraph()) + pytest.raises(nx.NetworkXError, nx.find_negative_cycle, G, 3) + + def test_find_negative_cycle_single_edge(self): + G = nx.Graph() + G.add_edge(0, 1, weight=-1) + assert nx.find_negative_cycle(G, 1) == [1, 0, 1] + + def test_negative_weight(self): + G = nx.cycle_graph(5, create_using=nx.DiGraph()) + G.add_edge(1, 2, weight=-3) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + 4: [0, 1, 2, 3, 4], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: -2, + 3: -1, + 4: 0, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3], 4: [0, 1, 2, 3, 4]}, + ) + assert 
nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2], 4: [3]}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2, 4: 3}, + {0: 0, 1: 1, 2: -2, 3: -1, 4: 0}, + ) + + def test_not_connected(self): + G = nx.complete_graph(6) + G.add_edge(10, 11) + G.add_edge(10, 12) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + + # not connected, with a component not containing the source that + # contains a negative cycle. + G = nx.complete_graph(6) + G.add_edges_from( + [ + ("A", "B", {"load": 3}), + ("B", "C", {"load": -10}), + ("C", "A", {"load": 2}), + ] + ) + assert nx.single_source_bellman_ford_path(G, 0, weight="load") == { + 0: [0], + 1: [0, 1], + 2: [0, 2], + 3: [0, 3], + 4: [0, 4], + 5: [0, 5], + } + assert nx.single_source_bellman_ford_path_length(G, 0, weight="load") == { + 0: 0, + 1: 1, + 2: 1, + 3: 1, + 4: 1, + 5: 1, + } + assert nx.single_source_bellman_ford(G, 0, weight="load") == ( + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + {0: [0], 1: [0, 1], 2: [0, 2], 3: [0, 3], 4: [0, 4], 5: [0, 5]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0, weight="load") == ( + {0: [], 1: [0], 2: [0], 3: [0], 4: [0], 5: [0]}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + assert nx.goldberg_radzik(G, 0, weight="load") == ( + {0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, + {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1}, + ) + + def test_multigraph(self): + assert nx.bellman_ford_path(self.MXG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.MXG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.MXG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert nx.single_source_bellman_ford_path_length(self.MXG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.MXG, "s", target="v") + assert D == 9 + assert P == ["s", "x", "u", "v"] + P, D = nx.bellman_ford_predecessor_and_distance(self.MXG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + P, D = nx.goldberg_radzik(self.MXG, "s") + assert P["v"] == "u" + assert D["v"] == 9 + assert nx.bellman_ford_path(self.MXG4, 0, 2) == [0, 1, 2] + assert nx.bellman_ford_path_length(self.MXG4, 0, 2) == 4 + assert nx.single_source_bellman_ford_path(self.MXG4, 0)[2] == [0, 1, 2] + assert nx.single_source_bellman_ford_path_length(self.MXG4, 0)[2] == 4 + D, P = nx.single_source_bellman_ford(self.MXG4, 0, target=2) + assert D == 4 + assert P == [0, 1, 2] + P, D = nx.bellman_ford_predecessor_and_distance(self.MXG4, 0) + assert P[2] == [1] + assert D[2] == 4 + P, D = nx.goldberg_radzik(self.MXG4, 0) + assert P[2] == 1 + assert D[2] == 4 + + def test_others(self): + assert nx.bellman_ford_path(self.XG, "s", "v") == ["s", "x", "u", "v"] + assert nx.bellman_ford_path_length(self.XG, "s", "v") == 9 + assert nx.single_source_bellman_ford_path(self.XG, "s")["v"] == [ + "s", + "x", + "u", + "v", + ] + assert 
nx.single_source_bellman_ford_path_length(self.XG, "s")["v"] == 9 + D, P = nx.single_source_bellman_ford(self.XG, "s", target="v") + assert D == 9 + assert P == ["s", "x", "u", "v"] + (P, D) = nx.bellman_ford_predecessor_and_distance(self.XG, "s") + assert P["v"] == ["u"] + assert D["v"] == 9 + (P, D) = nx.goldberg_radzik(self.XG, "s") + assert P["v"] == "u" + assert D["v"] == 9 + + def test_path_graph(self): + G = nx.path_graph(4) + assert nx.single_source_bellman_ford_path(G, 0) == { + 0: [0], + 1: [0, 1], + 2: [0, 1, 2], + 3: [0, 1, 2, 3], + } + assert nx.single_source_bellman_ford_path_length(G, 0) == { + 0: 0, + 1: 1, + 2: 2, + 3: 3, + } + assert nx.single_source_bellman_ford(G, 0) == ( + {0: 0, 1: 1, 2: 2, 3: 3}, + {0: [0], 1: [0, 1], 2: [0, 1, 2], 3: [0, 1, 2, 3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 0) == ( + {0: [], 1: [0], 2: [1], 3: [2]}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + assert nx.goldberg_radzik(G, 0) == ( + {0: None, 1: 0, 2: 1, 3: 2}, + {0: 0, 1: 1, 2: 2, 3: 3}, + ) + assert nx.single_source_bellman_ford_path(G, 3) == { + 0: [3, 2, 1, 0], + 1: [3, 2, 1], + 2: [3, 2], + 3: [3], + } + assert nx.single_source_bellman_ford_path_length(G, 3) == { + 0: 3, + 1: 2, + 2: 1, + 3: 0, + } + assert nx.single_source_bellman_ford(G, 3) == ( + {0: 3, 1: 2, 2: 1, 3: 0}, + {0: [3, 2, 1, 0], 1: [3, 2, 1], 2: [3, 2], 3: [3]}, + ) + assert nx.bellman_ford_predecessor_and_distance(G, 3) == ( + {0: [1], 1: [2], 2: [3], 3: []}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) + assert nx.goldberg_radzik(G, 3) == ( + {0: 1, 1: 2, 2: 3, 3: None}, + {0: 3, 1: 2, 2: 1, 3: 0}, + ) + + def test_4_cycle(self): + # 4-cycle + G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0)]) + dist, path = nx.single_source_bellman_ford(G, 0) + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + assert path[0] == [0] + assert path[1] == [0, 1] + assert path[2] in [[0, 1, 2], [0, 3, 2]] + assert path[3] == [0, 3] + + pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0) + assert pred[0] == [] + assert pred[1] == [0] + assert pred[2] in [[1, 3], [3, 1]] + assert pred[3] == [0] + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + + pred, dist = nx.goldberg_radzik(G, 0) + assert pred[0] is None + assert pred[1] == 0 + assert pred[2] in [1, 3] + assert pred[3] == 0 + assert dist == {0: 0, 1: 1, 2: 2, 3: 1} + + def test_negative_weight_bf_path(self): + G = nx.DiGraph() + G.add_nodes_from("abcd") + G.add_edge("a", "d", weight=0) + G.add_edge("a", "b", weight=1) + G.add_edge("b", "c", weight=-3) + G.add_edge("c", "d", weight=1) + + assert nx.bellman_ford_path(G, "a", "d") == ["a", "b", "c", "d"] + assert nx.bellman_ford_path_length(G, "a", "d") == -1 + + def test_zero_cycle_smoke(self): + D = nx.DiGraph() + D.add_weighted_edges_from([(0, 1, 1), (1, 2, 1), (2, 3, 1), (3, 1, -2)]) + + nx.bellman_ford_path(D, 1, 3) + nx.dijkstra_path(D, 1, 3) + nx.bidirectional_dijkstra(D, 1, 3) + # FIXME nx.goldberg_radzik(D, 1) + + +class TestJohnsonAlgorithm(WeightedTestBase): + def test_single_node_graph(self): + G = nx.DiGraph() + G.add_node(0) + assert nx.johnson(G) == {0: {0: [0]}} + + def test_negative_cycle(self): + G = nx.DiGraph() + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) + pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) + G = nx.Graph() + G.add_weighted_edges_from( + [ + ("0", "3", 3), + ("0", "1", -5), + ("1", "0", -5), + ("0", "2", 2), + ("1", "2", 4), + ("2", "3", 1), + ] + ) + pytest.raises(nx.NetworkXUnbounded, nx.johnson, G) + + def 
test_negative_weights(self): + G = nx.DiGraph() + G.add_weighted_edges_from( + [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)] + ) + paths = nx.johnson(G) + assert paths == { + "1": {"1": ["1"], "3": ["1", "2", "3"], "2": ["1", "2"]}, + "0": { + "1": ["0", "1"], + "0": ["0"], + "3": ["0", "1", "2", "3"], + "2": ["0", "1", "2"], + }, + "3": {"3": ["3"]}, + "2": {"3": ["2", "3"], "2": ["2"]}, + } + + def test_unweighted_graph(self): + G = nx.Graph() + G.add_edges_from([(1, 0), (2, 1)]) + H = G.copy() + nx.set_edge_attributes(H, values=1, name="weight") + assert nx.johnson(G) == nx.johnson(H) + + def test_partially_weighted_graph_with_negative_edges(self): + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 0), (1, 0)]) + G[1][0]["weight"] = -2 + G[0][1]["weight"] = 3 + G[1][2]["weight"] = -4 + + H = G.copy() + H[2][0]["weight"] = 1 + + I = G.copy() + I[2][0]["weight"] = 8 + + assert nx.johnson(G) == nx.johnson(H) + assert nx.johnson(G) != nx.johnson(I) + + def test_graphs(self): + validate_path(self.XG, "s", "v", 9, nx.johnson(self.XG)["s"]["v"]) + validate_path(self.MXG, "s", "v", 9, nx.johnson(self.MXG)["s"]["v"]) + validate_path(self.XG2, 1, 3, 4, nx.johnson(self.XG2)[1][3]) + validate_path(self.XG3, 0, 3, 15, nx.johnson(self.XG3)[0][3]) + validate_path(self.XG4, 0, 2, 4, nx.johnson(self.XG4)[0][2]) + validate_path(self.MXG4, 0, 2, 4, nx.johnson(self.MXG4)[0][2]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py new file mode 100644 index 0000000..2a53031 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/unweighted.py @@ -0,0 +1,562 @@ +""" +Shortest path algorithms for unweighted graphs. +""" + +import networkx as nx + +__all__ = [ + "bidirectional_shortest_path", + "single_source_shortest_path", + "single_source_shortest_path_length", + "single_target_shortest_path", + "single_target_shortest_path_length", + "all_pairs_shortest_path", + "all_pairs_shortest_path_length", + "predecessor", +] + + +@nx._dispatchable +def single_source_shortest_path_length(G, source, cutoff=None): + """Compute the shortest path lengths from `source` to all reachable nodes in `G`. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= `cutoff` are returned. + + Returns + ------- + lengths : dict + Dict keyed by node to shortest path length to `source`. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.single_source_shortest_path_length(G, 0) + {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} + + See Also + -------- + :any:`shortest_path_length` : + Shortest path length with specifiable source, target, and weight. + :any:`single_source_dijkstra_path_length` : + Shortest weighted path length from source with Dijkstra algorithm. + :any:`single_source_bellman_ford_path_length` : + Shortest weighted path length from source with Bellman-Ford algorithm. 
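+
+    Notes
+    -----
+    With a `cutoff`, only nodes within that many hops are reported; an
+    illustrative doctest:
+
+    >>> nx.single_source_shortest_path_length(nx.path_graph(5), 0, cutoff=2)
+    {0: 0, 1: 1, 2: 2}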
+ """ + if source not in G: + raise nx.NodeNotFound(f"Source {source} is not in G") + if cutoff is None: + cutoff = float("inf") + nextlevel = [source] + return dict(_single_shortest_path_length(G._adj, nextlevel, cutoff)) + + +def _single_shortest_path_length(adj, firstlevel, cutoff): + """Yields (node, level) in a breadth first search + + Shortest Path Length helper function + Parameters + ---------- + adj : dict + Adjacency dict or view + firstlevel : list + starting nodes, e.g. [source] or [target] + cutoff : int or float + level at which we stop the process + """ + seen = set(firstlevel) + nextlevel = firstlevel + level = 0 + n = len(adj) + for v in nextlevel: + yield (v, level) + while nextlevel and cutoff > level: + level += 1 + thislevel = nextlevel + nextlevel = [] + for v in thislevel: + for w in adj[v]: + if w not in seen: + seen.add(w) + nextlevel.append(w) + yield (w, level) + if len(seen) == n: + return + + +@nx._dispatchable +def single_target_shortest_path_length(G, target, cutoff=None): + """Compute the shortest path lengths to target from all reachable nodes. + + Parameters + ---------- + G : NetworkX graph + + target : node + Target node for path + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= cutoff are returned. + + Returns + ------- + lengths : dictionary + Dictionary, keyed by source, of shortest path lengths. + + Examples + -------- + >>> G = nx.path_graph(5, create_using=nx.DiGraph()) + >>> length = nx.single_target_shortest_path_length(G, 4) + >>> length[0] + 4 + >>> for node in range(5): + ... print(f"{node}: {length[node]}") + 0: 4 + 1: 3 + 2: 2 + 3: 1 + 4: 0 + + See Also + -------- + single_source_shortest_path_length, shortest_path_length + """ + if target not in G: + raise nx.NodeNotFound(f"Target {target} is not in G") + if cutoff is None: + cutoff = float("inf") + # handle either directed or undirected + adj = G._pred if G.is_directed() else G._adj + nextlevel = [target] + return dict(_single_shortest_path_length(adj, nextlevel, cutoff)) + + +@nx._dispatchable +def all_pairs_shortest_path_length(G, cutoff=None): + """Computes the shortest path lengths between all nodes in `G`. + + Parameters + ---------- + G : NetworkX graph + + cutoff : integer, optional + Depth at which to stop the search. Only paths of length at most + `cutoff` are returned. + + Returns + ------- + lengths : iterator + (source, dictionary) iterator with dictionary keyed by target and + shortest path length as the key value. + + Notes + ----- + The iterator returned only has reachable node pairs. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length = dict(nx.all_pairs_shortest_path_length(G)) + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"1 - {node}: {length[1][node]}") + 1 - 0: 1 + 1 - 1: 0 + 1 - 2: 1 + 1 - 3: 2 + 1 - 4: 3 + >>> length[3][2] + 1 + >>> length[2][2] + 0 + + """ + length = single_source_shortest_path_length + # TODO This can be trivially parallelized. + for n in G: + yield (n, length(G, n, cutoff=cutoff)) + + +@nx._dispatchable +def bidirectional_shortest_path(G, source, target): + """Returns a list of nodes in a shortest path between source and target. + + Parameters + ---------- + G : NetworkX graph + + source : node label + starting node for path + + target : node label + ending node for path + + Returns + ------- + path: list + List of nodes in a path from source to target. + + Raises + ------ + NetworkXNoPath + If no path exists between source and target. 
+ + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_path(G, [0, 1, 2, 3, 0, 4, 5, 6, 7, 4]) + >>> nx.bidirectional_shortest_path(G, 2, 6) + [2, 1, 0, 4, 5, 6] + + See Also + -------- + shortest_path + + Notes + ----- + This algorithm is used by shortest_path(G, source, target). + """ + + if source not in G: + raise nx.NodeNotFound(f"Source {source} is not in G") + + if target not in G: + raise nx.NodeNotFound(f"Target {target} is not in G") + + # call helper to do the real work + results = _bidirectional_pred_succ(G, source, target) + pred, succ, w = results + + # build path from pred+w+succ + path = [] + # from source to w + while w is not None: + path.append(w) + w = pred[w] + path.reverse() + # from w to target + w = succ[path[-1]] + while w is not None: + path.append(w) + w = succ[w] + + return path + + +def _bidirectional_pred_succ(G, source, target): + """Bidirectional shortest path helper. + + Returns (pred, succ, w) where + pred is a dictionary of predecessors from w to the source, and + succ is a dictionary of successors from w to the target. + """ + # does BFS from both source and target and meets in the middle + if target == source: + return ({target: None}, {source: None}, source) + + # handle either directed or undirected + if G.is_directed(): + Gpred = G.pred + Gsucc = G.succ + else: + Gpred = G.adj + Gsucc = G.adj + + # predecessor and successors in search + pred = {source: None} + succ = {target: None} + + # initialize fringes, start with forward + forward_fringe = [source] + reverse_fringe = [target] + + while forward_fringe and reverse_fringe: + if len(forward_fringe) <= len(reverse_fringe): + this_level = forward_fringe + forward_fringe = [] + for v in this_level: + for w in Gsucc[v]: + if w not in pred: + forward_fringe.append(w) + pred[w] = v + if w in succ: # path found + return pred, succ, w + else: + this_level = reverse_fringe + reverse_fringe = [] + for v in this_level: + for w in Gpred[v]: + if w not in succ: + succ[w] = v + reverse_fringe.append(w) + if w in pred: # found path + return pred, succ, w + + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") + + +@nx._dispatchable +def single_source_shortest_path(G, source, cutoff=None): + """Compute shortest path between source + and all other nodes reachable from source. + + Parameters + ---------- + G : NetworkX graph + + source : node label + Starting node for path + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= cutoff are returned. + + Returns + ------- + paths : dictionary + Dictionary, keyed by target, of shortest paths. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> path = nx.single_source_shortest_path(G, 0) + >>> path[4] + [0, 1, 2, 3, 4] + + Notes + ----- + The shortest path is not necessarily unique. So there can be multiple + paths between the source and each target node, all of which have the + same 'shortest' length. For each target node, this function returns + only one of those paths. 
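+
+    For instance, in a 4-cycle both ``[0, 1, 2]`` and ``[0, 3, 2]`` are
+    shortest paths from node 0 to node 2, and which one is returned
+    depends on the adjacency order:
+
+    >>> G = nx.cycle_graph(4)
+    >>> nx.single_source_shortest_path(G, 0)[2] in ([0, 1, 2], [0, 3, 2])
+    True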
+
+    See Also
+    --------
+    shortest_path
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Source {source} not in G")
+
+    def join(p1, p2):
+        return p1 + p2
+
+    if cutoff is None:
+        cutoff = float("inf")
+    nextlevel = {source: 1}  # dict of nodes to check at next level
+    paths = {source: [source]}  # paths dictionary (paths to key from source)
+    return dict(_single_shortest_path(G.adj, nextlevel, paths, cutoff, join))
+
+
+def _single_shortest_path(adj, firstlevel, paths, cutoff, join):
+    """Returns shortest paths
+
+    Shortest Path helper function
+    Parameters
+    ----------
+    adj : dict
+        Adjacency dict or view
+    firstlevel : dict
+        starting nodes, e.g. {source: 1} or {target: 1}
+    paths : dict
+        paths for starting nodes, e.g. {source: [source]}
+    cutoff : int or float
+        level at which we stop the process
+    join : function
+        function to construct a path from two partial paths. Requires two
+        list inputs `p1` and `p2`, and returns a list. Usually returns
+        `p1 + p2` (forward from source) or `p2 + p1` (backward from target)
+    """
+    level = 0  # the current level
+    nextlevel = firstlevel
+    while nextlevel and cutoff > level:
+        thislevel = nextlevel
+        nextlevel = {}
+        for v in thislevel:
+            for w in adj[v]:
+                if w not in paths:
+                    paths[w] = join(paths[v], [w])
+                    nextlevel[w] = 1
+        level += 1
+    return paths
+
+
+@nx._dispatchable
+def single_target_shortest_path(G, target, cutoff=None):
+    """Compute shortest path to target from all nodes that reach target.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    target : node label
+        Target node for path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    paths : dictionary
+        Dictionary, keyed by source, of shortest paths to `target`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
+    >>> path = nx.single_target_shortest_path(G, 4)
+    >>> path[0]
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    The shortest path is not necessarily unique. So there can be multiple
+    paths between each node and the target, all of which have the same
+    'shortest' length. For each source node, this function returns only
+    one of those paths.
+
+    See Also
+    --------
+    shortest_path, single_source_shortest_path
+    """
+    if target not in G:
+        raise nx.NodeNotFound(f"Target {target} not in G")
+
+    def join(p1, p2):
+        return p2 + p1
+
+    # handle either directed or undirected
+    adj = G.pred if G.is_directed() else G.adj
+    if cutoff is None:
+        cutoff = float("inf")
+    nextlevel = {target: 1}  # dict of nodes to check at next level
+    paths = {target: [target]}  # paths dictionary (paths from key to target)
+    return dict(_single_shortest_path(adj, nextlevel, paths, cutoff, join))
+
+
+@nx._dispatchable
+def all_pairs_shortest_path(G, cutoff=None):
+    """Compute shortest paths between all nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    cutoff : integer, optional
+        Depth at which to stop the search. Only paths of length at most
+        `cutoff` are returned.
+
+    Returns
+    -------
+    paths : iterator
+        (source, dictionary) iterator with dictionary keyed by target and
+        shortest path as the value.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> path = dict(nx.all_pairs_shortest_path(G))
+    >>> print(path[0][4])
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    There may be multiple shortest paths with the same length between
+    two nodes. For each pair, this function returns only one of those paths.
+
+    See Also
+    --------
+    floyd_warshall
+    all_pairs_all_shortest_paths
+
+    """
+    # TODO This can be trivially parallelized.
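+    # A minimal parallel sketch, one BFS per source (joblib is named here
+    # purely as an illustration, not a dependency of this module):
+    #
+    #     from joblib import Parallel, delayed
+    #     Parallel(n_jobs=-1)(
+    #         delayed(single_source_shortest_path)(G, n, cutoff=cutoff) for n in G
+    #     )
+    #
+    # The sequential generator below is the reference behavior.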
+ for n in G: + yield (n, single_source_shortest_path(G, n, cutoff=cutoff)) + + +@nx._dispatchable +def predecessor(G, source, target=None, cutoff=None, return_seen=None): + """Returns dict of predecessors for the path from source to all nodes in G. + + Parameters + ---------- + G : NetworkX graph + + source : node label + Starting node for path + + target : node label, optional + Ending node for path. If provided only predecessors between + source and target are returned + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= cutoff are returned. + + return_seen : bool, optional (default=None) + Whether to return a dictionary, keyed by node, of the level (number of + hops) to reach the node (as seen during breadth-first-search). + + Returns + ------- + pred : dictionary + Dictionary, keyed by node, of predecessors in the shortest path. + + + (pred, seen): tuple of dictionaries + If `return_seen` argument is set to `True`, then a tuple of dictionaries + is returned. The first element is the dictionary, keyed by node, of + predecessors in the shortest path. The second element is the dictionary, + keyed by node, of the level (number of hops) to reach the node (as seen + during breadth-first-search). + + Examples + -------- + >>> G = nx.path_graph(4) + >>> list(G) + [0, 1, 2, 3] + >>> nx.predecessor(G, 0) + {0: [], 1: [0], 2: [1], 3: [2]} + >>> nx.predecessor(G, 0, return_seen=True) + ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3}) + + + """ + if source not in G: + raise nx.NodeNotFound(f"Source {source} not in G") + + level = 0 # the current level + nextlevel = [source] # list of nodes to check at next level + seen = {source: level} # level (number of hops) when seen in BFS + pred = {source: []} # predecessor dictionary + while nextlevel: + level = level + 1 + thislevel = nextlevel + nextlevel = [] + for v in thislevel: + for w in G[v]: + if w not in seen: + pred[w] = [v] + seen[w] = level + nextlevel.append(w) + elif seen[w] == level: # add v to predecessor list if it + pred[w].append(v) # is at the correct level + if cutoff and cutoff <= level: + break + + if target is not None: + if return_seen: + if target not in pred: + return ([], -1) # No predecessor + return (pred[target], seen[target]) + else: + if target not in pred: + return [] # No predecessor + return pred[target] + else: + if return_seen: + return (pred, seen) + else: + return pred diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py new file mode 100644 index 0000000..0c69d4b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/shortest_paths/weighted.py @@ -0,0 +1,2516 @@ +""" +Shortest path algorithms for weighted graphs. 
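+
+These routines assume numerical edge weights. The Dijkstra variants are not
+guaranteed to be correct with negative weights; the Bellman-Ford,
+Goldberg-Radzik, and Johnson routines accept negative weights but raise
+``NetworkXUnbounded`` when a negative cycle is reachable.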
+""" + +from collections import deque +from heapq import heappop, heappush +from itertools import count + +import networkx as nx +from networkx.algorithms.shortest_paths.generic import _build_paths_from_predecessors + +__all__ = [ + "dijkstra_path", + "dijkstra_path_length", + "bidirectional_dijkstra", + "single_source_dijkstra", + "single_source_dijkstra_path", + "single_source_dijkstra_path_length", + "multi_source_dijkstra", + "multi_source_dijkstra_path", + "multi_source_dijkstra_path_length", + "all_pairs_dijkstra", + "all_pairs_dijkstra_path", + "all_pairs_dijkstra_path_length", + "dijkstra_predecessor_and_distance", + "bellman_ford_path", + "bellman_ford_path_length", + "single_source_bellman_ford", + "single_source_bellman_ford_path", + "single_source_bellman_ford_path_length", + "all_pairs_bellman_ford_path", + "all_pairs_bellman_ford_path_length", + "bellman_ford_predecessor_and_distance", + "negative_edge_cycle", + "find_negative_cycle", + "goldberg_radzik", + "johnson", +] + + +def _weight_function(G, weight): + """Returns a function that returns the weight of an edge. + + The returned function is specifically suitable for input to + functions :func:`_dijkstra` and :func:`_bellman_ford_relaxation`. + + Parameters + ---------- + G : NetworkX graph. + + weight : string or function + If it is callable, `weight` itself is returned. If it is a string, + it is assumed to be the name of the edge attribute that represents + the weight of an edge. In that case, a function is returned that + gets the edge weight according to the specified edge attribute. + + Returns + ------- + function + This function returns a callable that accepts exactly three inputs: + a node, an node adjacent to the first one, and the edge attribute + dictionary for the eedge joining those nodes. That function returns + a number representing the weight of an edge. + + If `G` is a multigraph, and `weight` is not callable, the + minimum edge weight over all parallel edges is returned. If any edge + does not have an attribute with key `weight`, it is assumed to + have weight one. + + """ + if callable(weight): + return weight + # If the weight keyword argument is not callable, we assume it is a + # string representing the edge attribute containing the weight of + # the edge. + if G.is_multigraph(): + return lambda u, v, d: min(attr.get(weight, 1) for attr in d.values()) + return lambda u, v, data: data.get(weight, 1) + + +@nx._dispatchable(edge_attrs="weight") +def dijkstra_path(G, source, target, weight="weight"): + """Returns the shortest weighted path from source to target in G. + + Uses Dijkstra's Method to compute the shortest weighted path + between two nodes in a graph. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node + + target : node + Ending node + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + path : list + List of nodes in a shortest path. 
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` is not in `G`.
+
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> print(nx.dijkstra_path(G, 0, 4))
+    [0, 1, 2, 3, 4]
+
+    Find the edges of a shortest path in a multigraph:
+
+    >>> G = nx.MultiDiGraph()
+    >>> G.add_weighted_edges_from([(1, 2, 0.75), (1, 2, 0.5), (2, 3, 0.5), (1, 3, 1.5)])
+    >>> nodes = nx.dijkstra_path(G, 1, 3)
+    >>> edges = nx.utils.pairwise(nodes)
+    >>> list(
+    ...     (u, v, min(G[u][v], key=lambda k: G[u][v][k].get("weight", 1)))
+    ...     for u, v in edges
+    ... )
+    [(1, 2, 1), (2, 3, 0)]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+    will find the shortest red path.
+
+    The weight function can be used to include node weights.
+
+    >>> def func(u, v, d):
+    ...     node_u_wt = G.nodes[u].get("node_weight", 1)
+    ...     node_v_wt = G.nodes[v].get("node_weight", 1)
+    ...     edge_wt = d.get("weight", 1)
+    ...     return node_u_wt / 2 + node_v_wt / 2 + edge_wt
+
+    In this example we take the average of start and end node
+    weights of an edge and add it to the weight of the edge.
+
+    The function :func:`single_source_dijkstra` computes both the path
+    and its length. If you need both, use that function.
+
+    See Also
+    --------
+    bidirectional_dijkstra
+    bellman_ford_path
+    single_source_dijkstra
+    """
+    (length, path) = single_source_dijkstra(G, source, target=target, weight=weight)
+    return path
+
+
+@nx._dispatchable(edge_attrs="weight")
+def dijkstra_path_length(G, source, target, weight="weight"):
+    """Returns the shortest weighted path length in G from source to target.
+
+    Uses Dijkstra's Method to compute the shortest weighted path length
+    between two nodes in a graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+        starting node for path
+
+    target : node label
+        ending node for path
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+
+    Returns
+    -------
+    length : number
+        Shortest path length.
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` is not in `G`.
+
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> nx.dijkstra_path_length(G, 0, 4)
+    4
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+    will find the shortest red path.
+
+    The function :func:`single_source_dijkstra` computes both the path
+    and its length. If you need both, use that function.
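+
+    As an illustration of edge hiding (the ``color`` attribute is made up
+    for this example):
+
+    >>> G = nx.Graph()
+    >>> G.add_edge(0, 1, color="red")
+    >>> G.add_edge(1, 2, color="red")
+    >>> G.add_edge(0, 2, color="blue")
+    >>> nx.dijkstra_path_length(G, 0, 2, weight=lambda u, v, d: 1 if d["color"] == "red" else None)
+    2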
+
+    See Also
+    --------
+    bidirectional_dijkstra
+    bellman_ford_path_length
+    single_source_dijkstra
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Node {source} not found in graph")
+    if source == target:
+        return 0
+    weight = _weight_function(G, weight)
+    length = _dijkstra(G, source, weight, target=target)
+    try:
+        return length[target]
+    except KeyError as err:
+        raise nx.NetworkXNoPath(f"Node {target} not reachable from {source}") from err
+
+
+@nx._dispatchable(edge_attrs="weight")
+def single_source_dijkstra_path(G, source, cutoff=None, weight="weight"):
+    """Find shortest weighted paths in G from a source node.
+
+    Compute shortest path between source and all other reachable
+    nodes for a weighted graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Starting node for path.
+
+    cutoff : integer or float, optional
+        Length (sum of edge weights) at which the search is stopped.
+        If cutoff is provided, only return paths with summed weight <= cutoff.
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+
+    Returns
+    -------
+    paths : dictionary
+        Dictionary of shortest paths keyed by target.
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> path = nx.single_source_dijkstra_path(G, 0)
+    >>> path[4]
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+    So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+    will find the shortest red path.
+
+    See Also
+    --------
+    single_source_dijkstra, single_source_bellman_ford
+
+    """
+    return multi_source_dijkstra_path(G, {source}, cutoff=cutoff, weight=weight)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def single_source_dijkstra_path_length(G, source, cutoff=None, weight="weight"):
+    """Find shortest weighted path lengths in G from a source node.
+
+    Compute the shortest path length between source and all other
+    reachable nodes for a weighted graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+        Starting node for path
+
+    cutoff : integer or float, optional
+        Length (sum of edge weights) at which the search is stopped.
+        If cutoff is provided, only return paths with summed weight <= cutoff.
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+ + Returns + ------- + length : dict + Dict keyed by node to shortest path length from source. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length = nx.single_source_dijkstra_path_length(G, 0) + >>> length[4] + 4 + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"{node}: {length[node]}") + 0: 0 + 1: 1 + 2: 2 + 3: 3 + 4: 4 + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + See Also + -------- + single_source_dijkstra, single_source_bellman_ford_path_length + + """ + return multi_source_dijkstra_path_length(G, {source}, cutoff=cutoff, weight=weight) + + +@nx._dispatchable(edge_attrs="weight") +def single_source_dijkstra(G, source, target=None, cutoff=None, weight="weight"): + """Find shortest weighted paths and lengths from a source node. + + Compute the shortest path length between source and all other + reachable nodes for a weighted graph. + + Uses Dijkstra's algorithm to compute shortest paths and lengths + between a source and all other reachable nodes in a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + source : node label + Starting node for path + + target : node label, optional + Ending node for path + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + distance, path : pair of dictionaries, or numeric and list. + If target is None, paths and lengths to all nodes are computed. + The return value is a tuple of two dictionaries keyed by target nodes. + The first dictionary stores distance to each target node. + The second stores the path to each target node. + If target is not None, returns a tuple (distance, path), where + distance is the distance from source to target and path is a list + representing the path from source to target. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length, path = nx.single_source_dijkstra(G, 0) + >>> length[4] + 4 + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"{node}: {length[node]}") + 0: 0 + 1: 1 + 2: 2 + 3: 3 + 4: 4 + >>> path[4] + [0, 1, 2, 3, 4] + >>> length, path = nx.single_source_dijkstra(G, 0, 1) + >>> length + 1 + >>> path + [0, 1] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. 
+ + Based on the Python cookbook recipe (119466) at + https://code.activestate.com/recipes/119466/ + + This algorithm is not guaranteed to work if edge weights + are negative or are floating point numbers + (overflows and roundoff errors can cause problems). + + See Also + -------- + single_source_dijkstra_path + single_source_dijkstra_path_length + single_source_bellman_ford + """ + return multi_source_dijkstra( + G, {source}, cutoff=cutoff, target=target, weight=weight + ) + + +@nx._dispatchable(edge_attrs="weight") +def multi_source_dijkstra_path(G, sources, cutoff=None, weight="weight"): + """Find shortest weighted paths in G from a given set of source + nodes. + + Compute shortest path between any of the source nodes and all other + reachable nodes for a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + sources : non-empty set of nodes + Starting nodes for paths. If this is just a set containing a + single node, then all paths computed by this function will start + from that node. If there are two or more nodes in the set, the + computed paths may begin from any one of the start nodes. + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + paths : dictionary + Dictionary of shortest paths keyed by target. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> path = nx.multi_source_dijkstra_path(G, {0, 4}) + >>> path[1] + [0, 1] + >>> path[3] + [4, 3] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + Raises + ------ + ValueError + If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. + + See Also + -------- + multi_source_dijkstra, multi_source_bellman_ford + + """ + length, path = multi_source_dijkstra(G, sources, cutoff=cutoff, weight=weight) + return path + + +@nx._dispatchable(edge_attrs="weight") +def multi_source_dijkstra_path_length(G, sources, cutoff=None, weight="weight"): + """Find shortest weighted path lengths in G from a given set of + source nodes. + + Compute the shortest path length between any of the source nodes and + all other reachable nodes for a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + sources : non-empty set of nodes + Starting nodes for paths. If this is just a set containing a + single node, then all paths computed by this function will start + from that node. If there are two or more nodes in the set, the + computed paths may begin from any one of the start nodes. + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. 
+ If cutoff is provided, only return paths with summed weight <= cutoff. + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + length : dict + Dict keyed by node to shortest path length to nearest source. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length = nx.multi_source_dijkstra_path_length(G, {0, 4}) + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"{node}: {length[node]}") + 0: 0 + 1: 1 + 2: 2 + 3: 1 + 4: 0 + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + Raises + ------ + ValueError + If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. + + See Also + -------- + multi_source_dijkstra + + """ + if not sources: + raise ValueError("sources must not be empty") + for s in sources: + if s not in G: + raise nx.NodeNotFound(f"Node {s} not found in graph") + weight = _weight_function(G, weight) + return _dijkstra_multisource(G, sources, weight, cutoff=cutoff) + + +@nx._dispatchable(edge_attrs="weight") +def multi_source_dijkstra(G, sources, target=None, cutoff=None, weight="weight"): + """Find shortest weighted paths and lengths from a given set of + source nodes. + + Uses Dijkstra's algorithm to compute the shortest paths and lengths + between one of the source nodes and the given `target`, or all other + reachable nodes if not specified, for a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + sources : non-empty set of nodes + Starting nodes for paths. If this is just a set containing a + single node, then all paths computed by this function will start + from that node. If there are two or more nodes in the set, the + computed paths may begin from any one of the start nodes. + + target : node label, optional + Ending node for path + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + distance, path : pair of dictionaries, or numeric and list + If target is None, returns a tuple of two dictionaries keyed by node. + The first dictionary stores distance from one of the source nodes. 
+ The second stores the path from one of the sources to that node. + If target is not None, returns a tuple of (distance, path) where + distance is the distance from source to target and path is a list + representing the path from source to target. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length, path = nx.multi_source_dijkstra(G, {0, 4}) + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"{node}: {length[node]}") + 0: 0 + 1: 1 + 2: 2 + 3: 1 + 4: 0 + >>> path[1] + [0, 1] + >>> path[3] + [4, 3] + + >>> length, path = nx.multi_source_dijkstra(G, {0, 4}, 1) + >>> length + 1 + >>> path + [0, 1] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + Based on the Python cookbook recipe (119466) at + https://code.activestate.com/recipes/119466/ + + This algorithm is not guaranteed to work if edge weights + are negative or are floating point numbers + (overflows and roundoff errors can cause problems). + + Raises + ------ + ValueError + If `sources` is empty. + NodeNotFound + If any of `sources` is not in `G`. + + See Also + -------- + multi_source_dijkstra_path + multi_source_dijkstra_path_length + + """ + if not sources: + raise ValueError("sources must not be empty") + for s in sources: + if s not in G: + raise nx.NodeNotFound(f"Node {s} not found in graph") + if target in sources: + return (0, [target]) + weight = _weight_function(G, weight) + paths = {source: [source] for source in sources} # dictionary of paths + dist = _dijkstra_multisource( + G, sources, weight, paths=paths, cutoff=cutoff, target=target + ) + if target is None: + return (dist, paths) + try: + return (dist[target], paths[target]) + except KeyError as err: + raise nx.NetworkXNoPath(f"No path to {target}.") from err + + +def _dijkstra(G, source, weight, pred=None, paths=None, cutoff=None, target=None): + """Uses Dijkstra's algorithm to find shortest weighted paths from a + single source. + + This is a convenience function for :func:`_dijkstra_multisource` + with all the arguments the same, except the keyword argument + `sources` set to ``[source]``. + + """ + return _dijkstra_multisource( + G, [source], weight, pred=pred, paths=paths, cutoff=cutoff, target=target + ) + + +def _dijkstra_multisource( + G, sources, weight, pred=None, paths=None, cutoff=None, target=None +): + """Uses Dijkstra's algorithm to find shortest weighted paths + + Parameters + ---------- + G : NetworkX graph + + sources : non-empty iterable of nodes + Starting nodes for paths. If this is just an iterable containing + a single node, then all paths computed by this function will + start from that node. If there are two or more nodes in this + iterable, the computed paths may begin from any one of the start + nodes. + + weight: function + Function with (u, v, data) input that returns that edge's weight + or None to indicate a hidden edge + + pred: dict of lists, optional(default=None) + dict to store a list of predecessors keyed by that node + If None, predecessors are not stored. + + paths: dict, optional (default=None) + dict to store the path list from source to each node, keyed by node. + If None, paths are not stored. + + target : node label, optional + Ending node for path. Search is halted when target is found. 
+ + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + Returns + ------- + distance : dictionary + A mapping from node to shortest distance to that node from one + of the source nodes. + + Raises + ------ + NodeNotFound + If any of `sources` is not in `G`. + + Notes + ----- + The optional predecessor and path dictionaries can be accessed by + the caller through the original pred and paths objects passed + as arguments. No need to explicitly return pred or paths. + + """ + G_succ = G._adj # For speed-up (and works for both directed and undirected graphs) + + dist = {} # dictionary of final distances + seen = {} + # fringe is heapq with 3-tuples (distance,c,node) + # use the count c to avoid comparing nodes (may not be able to) + c = count() + fringe = [] + for source in sources: + seen[source] = 0 + heappush(fringe, (0, next(c), source)) + while fringe: + (d, _, v) = heappop(fringe) + if v in dist: + continue # already searched this node. + dist[v] = d + if v == target: + break + for u, e in G_succ[v].items(): + cost = weight(v, u, e) + if cost is None: + continue + vu_dist = dist[v] + cost + if cutoff is not None: + if vu_dist > cutoff: + continue + if u in dist: + u_dist = dist[u] + if vu_dist < u_dist: + raise ValueError("Contradictory paths found:", "negative weights?") + elif pred is not None and vu_dist == u_dist: + pred[u].append(v) + elif u not in seen or vu_dist < seen[u]: + seen[u] = vu_dist + heappush(fringe, (vu_dist, next(c), u)) + if paths is not None: + paths[u] = paths[v] + [u] + if pred is not None: + pred[u] = [v] + elif vu_dist == seen[u]: + if pred is not None: + pred[u].append(v) + + # The optional predecessor and path dictionaries can be accessed + # by the caller via the pred and paths objects passed as arguments. + return dist + + +@nx._dispatchable(edge_attrs="weight") +def dijkstra_predecessor_and_distance(G, source, cutoff=None, weight="weight"): + """Compute weighted shortest path length and predecessors. + + Uses Dijkstra's Method to obtain the shortest weighted paths + and return dictionaries of predecessors for each node and + distance for each node from the `source`. + + Parameters + ---------- + G : NetworkX graph + + source : node label + Starting node for path + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + pred, distance : dictionaries + Returns two dictionaries representing a list of predecessors + of a node and the distance to each node. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. 
+
+    The list of predecessors contains more than one element only when
+    there is more than one shortest path to the key node.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
+    >>> pred, dist = nx.dijkstra_predecessor_and_distance(G, 0)
+    >>> sorted(pred.items())
+    [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])]
+    >>> sorted(dist.items())
+    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
+
+    >>> pred, dist = nx.dijkstra_predecessor_and_distance(G, 0, 1)
+    >>> sorted(pred.items())
+    [(0, []), (1, [0])]
+    >>> sorted(dist.items())
+    [(0, 0), (1, 1)]
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
+    weight = _weight_function(G, weight)
+    pred = {source: []}  # dictionary of predecessors
+    return (pred, _dijkstra(G, source, weight, pred=pred, cutoff=cutoff))
+
+
+@nx._dispatchable(edge_attrs="weight")
+def all_pairs_dijkstra(G, cutoff=None, weight="weight"):
+    """Find shortest weighted paths and lengths between all nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    cutoff : integer or float, optional
+        Length (sum of edge weights) at which the search is stopped.
+        If cutoff is provided, only return paths with summed weight <= cutoff.
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+
+    Yields
+    ------
+    (node, (distance, path)) : (node obj, (dict, dict))
+        Each source node has two associated dicts. The first holds distance
+        keyed by target and the second holds paths keyed by target.
+        (See single_source_dijkstra for the source/target node terminology.)
+        If desired you can apply `dict()` to this iterator to create a dict
+        keyed by source node to the two dicts.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> len_path = dict(nx.all_pairs_dijkstra(G))
+    >>> len_path[3][0][1]
+    2
+    >>> for node in [0, 1, 2, 3, 4]:
+    ...     print(f"3 - {node}: {len_path[3][0][node]}")
+    3 - 0: 3
+    3 - 1: 2
+    3 - 2: 1
+    3 - 3: 0
+    3 - 4: 1
+    >>> len_path[3][1][1]
+    [3, 2, 1]
+    >>> for n, (dist, path) in nx.all_pairs_dijkstra(G):
+    ...     print(path[1])
+    [0, 1]
+    [1]
+    [2, 1]
+    [3, 2, 1]
+    [4, 3, 2, 1]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The yielded dicts only have keys for reachable nodes.
+    """
+    for n in G:
+        dist, path = single_source_dijkstra(G, n, cutoff=cutoff, weight=weight)
+        yield (n, (dist, path))
+
+
+@nx._dispatchable(edge_attrs="weight")
+def all_pairs_dijkstra_path_length(G, cutoff=None, weight="weight"):
+    """Compute shortest path lengths between all nodes in a weighted graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    cutoff : integer or float, optional
+        Length (sum of edge weights) at which the search is stopped.
+        If cutoff is provided, only return paths with summed weight <= cutoff.
+ + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + distance : iterator + (source, dictionary) iterator with dictionary keyed by target and + shortest path length as the key value. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length = dict(nx.all_pairs_dijkstra_path_length(G)) + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"1 - {node}: {length[1][node]}") + 1 - 0: 1 + 1 - 1: 0 + 1 - 2: 1 + 1 - 3: 2 + 1 - 4: 3 + >>> length[3][2] + 1 + >>> length[2][2] + 0 + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The dictionary returned only has keys for reachable node pairs. + """ + length = single_source_dijkstra_path_length + for n in G: + yield (n, length(G, n, cutoff=cutoff, weight=weight)) + + +@nx._dispatchable(edge_attrs="weight") +def all_pairs_dijkstra_path(G, cutoff=None, weight="weight"): + """Compute shortest paths between all nodes in a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + cutoff : integer or float, optional + Length (sum of edge weights) at which the search is stopped. + If cutoff is provided, only return paths with summed weight <= cutoff. + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number or None to indicate a hidden edge. + + Returns + ------- + paths : iterator + (source, dictionary) iterator with dictionary keyed by target and + shortest path as the key value. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> path = dict(nx.all_pairs_dijkstra_path(G)) + >>> path[0][4] + [0, 1, 2, 3, 4] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + See Also + -------- + floyd_warshall, all_pairs_bellman_ford_path + + """ + path = single_source_dijkstra_path + # TODO This can be trivially parallelized. + for n in G: + yield (n, path(G, n, cutoff=cutoff, weight=weight)) + + +@nx._dispatchable(edge_attrs="weight") +def bellman_ford_predecessor_and_distance( + G, source, target=None, weight="weight", heuristic=False +): + """Compute shortest path lengths and predecessors on shortest paths + in weighted graphs. + + The algorithm has a running time of $O(mn)$ where $n$ is the number of + nodes and $m$ is the number of edges. It is slower than Dijkstra but + can handle negative edge weights. + + If a negative cycle is detected, you can use :func:`find_negative_cycle` + to return the cycle and examine it. 
Shortest paths are not defined when + a negative cycle exists because once reached, the path can cycle forever + to build up arbitrarily low weights. + + Parameters + ---------- + G : NetworkX graph + The algorithm works for all types of graphs, including directed + graphs and multigraphs. + + source: node label + Starting node for path + + target : node label, optional + Ending node for path + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + heuristic : bool + Determines whether to use a heuristic to early detect negative + cycles at a hopefully negligible cost. + + Returns + ------- + pred, dist : dictionaries + Returns two dictionaries keyed by node to predecessor in the + path and to the distance from the source respectively. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + NetworkXUnbounded + If the (di)graph contains a negative (di)cycle, the + algorithm raises an exception to indicate the presence of the + negative (di)cycle. Note: any negative weight edge in an + undirected graph is a negative cycle. + + Examples + -------- + >>> G = nx.path_graph(5, create_using=nx.DiGraph()) + >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0) + >>> sorted(pred.items()) + [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])] + >>> sorted(dist.items()) + [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] + + >>> pred, dist = nx.bellman_ford_predecessor_and_distance(G, 0, 1) + >>> sorted(pred.items()) + [(0, []), (1, [0]), (2, [1]), (3, [2]), (4, [3])] + >>> sorted(dist.items()) + [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)] + + >>> G = nx.cycle_graph(5, create_using=nx.DiGraph()) + >>> G[1][2]["weight"] = -7 + >>> nx.bellman_ford_predecessor_and_distance(G, 0) + Traceback (most recent call last): + ... + networkx.exception.NetworkXUnbounded: Negative cycle detected. + + See Also + -------- + find_negative_cycle + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The dictionaries returned only have keys for nodes reachable from + the source. + + In the case where the (di)graph is not connected, if a component + not containing the source contains a negative (di)cycle, it + will not be detected. + + In NetworkX v2.1 and prior, the source node had predecessor `[None]`. 
+    In NetworkX v2.2 this changed to the source node having predecessor `[]`.
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
+    weight = _weight_function(G, weight)
+    if G.is_multigraph():
+        if any(
+            weight(u, v, {k: d}) < 0
+            for u, v, k, d in nx.selfloop_edges(G, keys=True, data=True)
+        ):
+            raise nx.NetworkXUnbounded("Negative cycle detected.")
+    else:
+        if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)):
+            raise nx.NetworkXUnbounded("Negative cycle detected.")
+
+    dist = {source: 0}
+    pred = {source: []}
+
+    if len(G) == 1:
+        return pred, dist
+
+    dist = _bellman_ford(
+        G, [source], weight, pred=pred, dist=dist, target=target, heuristic=heuristic
+    )
+    return (pred, dist)
+
+
+def _bellman_ford(
+    G,
+    source,
+    weight,
+    pred=None,
+    paths=None,
+    dist=None,
+    target=None,
+    heuristic=True,
+):
+    """Calls relaxation loop for Bellman–Ford algorithm and builds paths
+
+    This is an implementation of the SPFA variant.
+    See https://en.wikipedia.org/wiki/Shortest_Path_Faster_Algorithm
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source: list
+        List of source nodes. The shortest path from any of the source
+        nodes will be found if multiple sources are provided.
+
+    weight : function
+        The weight of an edge is the value returned by the function. The
+        function must accept exactly three positional arguments: the two
+        endpoints of an edge and the dictionary of edge attributes for
+        that edge. The function must return a number.
+
+    pred: dict of lists, optional (default=None)
+        dict to store a list of predecessors keyed by that node
+        If None, predecessors are not stored
+
+    paths: dict, optional (default=None)
+        dict to store the path list from source to each node, keyed by node
+        If None, paths are not stored
+
+    dist: dict, optional (default=None)
+        dict to store distance from source to the keyed node
+        If None, returned dist dict contents default to 0 for every node in the
+        source list
+
+    target: node label, optional
+        Ending node for path. Path lengths to other destinations may (and
+        probably will) be incorrect.
+
+    heuristic : bool
+        Determines whether to use a heuristic to early detect negative
+        cycles at a hopefully negligible cost.
+
+    Returns
+    -------
+    dist : dict
+        Returns a dict keyed by node to the distance from the source.
+        Dicts for paths and pred are in the mutated input dicts by those names.
+
+    Raises
+    ------
+    NodeNotFound
+        If any of `source` is not in `G`.
+
+    NetworkXUnbounded
+        If the (di)graph contains a negative (di)cycle, the
+        algorithm raises an exception to indicate the presence of the
+        negative (di)cycle. Note: any negative weight edge in an
+        undirected graph is a negative cycle.
+    """
+    if pred is None:
+        pred = {v: [] for v in source}
+
+    if dist is None:
+        dist = dict.fromkeys(source, 0)
+
+    negative_cycle_found = _inner_bellman_ford(
+        G,
+        source,
+        weight,
+        pred,
+        dist,
+        heuristic,
+    )
+    if negative_cycle_found is not None:
+        raise nx.NetworkXUnbounded("Negative cycle detected.")
+
+    if paths is not None:
+        sources = set(source)
+        dsts = [target] if target is not None else pred
+        for dst in dsts:
+            gen = _build_paths_from_predecessors(sources, dst, pred)
+            paths[dst] = next(gen)
+
+    return dist
+
+
+def _inner_bellman_ford(
+    G,
+    sources,
+    weight,
+    pred,
+    dist=None,
+    heuristic=True,
+):
+    """Inner Relaxation loop for Bellman–Ford algorithm.
+
+    This is an implementation of the SPFA variant.
+ See https://en.wikipedia.org/wiki/Shortest_Path_Faster_Algorithm + + Parameters + ---------- + G : NetworkX graph + + source: list + List of source nodes. The shortest path from any of the source + nodes will be found if multiple sources are provided. + + weight : function + The weight of an edge is the value returned by the function. The + function must accept exactly three positional arguments: the two + endpoints of an edge and the dictionary of edge attributes for + that edge. The function must return a number. + + pred: dict of lists + dict to store a list of predecessors keyed by that node + + dist: dict, optional (default=None) + dict to store distance from source to the keyed node + If None, returned dist dict contents default to 0 for every node in the + source list + + heuristic : bool + Determines whether to use a heuristic to early detect negative + cycles at a hopefully negligible cost. + + Returns + ------- + node or None + Return a node `v` where processing discovered a negative cycle. + If no negative cycle found, return None. + + Raises + ------ + NodeNotFound + If any of `source` is not in `G`. + """ + for s in sources: + if s not in G: + raise nx.NodeNotFound(f"Source {s} not in G") + + if pred is None: + pred = {v: [] for v in sources} + + if dist is None: + dist = dict.fromkeys(sources, 0) + + # Heuristic Storage setup. Note: use None because nodes cannot be None + nonexistent_edge = (None, None) + pred_edge = dict.fromkeys(sources) + recent_update = dict.fromkeys(sources, nonexistent_edge) + + G_succ = G._adj # For speed-up (and works for both directed and undirected graphs) + inf = float("inf") + n = len(G) + + count = {} + q = deque(sources) + in_q = set(sources) + while q: + u = q.popleft() + in_q.remove(u) + + # Skip relaxations if any of the predecessors of u is in the queue. + if all(pred_u not in in_q for pred_u in pred[u]): + dist_u = dist[u] + for v, e in G_succ[u].items(): + dist_v = dist_u + weight(u, v, e) + + if dist_v < dist.get(v, inf): + # In this conditional branch we are updating the path with v. + # If it happens that some earlier update also added node v + # that implies the existence of a negative cycle since + # after the update node v would lie on the update path twice. + # The update path is stored up to one of the source nodes, + # therefore u is always in the dict recent_update + if heuristic: + if v in recent_update[u]: + # Negative cycle found! + pred[v].append(u) + return v + + # Transfer the recent update info from u to v if the + # same source node is the head of the update path. + # If the source node is responsible for the cost update, + # then clear the history and use it instead. + if v in pred_edge and pred_edge[v] == u: + recent_update[v] = recent_update[u] + else: + recent_update[v] = (u, v) + + if v not in in_q: + q.append(v) + in_q.add(v) + count_v = count.get(v, 0) + 1 + if count_v == n: + # Negative cycle found! + return v + + count[v] = count_v + dist[v] = dist_v + pred[v] = [u] + pred_edge[v] = u + + elif dist.get(v) is not None and dist_v == dist.get(v): + pred[v].append(u) + + # successfully found shortest_path. No negative cycles found. + return None + + +@nx._dispatchable(edge_attrs="weight") +def bellman_ford_path(G, source, target, weight="weight"): + """Returns the shortest path from source to target in a weighted graph G. 
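+
+    Uses the Bellman-Ford algorithm, so negative edge weights are allowed as
+    long as no negative cycle is reachable from `source`; if one is reachable,
+    ``NetworkXUnbounded`` is raised.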
+ + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node + + target : node + Ending node + + weight : string or function (default="weight") + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + path : list + List of nodes in a shortest path. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + NetworkXNoPath + If no path exists between source and target. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.bellman_ford_path(G, 0, 4) + [0, 1, 2, 3, 4] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + See Also + -------- + dijkstra_path, bellman_ford_path_length + """ + length, path = single_source_bellman_ford(G, source, target=target, weight=weight) + return path + + +@nx._dispatchable(edge_attrs="weight") +def bellman_ford_path_length(G, source, target, weight="weight"): + """Returns the shortest path length from source to target + in a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + source : node label + starting node for path + + target : node label + ending node for path + + weight : string or function (default="weight") + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + length : number + Shortest path length. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + NetworkXNoPath + If no path exists between source and target. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.bellman_ford_path_length(G, 0, 4) + 4 + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + See Also + -------- + dijkstra_path_length, bellman_ford_path + """ + if source == target: + if source not in G: + raise nx.NodeNotFound(f"Node {source} not found in graph") + return 0 + + weight = _weight_function(G, weight) + + length = _bellman_ford(G, [source], weight, target=target) + + try: + return length[target] + except KeyError as err: + raise nx.NetworkXNoPath(f"node {target} not reachable from {source}") from err + + +@nx._dispatchable(edge_attrs="weight") +def single_source_bellman_ford_path(G, source, weight="weight"): + """Compute shortest path between source and all other reachable + nodes for a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path. 
+
+    weight : string or function (default="weight")
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+    Returns
+    -------
+    paths : dictionary
+        Dictionary of shortest paths keyed by target.
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> path = nx.single_source_bellman_ford_path(G, 0)
+    >>> path[4]
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    See Also
+    --------
+    single_source_dijkstra, single_source_bellman_ford
+
+    """
+    (length, path) = single_source_bellman_ford(G, source, weight=weight)
+    return path
+
+
+@nx._dispatchable(edge_attrs="weight")
+def single_source_bellman_ford_path_length(G, source, weight="weight"):
+    """Compute the shortest path length between source and all other
+    reachable nodes for a weighted graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+        Starting node for path
+
+    weight : string or function (default="weight")
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+    Returns
+    -------
+    length : dictionary
+        Dictionary of shortest path lengths keyed by target
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> length = nx.single_source_bellman_ford_path_length(G, 0)
+    >>> length[4]
+    4
+    >>> for node in [0, 1, 2, 3, 4]:
+    ...     print(f"{node}: {length[node]}")
+    0: 0
+    1: 1
+    2: 2
+    3: 3
+    4: 4
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    See Also
+    --------
+    single_source_dijkstra, single_source_bellman_ford
+
+    """
+    weight = _weight_function(G, weight)
+    return _bellman_ford(G, [source], weight)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def single_source_bellman_ford(G, source, target=None, weight="weight"):
+    """Compute shortest paths and lengths in a weighted graph G.
+
+    Uses Bellman-Ford algorithm for shortest paths.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node label
+        Starting node for path
+
+    target : node label, optional
+        Ending node for path
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+ + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + distance, path : pair of dictionaries, or numeric and list + If target is None, returns a tuple of two dictionaries keyed by node. + The first dictionary stores distance from one of the source nodes. + The second stores the path from one of the sources to that node. + If target is not None, returns a tuple of (distance, path) where + distance is the distance from source to target and path is a list + representing the path from source to target. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length, path = nx.single_source_bellman_ford(G, 0) + >>> length[4] + 4 + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"{node}: {length[node]}") + 0: 0 + 1: 1 + 2: 2 + 3: 3 + 4: 4 + >>> path[4] + [0, 1, 2, 3, 4] + >>> length, path = nx.single_source_bellman_ford(G, 0, 1) + >>> length + 1 + >>> path + [0, 1] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + See Also + -------- + single_source_dijkstra + single_source_bellman_ford_path + single_source_bellman_ford_path_length + """ + if source == target: + if source not in G: + raise nx.NodeNotFound(f"Node {source} is not found in the graph") + return (0, [source]) + + weight = _weight_function(G, weight) + + paths = {source: [source]} # dictionary of paths + dist = _bellman_ford(G, [source], weight, paths=paths, target=target) + if target is None: + return (dist, paths) + try: + return (dist[target], paths[target]) + except KeyError as err: + msg = f"Node {target} not reachable from {source}" + raise nx.NetworkXNoPath(msg) from err + + +@nx._dispatchable(edge_attrs="weight") +def all_pairs_bellman_ford_path_length(G, weight="weight"): + """Compute shortest path lengths between all nodes in a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + weight : string or function (default="weight") + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + distance : iterator + (source, dictionary) iterator with dictionary keyed by target and + shortest path length as the key value. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> length = dict(nx.all_pairs_bellman_ford_path_length(G)) + >>> for node in [0, 1, 2, 3, 4]: + ... print(f"1 - {node}: {length[1][node]}") + 1 - 0: 1 + 1 - 1: 0 + 1 - 2: 1 + 1 - 3: 2 + 1 - 4: 3 + >>> length[3][2] + 1 + >>> length[2][2] + 0 + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The dictionary returned only has keys for reachable node pairs. 
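+
+    For example, in a directed graph, each source's dictionary only
+    contains the targets reachable from it:
+
+    >>> G = nx.DiGraph([(0, 1)])
+    >>> dict(nx.all_pairs_bellman_ford_path_length(G))
+    {0: {0: 0, 1: 1}, 1: {1: 0}}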
+ """ + length = single_source_bellman_ford_path_length + for n in G: + yield (n, dict(length(G, n, weight=weight))) + + +@nx._dispatchable(edge_attrs="weight") +def all_pairs_bellman_ford_path(G, weight="weight"): + """Compute shortest paths between all nodes in a weighted graph. + + Parameters + ---------- + G : NetworkX graph + + weight : string or function (default="weight") + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + paths : iterator + (source, dictionary) iterator with dictionary keyed by target and + shortest path as the key value. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> path = dict(nx.all_pairs_bellman_ford_path(G)) + >>> path[0][4] + [0, 1, 2, 3, 4] + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + See Also + -------- + floyd_warshall, all_pairs_dijkstra_path + + """ + path = single_source_bellman_ford_path + for n in G: + yield (n, path(G, n, weight=weight)) + + +@nx._dispatchable(edge_attrs="weight") +def goldberg_radzik(G, source, weight="weight"): + """Compute shortest path lengths and predecessors on shortest paths + in weighted graphs. + + The algorithm has a running time of $O(mn)$ where $n$ is the number of + nodes and $m$ is the number of edges. It is slower than Dijkstra but + can handle negative edge weights. + + Parameters + ---------- + G : NetworkX graph + The algorithm works for all types of graphs, including directed + graphs and multigraphs. + + source: node label + Starting node for path + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + pred, dist : dictionaries + Returns two dictionaries keyed by node to predecessor in the + path and to the distance from the source respectively. + + Raises + ------ + NodeNotFound + If `source` is not in `G`. + + NetworkXUnbounded + If the (di)graph contains a negative (di)cycle, the + algorithm raises an exception to indicate the presence of the + negative (di)cycle. Note: any negative weight edge in an + undirected graph is a negative cycle. + + As of NetworkX v3.2, a zero weight cycle is no longer + incorrectly reported as a negative weight cycle. 
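+
+    See Also
+    --------
+    bellman_ford_predecessor_and_distance, find_negative_cycle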
+
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5, create_using=nx.DiGraph())
+    >>> pred, dist = nx.goldberg_radzik(G, 0)
+    >>> sorted(pred.items())
+    [(0, None), (1, 0), (2, 1), (3, 2), (4, 3)]
+    >>> sorted(dist.items())
+    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
+
+    >>> G = nx.cycle_graph(5, create_using=nx.DiGraph())
+    >>> G[1][2]["weight"] = -7
+    >>> nx.goldberg_radzik(G, 0)
+    Traceback (most recent call last):
+    ...
+    networkx.exception.NetworkXUnbounded: Negative cycle detected.
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The dictionaries returned only have keys for nodes reachable from
+    the source.
+
+    In the case where the (di)graph is not connected, if a component
+    not containing the source contains a negative (di)cycle, it
+    will not be detected.
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"Node {source} is not found in the graph")
+    weight = _weight_function(G, weight)
+    if G.is_multigraph():
+        if any(
+            weight(u, v, {k: d}) < 0
+            for u, v, k, d in nx.selfloop_edges(G, keys=True, data=True)
+        ):
+            raise nx.NetworkXUnbounded("Negative cycle detected.")
+    else:
+        if any(weight(u, v, d) < 0 for u, v, d in nx.selfloop_edges(G, data=True)):
+            raise nx.NetworkXUnbounded("Negative cycle detected.")
+
+    if len(G) == 1:
+        return {source: None}, {source: 0}
+
+    G_succ = G._adj  # For speed-up (and works for both directed and undirected graphs)
+
+    inf = float("inf")
+    d = dict.fromkeys(G, inf)
+    d[source] = 0
+    pred = {source: None}
+
+    def topo_sort(relabeled):
+        """Topologically sort nodes relabeled in the previous round and detect
+        negative cycles.
+        """
+        # List of nodes to scan in this round. Denoted by A in Goldberg and
+        # Radzik's paper.
+        to_scan = []
+        # In the DFS in the loop below, neg_count records for each node the
+        # number of edges of negative reduced costs on the path from a DFS root
+        # to the node in the DFS forest. The reduced cost of an edge (u, v) is
+        # defined as d[u] + weight[u][v] - d[v].
+        #
+        # neg_count also doubles as the DFS visit marker array.
+        neg_count = {}
+        for u in relabeled:
+            # Skip visited nodes.
+            if u in neg_count:
+                continue
+            d_u = d[u]
+            # Skip nodes without out-edges of negative reduced costs.
+            if all(d_u + weight(u, v, e) >= d[v] for v, e in G_succ[u].items()):
+                continue
+            # Nonrecursive DFS that inserts nodes reachable from u via edges of
+            # nonpositive reduced costs into to_scan in (reverse) topological
+            # order.
+            stack = [(u, iter(G_succ[u].items()))]
+            in_stack = {u}
+            neg_count[u] = 0
+            while stack:
+                u, it = stack[-1]
+                try:
+                    v, e = next(it)
+                except StopIteration:
+                    to_scan.append(u)
+                    stack.pop()
+                    in_stack.remove(u)
+                    continue
+                t = d[u] + weight(u, v, e)
+                d_v = d[v]
+                if t < d_v:
+                    is_neg = t < d_v
+                    d[v] = t
+                    pred[v] = u
+                    if v not in neg_count:
+                        neg_count[v] = neg_count[u] + int(is_neg)
+                        stack.append((v, iter(G_succ[v].items())))
+                        in_stack.add(v)
+                    elif v in in_stack and neg_count[u] + int(is_neg) > neg_count[v]:
+                        # (u, v) is a back edge, and the cycle formed by the
+                        # path v to u and (u, v) contains at least one edge of
+                        # negative reduced cost. The cycle must be of negative
+                        # cost.
+                        raise nx.NetworkXUnbounded("Negative cycle detected.")
+        to_scan.reverse()
+        return to_scan
+
+    def relax(to_scan):
+        """Relax out-edges of relabeled nodes."""
+        relabeled = set()
+        # Scan nodes in to_scan in topological order and relax incident
+        # out-edges. Add the relabeled nodes to the set `relabeled`.
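+        # Since to_scan is topologically ordered on the nonpositive
+        # reduced-cost subgraph, every node is scanned after all of its
+        # to_scan predecessors, so one pass per round suffices.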
+        for u in to_scan:
+            d_u = d[u]
+            for v, e in G_succ[u].items():
+                w_e = weight(u, v, e)
+                if d_u + w_e < d[v]:
+                    d[v] = d_u + w_e
+                    pred[v] = u
+                    relabeled.add(v)
+        return relabeled
+
+    # Set of nodes relabeled in the last round of scan operations. Denoted by B
+    # in Goldberg and Radzik's paper.
+    relabeled = {source}
+
+    while relabeled:
+        to_scan = topo_sort(relabeled)
+        relabeled = relax(to_scan)
+
+    d = {u: d[u] for u in pred}
+    return pred, d
+
+
+@nx._dispatchable(edge_attrs="weight")
+def negative_edge_cycle(G, weight="weight", heuristic=True):
+    """Returns True if there exists a negative edge cycle anywhere in G.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+    heuristic : bool
+        Determines whether to use a heuristic to early detect negative
+        cycles at a negligible cost. In case of graphs with a negative cycle,
+        the performance of detection increases by at least an order of magnitude.
+
+    Returns
+    -------
+    negative_cycle : bool
+        True if a negative edge cycle exists, otherwise False.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(5, create_using=nx.DiGraph())
+    >>> print(nx.negative_edge_cycle(G))
+    False
+    >>> G[1][2]["weight"] = -7
+    >>> print(nx.negative_edge_cycle(G))
+    True
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    This algorithm uses bellman_ford_predecessor_and_distance() but finds
+    negative cycles on any component by first adding a new node connected to
+    every node, and starting bellman_ford_predecessor_and_distance on that
+    node. It then removes that extra node.
+    """
+    if G.size() == 0:
+        return False
+
+    # find unused node to use temporarily
+    newnode = -1
+    while newnode in G:
+        newnode -= 1
+    # connect it to all nodes
+    G.add_edges_from([(newnode, n) for n in G])
+
+    try:
+        bellman_ford_predecessor_and_distance(
+            G, newnode, weight=weight, heuristic=heuristic
+        )
+    except nx.NetworkXUnbounded:
+        return True
+    finally:
+        G.remove_node(newnode)
+    return False
+
+
+@nx._dispatchable(edge_attrs="weight")
+def find_negative_cycle(G, source, weight="weight"):
+    """Returns a cycle with negative total weight if it exists.
+
+    Bellman-Ford is used to find shortest paths. That algorithm
+    stops if there exists a negative cycle. This algorithm
+    picks up from there and returns the found negative cycle.
+
+    The cycle consists of a list of nodes in the cycle order. The last
+    node equals the first to make it a cycle.
+    You can look up the edge weights in the original graph. In the case
+    of multigraphs the relevant edge is the minimal weight edge between
+    the nodes in the 2-tuple.
+
+    If the graph has no negative cycle, a NetworkXError is raised.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source: node label
+        The search for the negative cycle will start from this node.
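+        Only a negative cycle reachable from this node will be found.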
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph()
+    >>> G.add_weighted_edges_from(
+    ...     [(0, 1, 2), (1, 2, 2), (2, 0, 1), (1, 4, 2), (4, 0, -5)]
+    ... )
+    >>> nx.find_negative_cycle(G, 0)
+    [4, 0, 1, 4]
+
+    Returns
+    -------
+    cycle : list
+        A list of nodes in the order of the cycle found. The last node
+        equals the first to indicate a cycle.
+
+    Raises
+    ------
+    NetworkXError
+        If no negative cycle is found.
+    """
+    weight = _weight_function(G, weight)
+    pred = {source: []}
+
+    v = _inner_bellman_ford(G, [source], weight, pred=pred)
+    if v is None:
+        raise nx.NetworkXError("No negative cycles detected.")
+
+    # negative cycle detected... find it
+    neg_cycle = []
+    stack = [(v, list(pred[v]))]
+    seen = {v}
+    while stack:
+        node, preds = stack[-1]
+        if v in preds:
+            # found the cycle
+            neg_cycle.extend([node, v])
+            neg_cycle = list(reversed(neg_cycle))
+            return neg_cycle
+
+        if preds:
+            nbr = preds.pop()
+            if nbr not in seen:
+                stack.append((nbr, list(pred[nbr])))
+                neg_cycle.append(node)
+                seen.add(nbr)
+        else:
+            stack.pop()
+            if neg_cycle:
+                neg_cycle.pop()
+            else:
+                # negative self-loop; the weight function expects the edge
+                # data dict as its third argument, not the graph itself
+                if v in G[v] and weight(v, v, G[v][v]) < 0:
+                    return [v, v]
+                # should not reach here
+                raise nx.NetworkXError("Negative cycle is detected but not found")
+    # should not get here...
+    msg = "negative cycle detected but not identified"
+    raise nx.NetworkXUnbounded(msg)
+
+
+@nx._dispatchable(edge_attrs="weight")
+def bidirectional_dijkstra(G, source, target, weight="weight"):
+    r"""Dijkstra's algorithm for shortest paths using bidirectional search.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Starting node.
+
+    target : node
+        Ending node.
+
+    weight : string or function
+        If this is a string, then edge weights will be accessed via the
+        edge attribute with this key (that is, the weight of the edge
+        joining `u` to `v` will be ``G.edges[u, v][weight]``). If no
+        such edge attribute exists, the weight of the edge is assumed to
+        be one.
+
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+
+    Returns
+    -------
+    length, path : number and list
+        length is the distance from source to target.
+        path is a list of nodes on a path from source to target.
+
+    Raises
+    ------
+    NodeNotFound
+        If `source` or `target` is not in `G`.
+
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> length, path = nx.bidirectional_dijkstra(G, 0, 4)
+    >>> print(length)
+    4
+    >>> print(path)
+    [0, 1, 2, 3, 4]
+
+    Notes
+    -----
+    Edge weight attributes must be numerical.
+    Distances are calculated as sums of weighted edges traversed.
+
+    The weight function can be used to hide edges by returning None.
+ So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + In practice bidirectional Dijkstra is much more than twice as fast as + ordinary Dijkstra. + + Ordinary Dijkstra expands nodes in a sphere-like manner from the + source. The radius of this sphere will eventually be the length + of the shortest path. Bidirectional Dijkstra will expand nodes + from both the source and the target, making two spheres of half + this radius. Volume of the first sphere is `\pi*r*r` while the + others are `2*\pi*r/2*r/2`, making up half the volume. + + This algorithm is not guaranteed to work if edge weights + are negative or are floating point numbers + (overflows and roundoff errors can cause problems). + + See Also + -------- + shortest_path + shortest_path_length + """ + if source not in G: + raise nx.NodeNotFound(f"Source {source} is not in G") + + if target not in G: + raise nx.NodeNotFound(f"Target {target} is not in G") + + if source == target: + return (0, [source]) + + weight = _weight_function(G, weight) + # Init: [Forward, Backward] + dists = [{}, {}] # dictionary of final distances + paths = [{source: [source]}, {target: [target]}] # dictionary of paths + fringe = [[], []] # heap of (distance, node) for choosing node to expand + seen = [{source: 0}, {target: 0}] # dict of distances to seen nodes + c = count() + # initialize fringe heap + heappush(fringe[0], (0, next(c), source)) + heappush(fringe[1], (0, next(c), target)) + # neighs for extracting correct neighbor information + if G.is_directed(): + neighs = [G._succ, G._pred] + else: + neighs = [G._adj, G._adj] + # variables to hold shortest discovered path + # finaldist = 1e30000 + finalpath = [] + dir = 1 + while fringe[0] and fringe[1]: + # choose direction + # dir == 0 is forward direction and dir == 1 is back + dir = 1 - dir + # extract closest to expand + (dist, _, v) = heappop(fringe[dir]) + if v in dists[dir]: + # Shortest path to v has already been found + continue + # update distance + dists[dir][v] = dist # equal to seen[dir][v] + if v in dists[1 - dir]: + # if we have scanned v in both directions we are done + # we have now discovered the shortest path + return (finaldist, finalpath) + + for w, d in neighs[dir][v].items(): + # weight(v, w, d) for forward and weight(w, v, d) for back direction + cost = weight(v, w, d) if dir == 0 else weight(w, v, d) + if cost is None: + continue + vwLength = dists[dir][v] + cost + if w in dists[dir]: + if vwLength < dists[dir][w]: + raise ValueError("Contradictory paths found: negative weights?") + elif w not in seen[dir] or vwLength < seen[dir][w]: + # relaxing + seen[dir][w] = vwLength + heappush(fringe[dir], (vwLength, next(c), w)) + paths[dir][w] = paths[dir][v] + [w] + if w in seen[0] and w in seen[1]: + # see if this path is better than the already + # discovered shortest path + totaldist = seen[0][w] + seen[1][w] + if finalpath == [] or finaldist > totaldist: + finaldist = totaldist + revpath = paths[1][w][:] + revpath.reverse() + finalpath = paths[0][w] + revpath[1:] + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") + + +@nx._dispatchable(edge_attrs="weight") +def johnson(G, weight="weight"): + r"""Uses Johnson's Algorithm to compute shortest paths. + + Johnson's Algorithm finds a shortest path between each pair of + nodes in a weighted graph even if negative weights are present. 
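+    If the graph contains a negative cycle, a ``NetworkXUnbounded`` exception
+    is raised, because shortest paths are not defined in that case.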
+ + Parameters + ---------- + G : NetworkX graph + + weight : string or function + If this is a string, then edge weights will be accessed via the + edge attribute with this key (that is, the weight of the edge + joining `u` to `v` will be ``G.edges[u, v][weight]``). If no + such edge attribute exists, the weight of the edge is assumed to + be one. + + If this is a function, the weight of an edge is the value + returned by the function. The function must accept exactly three + positional arguments: the two endpoints of an edge and the + dictionary of edge attributes for that edge. The function must + return a number. + + Returns + ------- + distance : dictionary + Dictionary, keyed by source and target, of shortest paths. + + Examples + -------- + >>> graph = nx.DiGraph() + >>> graph.add_weighted_edges_from( + ... [("0", "3", 3), ("0", "1", -5), ("0", "2", 2), ("1", "2", 4), ("2", "3", 1)] + ... ) + >>> paths = nx.johnson(graph, weight="weight") + >>> paths["0"]["2"] + ['0', '1', '2'] + + Notes + ----- + Johnson's algorithm is suitable even for graphs with negative weights. It + works by using the Bellman–Ford algorithm to compute a transformation of + the input graph that removes all negative weights, allowing Dijkstra's + algorithm to be used on the transformed graph. + + The time complexity of this algorithm is $O(n^2 \log n + n m)$, + where $n$ is the number of nodes and $m$ the number of edges in the + graph. For dense graphs, this may be faster than the Floyd–Warshall + algorithm. + + See Also + -------- + floyd_warshall_predecessor_and_distance + floyd_warshall_numpy + all_pairs_shortest_path + all_pairs_shortest_path_length + all_pairs_dijkstra_path + bellman_ford_predecessor_and_distance + all_pairs_bellman_ford_path + all_pairs_bellman_ford_path_length + + """ + dist = dict.fromkeys(G, 0) + pred = {v: [] for v in G} + weight = _weight_function(G, weight) + + # Calculate distance of shortest paths + dist_bellman = _bellman_ford(G, list(G), weight, pred=pred, dist=dist) + + # Update the weight function to take into account the Bellman--Ford + # relaxation distances. + def new_weight(u, v, d): + return weight(u, v, d) + dist_bellman[u] - dist_bellman[v] + + def dist_path(v): + paths = {v: [v]} + _dijkstra(G, v, new_weight, paths=paths) + return paths + + return {v: dist_path(v) for v in G} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py new file mode 100644 index 0000000..245bc89 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/similarity.py @@ -0,0 +1,1783 @@ +"""Functions measuring similarity using graph edit distance. + +The graph edit distance is the number of edge/node changes needed +to make two graphs isomorphic. + +The default algorithm/implementation is sub-optimal for some graphs. +The problem of finding the exact Graph Edit Distance (GED) is NP-hard +so it is often slow. If the simple interface `graph_edit_distance` +takes too long for your graph, try `optimize_graph_edit_distance` +and/or `optimize_edit_paths`. + +At the same time, I encourage capable people to investigate +alternative GED algorithms, in order to improve the choices available. 
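+
+For example, turning a path of three nodes into a triangle requires a single
+edge insertion, so the graph edit distance between them is 1:
+
+>>> import networkx as nx
+>>> nx.graph_edit_distance(nx.path_graph(3), nx.cycle_graph(3))
+1.0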
+""" + +import math +import time +import warnings +from dataclasses import dataclass +from itertools import product + +import networkx as nx +from networkx.utils import np_random_state + +__all__ = [ + "graph_edit_distance", + "optimal_edit_paths", + "optimize_graph_edit_distance", + "optimize_edit_paths", + "simrank_similarity", + "panther_similarity", + "generate_random_paths", +] + + +@nx._dispatchable( + graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True +) +def graph_edit_distance( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + roots=None, + upper_bound=None, + timeout=None, +): + """Returns GED (graph edit distance) between graphs G1 and G2. + + Graph edit distance is a graph similarity measure analogous to + Levenshtein distance for strings. It is defined as minimum cost + of edit path (sequence of node and edge edit operations) + transforming graph G1 to graph isomorphic to G2. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be of the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 + should be considered equal during matching. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute + dictionaries for n1 and n2 as inputs. + + Ignored if node_subst_cost is specified. If neither + node_match nor node_subst_cost are specified then node + attributes are not considered. + + edge_match : callable + A function that returns True if the edge attribute dictionaries + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during matching. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute + dictionaries of the edges under consideration. + + Ignored if edge_subst_cost is specified. If neither + edge_match nor edge_subst_cost are specified then edge + attributes are not considered. + + node_subst_cost, node_del_cost, node_ins_cost : callable + Functions that return the costs of node substitution, node + deletion, and node insertion, respectively. + + The functions will be called like + + node_subst_cost(G1.nodes[n1], G2.nodes[n2]), + node_del_cost(G1.nodes[n1]), + node_ins_cost(G2.nodes[n2]). + + That is, the functions will receive the node attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function node_subst_cost overrides node_match if specified. + If neither node_match nor node_subst_cost are specified then + default node substitution cost of 0 is used (node attributes + are not considered during matching). + + If node_del_cost is not specified then default node deletion + cost of 1 is used. If node_ins_cost is not specified then + default node insertion cost of 1 is used. + + edge_subst_cost, edge_del_cost, edge_ins_cost : callable + Functions that return the costs of edge substitution, edge + deletion, and edge insertion, respectively. + + The functions will be called like + + edge_subst_cost(G1[u1][v1], G2[u2][v2]), + edge_del_cost(G1[u1][v1]), + edge_ins_cost(G2[u2][v2]). + + That is, the functions will receive the edge attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function edge_subst_cost overrides edge_match if specified. 
+ If neither edge_match nor edge_subst_cost are specified then + default edge substitution cost of 0 is used (edge attributes + are not considered during matching). + + If edge_del_cost is not specified then default edge deletion + cost of 1 is used. If edge_ins_cost is not specified then + default edge insertion cost of 1 is used. + + roots : 2-tuple + Tuple where first element is a node in G1 and the second + is a node in G2. + These nodes are forced to be matched in the comparison to + allow comparison between rooted graphs. + + upper_bound : numeric + Maximum edit distance to consider. Return None if no edit + distance under or equal to upper_bound exists. + + timeout : numeric + Maximum number of seconds to execute. + After timeout is met, the current best GED is returned. + + Examples + -------- + >>> G1 = nx.cycle_graph(6) + >>> G2 = nx.wheel_graph(7) + >>> nx.graph_edit_distance(G1, G2) + 7.0 + + >>> G1 = nx.star_graph(5) + >>> G2 = nx.star_graph(5) + >>> nx.graph_edit_distance(G1, G2, roots=(0, 0)) + 0.0 + >>> nx.graph_edit_distance(G1, G2, roots=(1, 0)) + 8.0 + + See Also + -------- + optimal_edit_paths, optimize_graph_edit_distance, + + is_isomorphic: test for graph edit distance of 0 + + References + ---------- + .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick + Martineau. An Exact Graph Edit Distance Algorithm for Solving + Pattern Recognition Problems. 4th International Conference on + Pattern Recognition Applications and Methods 2015, Jan 2015, + Lisbon, Portugal. 2015, + <10.5220/0005209202710278>. + https://hal.archives-ouvertes.fr/hal-01168816 + + """ + bestcost = None + for _, _, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + True, + roots, + timeout, + ): + # assert bestcost is None or cost < bestcost + bestcost = cost + return bestcost + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def optimal_edit_paths( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, +): + """Returns all minimum-cost edit paths transforming G1 to G2. + + Graph edit path is a sequence of node and edge edit operations + transforming graph G1 to graph isomorphic to G2. Edit operations + include substitutions, deletions, and insertions. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be of the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 + should be considered equal during matching. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute + dictionaries for n1 and n2 as inputs. + + Ignored if node_subst_cost is specified. If neither + node_match nor node_subst_cost are specified then node + attributes are not considered. + + edge_match : callable + A function that returns True if the edge attribute dictionaries + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during matching. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute + dictionaries of the edges under consideration. + + Ignored if edge_subst_cost is specified. 
If neither + edge_match nor edge_subst_cost are specified then edge + attributes are not considered. + + node_subst_cost, node_del_cost, node_ins_cost : callable + Functions that return the costs of node substitution, node + deletion, and node insertion, respectively. + + The functions will be called like + + node_subst_cost(G1.nodes[n1], G2.nodes[n2]), + node_del_cost(G1.nodes[n1]), + node_ins_cost(G2.nodes[n2]). + + That is, the functions will receive the node attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function node_subst_cost overrides node_match if specified. + If neither node_match nor node_subst_cost are specified then + default node substitution cost of 0 is used (node attributes + are not considered during matching). + + If node_del_cost is not specified then default node deletion + cost of 1 is used. If node_ins_cost is not specified then + default node insertion cost of 1 is used. + + edge_subst_cost, edge_del_cost, edge_ins_cost : callable + Functions that return the costs of edge substitution, edge + deletion, and edge insertion, respectively. + + The functions will be called like + + edge_subst_cost(G1[u1][v1], G2[u2][v2]), + edge_del_cost(G1[u1][v1]), + edge_ins_cost(G2[u2][v2]). + + That is, the functions will receive the edge attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function edge_subst_cost overrides edge_match if specified. + If neither edge_match nor edge_subst_cost are specified then + default edge substitution cost of 0 is used (edge attributes + are not considered during matching). + + If edge_del_cost is not specified then default edge deletion + cost of 1 is used. If edge_ins_cost is not specified then + default edge insertion cost of 1 is used. + + upper_bound : numeric + Maximum edit distance to consider. + + Returns + ------- + edit_paths : list of tuples (node_edit_path, edge_edit_path) + - node_edit_path : list of tuples ``(u, v)`` indicating node transformations + between `G1` and `G2`. ``u`` is `None` for insertion, ``v`` is `None` + for deletion. + - edge_edit_path : list of tuples ``((u1, v1), (u2, v2))`` indicating edge + transformations between `G1` and `G2`. ``(None, (u2,v2))`` for insertion + and ``((u1,v1), None)`` for deletion. + + cost : numeric + Optimal edit path cost (graph edit distance). When the cost + is zero, it indicates that `G1` and `G2` are isomorphic. + + Examples + -------- + >>> G1 = nx.cycle_graph(4) + >>> G2 = nx.wheel_graph(5) + >>> paths, cost = nx.optimal_edit_paths(G1, G2) + >>> len(paths) + 40 + >>> cost + 5.0 + + Notes + ----- + To transform `G1` into a graph isomorphic to `G2`, apply the node + and edge edits in the returned ``edit_paths``. + In the case of isomorphic graphs, the cost is zero, and the paths + represent different isomorphic mappings (isomorphisms). That is, the + edits involve renaming nodes and edges to match the structure of `G2`. + + See Also + -------- + graph_edit_distance, optimize_edit_paths + + References + ---------- + .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick + Martineau. An Exact Graph Edit Distance Algorithm for Solving + Pattern Recognition Problems. 4th International Conference on + Pattern Recognition Applications and Methods 2015, Jan 2015, + Lisbon, Portugal. 2015, + <10.5220/0005209202710278>. 
+ https://hal.archives-ouvertes.fr/hal-01168816 + + """ + paths = [] + bestcost = None + for vertex_path, edge_path, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + False, + ): + # assert bestcost is None or cost <= bestcost + if bestcost is not None and cost < bestcost: + paths = [] + paths.append((vertex_path, edge_path)) + bestcost = cost + return paths, bestcost + + +@nx._dispatchable(graphs={"G1": 0, "G2": 1}) +def optimize_graph_edit_distance( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, +): + """Returns consecutive approximations of GED (graph edit distance) + between graphs G1 and G2. + + Graph edit distance is a graph similarity measure analogous to + Levenshtein distance for strings. It is defined as minimum cost + of edit path (sequence of node and edge edit operations) + transforming graph G1 to graph isomorphic to G2. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be of the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 + should be considered equal during matching. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute + dictionaries for n1 and n2 as inputs. + + Ignored if node_subst_cost is specified. If neither + node_match nor node_subst_cost are specified then node + attributes are not considered. + + edge_match : callable + A function that returns True if the edge attribute dictionaries + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during matching. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute + dictionaries of the edges under consideration. + + Ignored if edge_subst_cost is specified. If neither + edge_match nor edge_subst_cost are specified then edge + attributes are not considered. + + node_subst_cost, node_del_cost, node_ins_cost : callable + Functions that return the costs of node substitution, node + deletion, and node insertion, respectively. + + The functions will be called like + + node_subst_cost(G1.nodes[n1], G2.nodes[n2]), + node_del_cost(G1.nodes[n1]), + node_ins_cost(G2.nodes[n2]). + + That is, the functions will receive the node attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function node_subst_cost overrides node_match if specified. + If neither node_match nor node_subst_cost are specified then + default node substitution cost of 0 is used (node attributes + are not considered during matching). + + If node_del_cost is not specified then default node deletion + cost of 1 is used. If node_ins_cost is not specified then + default node insertion cost of 1 is used. + + edge_subst_cost, edge_del_cost, edge_ins_cost : callable + Functions that return the costs of edge substitution, edge + deletion, and edge insertion, respectively. + + The functions will be called like + + edge_subst_cost(G1[u1][v1], G2[u2][v2]), + edge_del_cost(G1[u1][v1]), + edge_ins_cost(G2[u2][v2]). + + That is, the functions will receive the edge attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. 
+ + Function edge_subst_cost overrides edge_match if specified. + If neither edge_match nor edge_subst_cost are specified then + default edge substitution cost of 0 is used (edge attributes + are not considered during matching). + + If edge_del_cost is not specified then default edge deletion + cost of 1 is used. If edge_ins_cost is not specified then + default edge insertion cost of 1 is used. + + upper_bound : numeric + Maximum edit distance to consider. + + Returns + ------- + Generator of consecutive approximations of graph edit distance. + + Examples + -------- + >>> G1 = nx.cycle_graph(6) + >>> G2 = nx.wheel_graph(7) + >>> for v in nx.optimize_graph_edit_distance(G1, G2): + ... minv = v + >>> minv + 7.0 + + See Also + -------- + graph_edit_distance, optimize_edit_paths + + References + ---------- + .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick + Martineau. An Exact Graph Edit Distance Algorithm for Solving + Pattern Recognition Problems. 4th International Conference on + Pattern Recognition Applications and Methods 2015, Jan 2015, + Lisbon, Portugal. 2015, + <10.5220/0005209202710278>. + https://hal.archives-ouvertes.fr/hal-01168816 + """ + for _, _, cost in optimize_edit_paths( + G1, + G2, + node_match, + edge_match, + node_subst_cost, + node_del_cost, + node_ins_cost, + edge_subst_cost, + edge_del_cost, + edge_ins_cost, + upper_bound, + True, + ): + yield cost + + +@nx._dispatchable( + graphs={"G1": 0, "G2": 1}, preserve_edge_attrs=True, preserve_node_attrs=True +) +def optimize_edit_paths( + G1, + G2, + node_match=None, + edge_match=None, + node_subst_cost=None, + node_del_cost=None, + node_ins_cost=None, + edge_subst_cost=None, + edge_del_cost=None, + edge_ins_cost=None, + upper_bound=None, + strictly_decreasing=True, + roots=None, + timeout=None, +): + """GED (graph edit distance) calculation: advanced interface. + + Graph edit path is a sequence of node and edge edit operations + transforming graph G1 to graph isomorphic to G2. Edit operations + include substitutions, deletions, and insertions. + + Graph edit distance is defined as minimum cost of edit path. + + Parameters + ---------- + G1, G2: graphs + The two graphs G1 and G2 must be of the same type. + + node_match : callable + A function that returns True if node n1 in G1 and n2 in G2 + should be considered equal during matching. + + The function will be called like + + node_match(G1.nodes[n1], G2.nodes[n2]). + + That is, the function will receive the node attribute + dictionaries for n1 and n2 as inputs. + + Ignored if node_subst_cost is specified. If neither + node_match nor node_subst_cost are specified then node + attributes are not considered. + + edge_match : callable + A function that returns True if the edge attribute dictionaries + for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should + be considered equal during matching. + + The function will be called like + + edge_match(G1[u1][v1], G2[u2][v2]). + + That is, the function will receive the edge attribute + dictionaries of the edges under consideration. + + Ignored if edge_subst_cost is specified. If neither + edge_match nor edge_subst_cost are specified then edge + attributes are not considered. + + node_subst_cost, node_del_cost, node_ins_cost : callable + Functions that return the costs of node substitution, node + deletion, and node insertion, respectively. + + The functions will be called like + + node_subst_cost(G1.nodes[n1], G2.nodes[n2]), + node_del_cost(G1.nodes[n1]), + node_ins_cost(G2.nodes[n2]). 
+ + That is, the functions will receive the node attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function node_subst_cost overrides node_match if specified. + If neither node_match nor node_subst_cost are specified then + default node substitution cost of 0 is used (node attributes + are not considered during matching). + + If node_del_cost is not specified then default node deletion + cost of 1 is used. If node_ins_cost is not specified then + default node insertion cost of 1 is used. + + edge_subst_cost, edge_del_cost, edge_ins_cost : callable + Functions that return the costs of edge substitution, edge + deletion, and edge insertion, respectively. + + The functions will be called like + + edge_subst_cost(G1[u1][v1], G2[u2][v2]), + edge_del_cost(G1[u1][v1]), + edge_ins_cost(G2[u2][v2]). + + That is, the functions will receive the edge attribute + dictionaries as inputs. The functions are expected to return + positive numeric values. + + Function edge_subst_cost overrides edge_match if specified. + If neither edge_match nor edge_subst_cost are specified then + default edge substitution cost of 0 is used (edge attributes + are not considered during matching). + + If edge_del_cost is not specified then default edge deletion + cost of 1 is used. If edge_ins_cost is not specified then + default edge insertion cost of 1 is used. + + upper_bound : numeric + Maximum edit distance to consider. + + strictly_decreasing : bool + If True, return consecutive approximations of strictly + decreasing cost. Otherwise, return all edit paths of cost + less than or equal to the previous minimum cost. + + roots : 2-tuple + Tuple where first element is a node in G1 and the second + is a node in G2. + These nodes are forced to be matched in the comparison to + allow comparison between rooted graphs. + + timeout : numeric + Maximum number of seconds to execute. + After timeout is met, the current best GED is returned. + + Returns + ------- + Generator of tuples (node_edit_path, edge_edit_path, cost) + node_edit_path : list of tuples (u, v) + edge_edit_path : list of tuples ((u1, v1), (u2, v2)) + cost : numeric + + See Also + -------- + graph_edit_distance, optimize_graph_edit_distance, optimal_edit_paths + + References + ---------- + .. [1] Zeina Abu-Aisheh, Romain Raveaux, Jean-Yves Ramel, Patrick + Martineau. An Exact Graph Edit Distance Algorithm for Solving + Pattern Recognition Problems. 4th International Conference on + Pattern Recognition Applications and Methods 2015, Jan 2015, + Lisbon, Portugal. 2015, + <10.5220/0005209202710278>. + https://hal.archives-ouvertes.fr/hal-01168816 + + """ + # TODO: support DiGraph + + import numpy as np + import scipy as sp + + @dataclass + class CostMatrix: + C: ... + lsa_row_ind: ... + lsa_col_ind: ... + ls: ... 
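+        # Descriptive sketch of these untyped fields (inferred from
+        # make_CostMatrix and the matrix construction further below, not an
+        # upstream annotation):
+        #   C           -- (m + n) x (m + n) cost matrix; the upper-left
+        #                  m x n block holds substitution costs, the
+        #                  upper-right m x m block is a diagonal of deletion
+        #                  costs, the lower-left n x n block is a diagonal of
+        #                  insertion costs (off-diagonal entries use a large
+        #                  "inf" sentinel), and the lower-right block is zero
+        #   lsa_row_ind -- row indices of an optimal linear sum assignment of C
+        #   lsa_col_ind -- corresponding column indices of that assignment
+        #   ls          -- total cost of that assignment, used as a lower
+        #                  bound when pruning partial edit paths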
+ + def make_CostMatrix(C, m, n): + # assert(C.shape == (m + n, m + n)) + lsa_row_ind, lsa_col_ind = sp.optimize.linear_sum_assignment(C) + + # Fixup dummy assignments: + # each substitution i<->j should have dummy assignment m+j<->n+i + # NOTE: fast reduce of Cv relies on it + # Create masks for substitution and dummy indices + is_subst = (lsa_row_ind < m) & (lsa_col_ind < n) + is_dummy = (lsa_row_ind >= m) & (lsa_col_ind >= n) + + # Map dummy assignments to the correct indices + lsa_row_ind[is_dummy] = lsa_col_ind[is_subst] + m + lsa_col_ind[is_dummy] = lsa_row_ind[is_subst] + n + + return CostMatrix( + C, lsa_row_ind, lsa_col_ind, C[lsa_row_ind, lsa_col_ind].sum() + ) + + def extract_C(C, i, j, m, n): + # assert(C.shape == (m + n, m + n)) + row_ind = [k in i or k - m in j for k in range(m + n)] + col_ind = [k in j or k - n in i for k in range(m + n)] + return C[row_ind, :][:, col_ind] + + def reduce_C(C, i, j, m, n): + # assert(C.shape == (m + n, m + n)) + row_ind = [k not in i and k - m not in j for k in range(m + n)] + col_ind = [k not in j and k - n not in i for k in range(m + n)] + return C[row_ind, :][:, col_ind] + + def reduce_ind(ind, i): + # assert set(ind) == set(range(len(ind))) + rind = ind[[k not in i for k in ind]] + for k in set(i): + rind[rind >= k] -= 1 + return rind + + def match_edges(u, v, pending_g, pending_h, Ce, matched_uv=None): + """ + Parameters: + u, v: matched vertices, u=None or v=None for + deletion/insertion + pending_g, pending_h: lists of edges not yet mapped + Ce: CostMatrix of pending edge mappings + matched_uv: partial vertex edit path + list of tuples (u, v) of previously matched vertex + mappings u<->v, u=None or v=None for + deletion/insertion + + Returns: + list of (i, j): indices of edge mappings g<->h + localCe: local CostMatrix of edge mappings + (basically submatrix of Ce at cross of rows i, cols j) + """ + M = len(pending_g) + N = len(pending_h) + # assert Ce.C.shape == (M + N, M + N) + + # only attempt to match edges after one node match has been made + # this will stop self-edges on the first node being automatically deleted + # even when a substitution is the better option + if matched_uv is None or len(matched_uv) == 0: + g_ind = [] + h_ind = [] + else: + g_ind = [ + i + for i in range(M) + if pending_g[i][:2] == (u, u) + or any( + pending_g[i][:2] in ((p, u), (u, p), (p, p)) for p, q in matched_uv + ) + ] + h_ind = [ + j + for j in range(N) + if pending_h[j][:2] == (v, v) + or any( + pending_h[j][:2] in ((q, v), (v, q), (q, q)) for p, q in matched_uv + ) + ] + + m = len(g_ind) + n = len(h_ind) + + if m or n: + C = extract_C(Ce.C, g_ind, h_ind, M, N) + # assert C.shape == (m + n, m + n) + + # Forbid structurally invalid matches + # NOTE: inf remembered from Ce construction + for k, i in enumerate(g_ind): + g = pending_g[i][:2] + for l, j in enumerate(h_ind): + h = pending_h[j][:2] + if nx.is_directed(G1) or nx.is_directed(G2): + if any( + g == (p, u) and h == (q, v) or g == (u, p) and h == (v, q) + for p, q in matched_uv + ): + continue + else: + if any( + g in ((p, u), (u, p)) and h in ((q, v), (v, q)) + for p, q in matched_uv + ): + continue + if g == (u, u) or any(g == (p, p) for p, q in matched_uv): + continue + if h == (v, v) or any(h == (q, q) for p, q in matched_uv): + continue + C[k, l] = inf + + localCe = make_CostMatrix(C, m, n) + ij = [ + ( + g_ind[k] if k < m else M + h_ind[l], + h_ind[l] if l < n else N + g_ind[k], + ) + for k, l in zip(localCe.lsa_row_ind, localCe.lsa_col_ind) + if k < m or l < n + ] + + else: + ij = [] + 
localCe = CostMatrix(np.empty((0, 0)), [], [], 0) + + return ij, localCe + + def reduce_Ce(Ce, ij, m, n): + if len(ij): + i, j = zip(*ij) + m_i = m - sum(1 for t in i if t < m) + n_j = n - sum(1 for t in j if t < n) + return make_CostMatrix(reduce_C(Ce.C, i, j, m, n), m_i, n_j) + return Ce + + def get_edit_ops( + matched_uv, pending_u, pending_v, Cv, pending_g, pending_h, Ce, matched_cost + ): + """ + Parameters: + matched_uv: partial vertex edit path + list of tuples (u, v) of vertex mappings u<->v, + u=None or v=None for deletion/insertion + pending_u, pending_v: lists of vertices not yet mapped + Cv: CostMatrix of pending vertex mappings + pending_g, pending_h: lists of edges not yet mapped + Ce: CostMatrix of pending edge mappings + matched_cost: cost of partial edit path + + Returns: + sequence of + (i, j): indices of vertex mapping u<->v + Cv_ij: reduced CostMatrix of pending vertex mappings + (basically Cv with row i, col j removed) + list of (x, y): indices of edge mappings g<->h + Ce_xy: reduced CostMatrix of pending edge mappings + (basically Ce with rows x, cols y removed) + cost: total cost of edit operation + NOTE: most promising ops first + """ + m = len(pending_u) + n = len(pending_v) + # assert Cv.C.shape == (m + n, m + n) + + # 1) a vertex mapping from optimal linear sum assignment + i, j = min( + (k, l) for k, l in zip(Cv.lsa_row_ind, Cv.lsa_col_ind) if k < m or l < n + ) + xy, localCe = match_edges( + pending_u[i] if i < m else None, + pending_v[j] if j < n else None, + pending_g, + pending_h, + Ce, + matched_uv, + ) + Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h)) + # assert Ce.ls <= localCe.ls + Ce_xy.ls + if prune(matched_cost + Cv.ls + localCe.ls + Ce_xy.ls): + pass + else: + # get reduced Cv efficiently + Cv_ij = CostMatrix( + reduce_C(Cv.C, (i,), (j,), m, n), + reduce_ind(Cv.lsa_row_ind, (i, m + j)), + reduce_ind(Cv.lsa_col_ind, (j, n + i)), + Cv.ls - Cv.C[i, j], + ) + yield (i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls + + # 2) other candidates, sorted by lower-bound cost estimate + other = [] + fixed_i, fixed_j = i, j + if m <= n: + candidates = ( + (t, fixed_j) + for t in range(m + n) + if t != fixed_i and (t < m or t == m + fixed_j) + ) + else: + candidates = ( + (fixed_i, t) + for t in range(m + n) + if t != fixed_j and (t < n or t == n + fixed_i) + ) + for i, j in candidates: + if prune(matched_cost + Cv.C[i, j] + Ce.ls): + continue + Cv_ij = make_CostMatrix( + reduce_C(Cv.C, (i,), (j,), m, n), + m - 1 if i < m else m, + n - 1 if j < n else n, + ) + # assert Cv.ls <= Cv.C[i, j] + Cv_ij.ls + if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + Ce.ls): + continue + xy, localCe = match_edges( + pending_u[i] if i < m else None, + pending_v[j] if j < n else None, + pending_g, + pending_h, + Ce, + matched_uv, + ) + if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls): + continue + Ce_xy = reduce_Ce(Ce, xy, len(pending_g), len(pending_h)) + # assert Ce.ls <= localCe.ls + Ce_xy.ls + if prune(matched_cost + Cv.C[i, j] + Cv_ij.ls + localCe.ls + Ce_xy.ls): + continue + other.append(((i, j), Cv_ij, xy, Ce_xy, Cv.C[i, j] + localCe.ls)) + + yield from sorted(other, key=lambda t: t[4] + t[1].ls + t[3].ls) + + def get_edit_paths( + matched_uv, + pending_u, + pending_v, + Cv, + matched_gh, + pending_g, + pending_h, + Ce, + matched_cost, + ): + """ + Parameters: + matched_uv: partial vertex edit path + list of tuples (u, v) of vertex mappings u<->v, + u=None or v=None for deletion/insertion + pending_u, pending_v: lists of vertices not yet mapped + Cv: 
CostMatrix of pending vertex mappings + matched_gh: partial edge edit path + list of tuples (g, h) of edge mappings g<->h, + g=None or h=None for deletion/insertion + pending_g, pending_h: lists of edges not yet mapped + Ce: CostMatrix of pending edge mappings + matched_cost: cost of partial edit path + + Returns: + sequence of (vertex_path, edge_path, cost) + vertex_path: complete vertex edit path + list of tuples (u, v) of vertex mappings u<->v, + u=None or v=None for deletion/insertion + edge_path: complete edge edit path + list of tuples (g, h) of edge mappings g<->h, + g=None or h=None for deletion/insertion + cost: total cost of edit path + NOTE: path costs are non-increasing + """ + if prune(matched_cost + Cv.ls + Ce.ls): + return + + if not max(len(pending_u), len(pending_v)): + # assert not len(pending_g) + # assert not len(pending_h) + # path completed! + # assert matched_cost <= maxcost_value + nonlocal maxcost_value + maxcost_value = min(maxcost_value, matched_cost) + yield matched_uv, matched_gh, matched_cost + + else: + edit_ops = get_edit_ops( + matched_uv, + pending_u, + pending_v, + Cv, + pending_g, + pending_h, + Ce, + matched_cost, + ) + for ij, Cv_ij, xy, Ce_xy, edit_cost in edit_ops: + i, j = ij + # assert Cv.C[i, j] + sum(Ce.C[t] for t in xy) == edit_cost + if prune(matched_cost + edit_cost + Cv_ij.ls + Ce_xy.ls): + continue + + # dive deeper + u = pending_u.pop(i) if i < len(pending_u) else None + v = pending_v.pop(j) if j < len(pending_v) else None + matched_uv.append((u, v)) + for x, y in xy: + len_g = len(pending_g) + len_h = len(pending_h) + matched_gh.append( + ( + pending_g[x] if x < len_g else None, + pending_h[y] if y < len_h else None, + ) + ) + sortedx = sorted(x for x, y in xy) + sortedy = sorted(y for x, y in xy) + G = [ + (pending_g.pop(x) if x < len(pending_g) else None) + for x in reversed(sortedx) + ] + H = [ + (pending_h.pop(y) if y < len(pending_h) else None) + for y in reversed(sortedy) + ] + + yield from get_edit_paths( + matched_uv, + pending_u, + pending_v, + Cv_ij, + matched_gh, + pending_g, + pending_h, + Ce_xy, + matched_cost + edit_cost, + ) + + # backtrack + if u is not None: + pending_u.insert(i, u) + if v is not None: + pending_v.insert(j, v) + matched_uv.pop() + for x, g in zip(sortedx, reversed(G)): + if g is not None: + pending_g.insert(x, g) + for y, h in zip(sortedy, reversed(H)): + if h is not None: + pending_h.insert(y, h) + for _ in xy: + matched_gh.pop() + + # Initialization + + pending_u = list(G1.nodes) + pending_v = list(G2.nodes) + + initial_cost = 0 + if roots: + root_u, root_v = roots + if root_u not in pending_u or root_v not in pending_v: + raise nx.NodeNotFound("Root node not in graph.") + + # remove roots from pending + pending_u.remove(root_u) + pending_v.remove(root_v) + + # cost matrix of vertex mappings + m = len(pending_u) + n = len(pending_v) + C = np.zeros((m + n, m + n)) + if node_subst_cost: + C[0:m, 0:n] = np.array( + [ + node_subst_cost(G1.nodes[u], G2.nodes[v]) + for u in pending_u + for v in pending_v + ] + ).reshape(m, n) + if roots: + initial_cost = node_subst_cost(G1.nodes[root_u], G2.nodes[root_v]) + elif node_match: + C[0:m, 0:n] = np.array( + [ + 1 - int(node_match(G1.nodes[u], G2.nodes[v])) + for u in pending_u + for v in pending_v + ] + ).reshape(m, n) + if roots: + initial_cost = 1 - node_match(G1.nodes[root_u], G2.nodes[root_v]) + else: + # all zeroes + pass + # assert not min(m, n) or C[0:m, 0:n].min() >= 0 + if node_del_cost: + del_costs = [node_del_cost(G1.nodes[u]) for u in pending_u] + else: 
+ del_costs = [1] * len(pending_u) + # assert not m or min(del_costs) >= 0 + if node_ins_cost: + ins_costs = [node_ins_cost(G2.nodes[v]) for v in pending_v] + else: + ins_costs = [1] * len(pending_v) + # assert not n or min(ins_costs) >= 0 + inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1 + C[0:m, n : n + m] = np.array( + [del_costs[i] if i == j else inf for i in range(m) for j in range(m)] + ).reshape(m, m) + C[m : m + n, 0:n] = np.array( + [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)] + ).reshape(n, n) + Cv = make_CostMatrix(C, m, n) + + pending_g = list(G1.edges) + pending_h = list(G2.edges) + + # cost matrix of edge mappings + m = len(pending_g) + n = len(pending_h) + C = np.zeros((m + n, m + n)) + if edge_subst_cost: + C[0:m, 0:n] = np.array( + [ + edge_subst_cost(G1.edges[g], G2.edges[h]) + for g in pending_g + for h in pending_h + ] + ).reshape(m, n) + elif edge_match: + C[0:m, 0:n] = np.array( + [ + 1 - int(edge_match(G1.edges[g], G2.edges[h])) + for g in pending_g + for h in pending_h + ] + ).reshape(m, n) + else: + # all zeroes + pass + # assert not min(m, n) or C[0:m, 0:n].min() >= 0 + if edge_del_cost: + del_costs = [edge_del_cost(G1.edges[g]) for g in pending_g] + else: + del_costs = [1] * len(pending_g) + # assert not m or min(del_costs) >= 0 + if edge_ins_cost: + ins_costs = [edge_ins_cost(G2.edges[h]) for h in pending_h] + else: + ins_costs = [1] * len(pending_h) + # assert not n or min(ins_costs) >= 0 + inf = C[0:m, 0:n].sum() + sum(del_costs) + sum(ins_costs) + 1 + C[0:m, n : n + m] = np.array( + [del_costs[i] if i == j else inf for i in range(m) for j in range(m)] + ).reshape(m, m) + C[m : m + n, 0:n] = np.array( + [ins_costs[i] if i == j else inf for i in range(n) for j in range(n)] + ).reshape(n, n) + Ce = make_CostMatrix(C, m, n) + + maxcost_value = Cv.C.sum() + Ce.C.sum() + 1 + + if timeout is not None: + if timeout <= 0: + raise nx.NetworkXError("Timeout value must be greater than 0") + start = time.perf_counter() + + def prune(cost): + if timeout is not None: + if time.perf_counter() - start > timeout: + return True + if upper_bound is not None: + if cost > upper_bound: + return True + if cost > maxcost_value: + return True + if strictly_decreasing and cost >= maxcost_value: + return True + return False + + # Now go! + + done_uv = [] if roots is None else [roots] + + for vertex_path, edge_path, cost in get_edit_paths( + done_uv, pending_u, pending_v, Cv, [], pending_g, pending_h, Ce, initial_cost + ): + # assert sorted(G1.nodes) == sorted(u for u, v in vertex_path if u is not None) + # assert sorted(G2.nodes) == sorted(v for u, v in vertex_path if v is not None) + # assert sorted(G1.edges) == sorted(g for g, h in edge_path if g is not None) + # assert sorted(G2.edges) == sorted(h for g, h in edge_path if h is not None) + # print(vertex_path, edge_path, cost, file = sys.stderr) + # assert cost == maxcost_value + yield list(vertex_path), list(edge_path), float(cost) + + +@nx._dispatchable +def simrank_similarity( + G, + source=None, + target=None, + importance_factor=0.9, + max_iterations=1000, + tolerance=1e-4, +): + """Returns the SimRank similarity of nodes in the graph ``G``. + + SimRank is a similarity metric that says "two objects are considered + to be similar if they are referenced by similar objects." [1]_. 
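+
+    In recurrence form (a restatement of the pseudo-code below, not a
+    formula quoted verbatim from [1]_ or [2]_), ``s(u, u) = 1`` and, for
+    ``u != v``::
+
+        s(u, v) = C / (|I(u)| * |I(v)|) * sum(s(w, x)
+                  for w in I(u) for x in I(v))
+
+    where ``I(u)`` is the set of in-neighbors of ``u`` and ``C`` is the
+    decay (importance) factor.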
+
+    The pseudo-code definition from the paper is::
+
+        def simrank(G, u, v):
+            in_neighbors_u = G.predecessors(u)
+            in_neighbors_v = G.predecessors(v)
+            scale = C / (len(in_neighbors_u) * len(in_neighbors_v))
+            return scale * sum(
+                simrank(G, w, x) for w, x in product(in_neighbors_u, in_neighbors_v)
+            )
+
+    where ``G`` is the graph, ``u`` is the source, ``v`` is the target,
+    and ``C`` is a float decay or importance factor between 0 and 1.
+
+    The SimRank algorithm for determining node similarity is defined in
+    [2]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+
+    source : node
+        If this is specified, the returned dictionary maps each node
+        ``v`` in the graph to the similarity between ``source`` and
+        ``v``.
+
+    target : node
+        If both ``source`` and ``target`` are specified, the similarity
+        value between ``source`` and ``target`` is returned. If
+        ``target`` is specified but ``source`` is not, this argument is
+        ignored.
+
+    importance_factor : float
+        The relative importance of indirect neighbors with respect to
+        direct neighbors.
+
+    max_iterations : integer
+        Maximum number of iterations.
+
+    tolerance : float
+        Error tolerance used to check convergence. When an iteration of
+        the algorithm finds that no similarity value changes more than
+        this amount, the algorithm halts.
+
+    Returns
+    -------
+    similarity : dictionary or float
+        If ``source`` and ``target`` are both ``None``, this returns a
+        dictionary of dictionaries, where the keys are node pairs and
+        the values are the similarities of the pairs of nodes.
+
+        If ``source`` is not ``None`` but ``target`` is, this returns a
+        dictionary mapping each node to the similarity of ``source`` and
+        that node.
+
+        If neither ``source`` nor ``target`` is ``None``, this returns
+        the similarity value for the given pair of nodes.
+
+    Raises
+    ------
+    ExceededMaxIterations
+        If the algorithm does not converge within ``max_iterations``.
+
+    NodeNotFound
+        If either ``source`` or ``target`` is not in `G`.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.simrank_similarity(G)
+    {0: {0: 1.0, 1: 0.0}, 1: {0: 0.0, 1: 1.0}}
+    >>> nx.simrank_similarity(G, source=0)
+    {0: 1.0, 1: 0.0}
+    >>> nx.simrank_similarity(G, source=0, target=0)
+    1.0
+
+    The result of this function can be converted to a numpy array
+    representing the SimRank matrix by using the node order of the
+    graph to determine which row and column represent each node.
+    Other orderings of the nodes are also possible.
+
+    >>> import numpy as np
+    >>> sim = nx.simrank_similarity(G)
+    >>> np.array([[sim[u][v] for v in G] for u in G])
+    array([[1., 0.],
+           [0., 1.]])
+    >>> sim_1d = nx.simrank_similarity(G, source=0)
+    >>> np.array([sim_1d[v] for v in G])
+    array([1., 0.])
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/SimRank
+    .. [2] G. Jeh and J. Widom.
+           "SimRank: a measure of structural-context similarity",
+           In KDD'02: Proceedings of the Eighth ACM SIGKDD
+           International Conference on Knowledge Discovery and Data Mining,
+           pp. 538--543. ACM Press, 2002.
+
+    """
+    import numpy as np
+
+    nodelist = list(G)
+    if source is not None:
+        if source not in nodelist:
+            raise nx.NodeNotFound(f"Source node {source} not in G")
+        s_indx = nodelist.index(source)
+    else:
+        s_indx = None
+
+    if target is not None:
+        if target not in nodelist:
+            raise nx.NodeNotFound(f"Target node {target} not in G")
+        t_indx = nodelist.index(target)
+    else:
+        t_indx = None
+
+    x = _simrank_similarity_numpy(
+        G, s_indx, t_indx, importance_factor, max_iterations, tolerance
+    )
+
+    if isinstance(x, np.ndarray):
+        if x.ndim == 1:
+            return dict(zip(G, x.tolist()))
+        # else x.ndim == 2
+        return {u: dict(zip(G, row)) for u, row in zip(G, x.tolist())}
+    return float(x)
+
+
+def _simrank_similarity_python(
+    G,
+    source=None,
+    target=None,
+    importance_factor=0.9,
+    max_iterations=1000,
+    tolerance=1e-4,
+):
+    """Returns the SimRank similarity of nodes in the graph ``G``.
+
+    This pure Python version is provided for pedagogical purposes.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.similarity._simrank_similarity_python(G)
+    {0: {0: 1, 1: 0.0}, 1: {0: 0.0, 1: 1}}
+    >>> nx.similarity._simrank_similarity_python(G, source=0)
+    {0: 1, 1: 0.0}
+    >>> nx.similarity._simrank_similarity_python(G, source=0, target=0)
+    1
+    """
+    # build up our similarity adjacency dictionary output
+    newsim = {u: {v: 1 if u == v else 0 for v in G} for u in G}
+
+    # These functions compute the update to the similarity value of the nodes
+    # `u` and `v` with respect to the previous similarity values.
+    def avg_sim(s):
+        return sum(newsim[w][x] for (w, x) in s) / len(s) if s else 0.0
+
+    Gadj = G.pred if G.is_directed() else G.adj
+
+    def sim(u, v):
+        return importance_factor * avg_sim(list(product(Gadj[u], Gadj[v])))
+
+    for _ in range(max_iterations):
+        oldsim = newsim
+        newsim = {u: {v: sim(u, v) if u != v else 1 for v in G} for u in G}
+        is_close = all(
+            all(
+                abs(newsim[u][v] - old) <= tolerance * (1 + abs(old))
+                for v, old in nbrs.items()
+            )
+            for u, nbrs in oldsim.items()
+        )
+        if is_close:
+            break
+    else:
+        # The for/else branch runs only when the loop was never broken out
+        # of, i.e. when no iteration converged within ``tolerance``.
+        raise nx.ExceededMaxIterations(
+            f"simrank did not converge after {max_iterations} iterations."
+        )
+
+    if source is not None and target is not None:
+        return newsim[source][target]
+    if source is not None:
+        return newsim[source]
+    return newsim
+
+
+def _simrank_similarity_numpy(
+    G,
+    source=None,
+    target=None,
+    importance_factor=0.9,
+    max_iterations=1000,
+    tolerance=1e-4,
+):
+    """Calculate SimRank of nodes in ``G`` using matrices with ``numpy``.
+
+    The SimRank algorithm for determining node similarity is defined in
+    [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+
+    source : node index
+        If this is specified, the returned array contains the similarity
+        between the node at index ``source`` and every node.
+
+    target : node index
+        If both ``source`` and ``target`` are specified, the similarity
+        value between the nodes at those indices is returned. If
+        ``target`` is specified but ``source`` is not, this argument is
+        ignored.
+
+    importance_factor : float
+        The relative importance of indirect neighbors with respect to
+        direct neighbors.
+
+    max_iterations : integer
+        Maximum number of iterations.
+
+    tolerance : float
+        Error tolerance used to check convergence. When an iteration of
+        the algorithm finds that no similarity value changes more than
+        this amount, the algorithm halts.
+
+    Returns
+    -------
+    similarity : numpy array or float
+        If ``source`` and ``target`` are both ``None``, this returns a
+        2D array containing SimRank scores of the nodes.
+
+        If ``source`` is not ``None`` but ``target`` is, this returns a
+        1D array containing the SimRank scores between ``source`` and
+        every node.
+
+        If neither ``source`` nor ``target`` is ``None``, this returns
+        the similarity value for the given pair of nodes.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(2)
+    >>> nx.similarity._simrank_similarity_numpy(G)
+    array([[1., 0.],
+           [0., 1.]])
+    >>> nx.similarity._simrank_similarity_numpy(G, source=0)
+    array([1., 0.])
+    >>> nx.similarity._simrank_similarity_numpy(G, source=0, target=0)
+    1.0
+
+    References
+    ----------
+    .. [1] G. Jeh and J. Widom.
+           "SimRank: a measure of structural-context similarity",
+           In KDD'02: Proceedings of the Eighth ACM SIGKDD
+           International Conference on Knowledge Discovery and Data Mining,
+           pp. 538--543. ACM Press, 2002.
+    """
+    # This algorithm follows roughly
+    #
+    #     S = max{C * (A.T * S * A), I}
+    #
+    # where C is the importance factor, A is the column normalized
+    # adjacency matrix, and I is the identity matrix.
+    import numpy as np
+
+    adjacency_matrix = nx.to_numpy_array(G)
+
+    # column-normalize the ``adjacency_matrix``
+    s = np.array(adjacency_matrix.sum(axis=0))
+    s[s == 0] = 1
+    adjacency_matrix /= s  # adjacency_matrix.sum(axis=0)
+
+    newsim = np.eye(len(G), dtype=np.float64)
+    for _ in range(max_iterations):
+        prevsim = newsim.copy()
+        newsim = importance_factor * ((adjacency_matrix.T @ prevsim) @ adjacency_matrix)
+        np.fill_diagonal(newsim, 1.0)
+
+        if np.allclose(prevsim, newsim, atol=tolerance):
+            break
+    else:
+        # The for/else branch runs only when no iteration converged
+        # within ``tolerance``.
+        raise nx.ExceededMaxIterations(
+            f"simrank did not converge after {max_iterations} iterations."
+        )
+
+    if source is not None and target is not None:
+        return float(newsim[source, target])
+    if source is not None:
+        return newsim[source]
+    return newsim
+
+
+@nx._dispatchable(edge_attrs="weight")
+def panther_similarity(
+    G, source, k=5, path_length=5, c=0.5, delta=0.1, eps=None, weight="weight"
+):
+    r"""Returns the Panther similarity of nodes in the graph `G` to node `source`.
+
+    Panther is a similarity metric that says "two objects are considered
+    to be similar if they frequently appear on the same paths." [1]_.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+    source : node
+        Source node for which to find the top `k` similar other nodes
+    k : int (default = 5)
+        The number of most similar nodes to return.
+    path_length : int (default = 5)
+        How long the randomly generated paths should be (``T`` in [1]_).
+    c : float (default = 0.5)
+        A universal positive constant used to scale the number
+        of sampled random paths to generate.
+    delta : float (default = 0.1)
+        The probability that the set $R$ of sampled random paths fails to
+        be an $\epsilon$-approximation, i.e. that for some subset
+        $A \subseteq D$ of the domain $D$ of paths, the sampled estimate
+        of $\phi(A)$ (the probability that a random path falls in $A$)
+        deviates from the true value by more than $\epsilon$. The sample
+        size is chosen so this holds with probability at least
+        $1 - \delta$.
+    eps : float or None (default = None)
+        The error bound. Per [1]_, a good value is ``sqrt(1/|E|)``. Therefore,
+        if no value is provided, the recommended computed value will be used.
+    weight : string or None, optional (default="weight")
+        The name of an edge attribute that holds the numerical value
+        used as a weight. If None then each edge has weight 1.
+
+    Returns
+    -------
+    similarity : dictionary
+        Dictionary of nodes to similarity scores (as floats).
Note:
+        the self-similarity (i.e., `source`) will not be included in
+        the returned dictionary. So, for ``k = 5``, a dictionary of the
+        top 4 nodes and their similarity scores will be returned.
+
+    Raises
+    ------
+    NetworkXUnfeasible
+        If `source` is an isolated node.
+
+    NodeNotFound
+        If `source` is not in `G`.
+
+    Notes
+    -----
+    The isolated nodes in `G` are ignored.
+
+    Examples
+    --------
+    >>> G = nx.star_graph(10)
+    >>> sim = nx.panther_similarity(G, 0)
+
+    References
+    ----------
+    .. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J.
+           Panther: Fast top-k similarity search on large networks.
+           In Proceedings of the ACM SIGKDD International Conference
+           on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454).
+           Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267.
+    """
+    import numpy as np
+
+    if source not in G:
+        raise nx.NodeNotFound(f"Source node {source} not in G")
+
+    isolates = set(nx.isolates(G))
+
+    if source in isolates:
+        raise nx.NetworkXUnfeasible(
+            f"Panther similarity is not defined for the isolated source node {source}."
+        )
+
+    G = G.subgraph([node for node in G.nodes if node not in isolates]).copy()
+
+    num_nodes = G.number_of_nodes()
+    if num_nodes < k:
+        warnings.warn(
+            f"Number of nodes is {num_nodes}, but requested k is {k}. "
+            "Setting k to number of nodes."
+        )
+        k = num_nodes
+    # According to [1], they empirically determined
+    # a good value for ``eps`` to be sqrt( 1 / |E| )
+    if eps is None:
+        eps = np.sqrt(1.0 / G.number_of_edges())
+
+    inv_node_map = {name: index for index, name in enumerate(G.nodes)}
+    node_map = np.array(G)
+
+    # Calculate the sample size ``R`` for how many paths
+    # to randomly generate
+    t_choose_2 = math.comb(path_length, 2)
+    sample_size = int((c / eps**2) * (np.log2(t_choose_2) + 1 + np.log(1 / delta)))
+    index_map = {}
+    _ = list(
+        generate_random_paths(
+            G, sample_size, path_length=path_length, index_map=index_map, weight=weight
+        )
+    )
+    S = np.zeros(num_nodes)
+
+    inv_sample_size = 1 / sample_size
+
+    source_paths = set(index_map[source])
+
+    # Calculate the path similarities
+    # between ``source`` (v) and ``node`` (v_j)
+    # using our inverted index mapping of
+    # vertices to paths
+    for node, paths in index_map.items():
+        # Only consider paths where both
+        # ``node`` and ``source`` are present
+        common_paths = source_paths.intersection(paths)
+        S[inv_node_map[node]] = len(common_paths) * inv_sample_size
+
+    # Retrieve top ``k`` similar
+    # Note: the code below performed anywhere from 4-10x faster
+    # (depending on input sizes) vs the equivalent ``np.argsort(S)[::-1]``
+    top_k_unsorted = np.argpartition(S, -k)[-k:]
+    top_k_sorted = top_k_unsorted[np.argsort(S[top_k_unsorted])][::-1]
+
+    # Add back the similarity scores
+    top_k_with_val = dict(
+        zip(node_map[top_k_sorted].tolist(), S[top_k_sorted].tolist())
+    )
+
+    # Remove the self-similarity
+    top_k_with_val.pop(source, None)
+    return top_k_with_val
+
+
+@np_random_state(5)
+@nx._dispatchable(edge_attrs="weight")
+def generate_random_paths(
+    G,
+    sample_size,
+    path_length=5,
+    index_map=None,
+    weight="weight",
+    seed=None,
+    *,
+    source=None,
+):
+    """Randomly generate `sample_size` paths of length `path_length`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A NetworkX graph
+    sample_size : integer
+        The number of paths to generate. This is ``R`` in [1]_.
+    path_length : integer (default = 5)
+        The length, in edges, of each path to randomly generate.
+        This is ``T`` in [1]_.
According to the paper, ``T >= 5`` is + recommended. + index_map : dictionary, optional + If provided, this will be populated with the inverted + index of nodes mapped to the set of generated random path + indices within ``paths``. + weight : string or None, optional (default="weight") + The name of an edge attribute that holds the numerical value + used as a weight. If None then each edge has weight 1. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + source : node, optional + Node to use as the starting point for all generated paths. + If None then starting nodes are selected at random with uniform probability. + + Returns + ------- + paths : generator of lists + Generator of `sample_size` paths each with length `path_length`. + + Examples + -------- + The generator yields `sample_size` number of paths of length `path_length` + drawn from `G`: + + >>> G = nx.complete_graph(5) + >>> next(nx.generate_random_paths(G, sample_size=1, path_length=3, seed=42)) + [3, 4, 2, 3] + >>> list(nx.generate_random_paths(G, sample_size=3, path_length=4, seed=42)) + [[3, 4, 2, 3, 0], [2, 0, 2, 1, 0], [2, 0, 4, 3, 0]] + + By passing a dictionary into `index_map`, it will build an + inverted index mapping of nodes to the paths in which that node is present: + + >>> G = nx.wheel_graph(10) + >>> index_map = {} + >>> random_paths = list( + ... nx.generate_random_paths(G, sample_size=3, index_map=index_map, seed=2771) + ... ) + >>> random_paths + [[3, 2, 1, 9, 8, 7], [4, 0, 5, 6, 7, 8], [3, 0, 5, 0, 9, 8]] + >>> paths_containing_node_0 = [ + ... random_paths[path_idx] for path_idx in index_map.get(0, []) + ... ] + >>> paths_containing_node_0 + [[4, 0, 5, 6, 7, 8], [3, 0, 5, 0, 9, 8]] + + References + ---------- + .. [1] Zhang, J., Tang, J., Ma, C., Tong, H., Jing, Y., & Li, J. + Panther: Fast top-k similarity search on large networks. + In Proceedings of the ACM SIGKDD International Conference + on Knowledge Discovery and Data Mining (Vol. 2015-August, pp. 1445–1454). + Association for Computing Machinery. https://doi.org/10.1145/2783258.2783267. + """ + import numpy as np + + randint_fn = ( + seed.integers if isinstance(seed, np.random.Generator) else seed.randint + ) + + # Calculate transition probabilities between + # every pair of vertices according to Eq. 
(3) + adj_mat = nx.to_numpy_array(G, weight=weight) + inv_row_sums = np.reciprocal(adj_mat.sum(axis=1)).reshape(-1, 1) + transition_probabilities = adj_mat * inv_row_sums + + node_map = list(G) + num_nodes = G.number_of_nodes() + + for path_index in range(sample_size): + if source is None: + # Sample current vertex v = v_i uniformly at random + node_index = randint_fn(num_nodes) + node = node_map[node_index] + else: + if source not in node_map: + raise nx.NodeNotFound(f"Initial node {source} not in G") + + node = source + node_index = node_map.index(node) + + # Add v into p_r and add p_r into the path set + # of v, i.e., P_v + path = [node] + + # Build the inverted index (P_v) of vertices to paths + if index_map is not None: + if node in index_map: + index_map[node].add(path_index) + else: + index_map[node] = {path_index} + + starting_index = node_index + for _ in range(path_length): + # Randomly sample a neighbor (v_j) according + # to transition probabilities from ``node`` (v) to its neighbors + nbr_index = seed.choice( + num_nodes, p=transition_probabilities[starting_index] + ) + + # Set current vertex (v = v_j) + starting_index = nbr_index + + # Add v into p_r + nbr_node = node_map[nbr_index] + path.append(nbr_node) + + # Add p_r into P_v + if index_map is not None: + if nbr_node in index_map: + index_map[nbr_node].add(path_index) + else: + index_map[nbr_node] = {path_index} + + yield path diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py new file mode 100644 index 0000000..e8cf1d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/simple_paths.py @@ -0,0 +1,948 @@ +from heapq import heappop, heappush +from itertools import count + +import networkx as nx +from networkx.algorithms.shortest_paths.weighted import _weight_function +from networkx.utils import not_implemented_for, pairwise + +__all__ = [ + "all_simple_paths", + "is_simple_path", + "shortest_simple_paths", + "all_simple_edge_paths", +] + + +@nx._dispatchable +def is_simple_path(G, nodes): + """Returns True if and only if `nodes` form a simple path in `G`. + + A *simple path* in a graph is a nonempty sequence of nodes in which + no node appears more than once in the sequence, and each adjacent + pair of nodes in the sequence is adjacent in the graph. + + Parameters + ---------- + G : graph + A NetworkX graph. + nodes : list + A list of one or more nodes in the graph `G`. + + Returns + ------- + bool + Whether the given list of nodes represents a simple path in `G`. + + Notes + ----- + An empty list of nodes is not a path but a list of one node is a + path. Here's an explanation why. + + This function operates on *node paths*. One could also consider + *edge paths*. There is a bijection between node paths and edge + paths. + + The *length of a path* is the number of edges in the path, so a list + of nodes of length *n* corresponds to a path of length *n* - 1. + Thus the smallest edge path would be a list of zero edges, the empty + path. This corresponds to a list of one node. 
+ + To convert between a node path and an edge path, you can use code + like the following:: + + >>> from networkx.utils import pairwise + >>> nodes = [0, 1, 2, 3] + >>> edges = list(pairwise(nodes)) + >>> edges + [(0, 1), (1, 2), (2, 3)] + >>> nodes = [edges[0][0]] + [v for u, v in edges] + >>> nodes + [0, 1, 2, 3] + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> nx.is_simple_path(G, [2, 3, 0]) + True + >>> nx.is_simple_path(G, [0, 2]) + False + + """ + # The empty list is not a valid path. Could also return + # NetworkXPointlessConcept here. + if len(nodes) == 0: + return False + + # If the list is a single node, just check that the node is actually + # in the graph. + if len(nodes) == 1: + return nodes[0] in G + + # check that all nodes in the list are in the graph, if at least one + # is not in the graph, then this is not a simple path + if not all(n in G for n in nodes): + return False + + # If the list contains repeated nodes, then it's not a simple path + if len(set(nodes)) != len(nodes): + return False + + # Test that each adjacent pair of nodes is adjacent. + return all(v in G[u] for u, v in pairwise(nodes)) + + +@nx._dispatchable +def all_simple_paths(G, source, target, cutoff=None): + """Generate all simple paths in the graph G from source to target. + + A simple path is a path with no repeated nodes. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for path + + target : nodes + Single node or iterable of nodes at which to end path + + cutoff : integer, optional + Depth to stop the search. Only paths of length <= cutoff are returned. + + Returns + ------- + path_generator: generator + A generator that produces lists of simple paths. If there are no paths + between the source and target within the given cutoff the generator + produces no output. If it is possible to traverse the same sequence of + nodes in multiple ways, namely through parallel edges, then it will be + returned multiple times (once for each viable edge combination). + + Examples + -------- + This iterator generates lists of nodes:: + + >>> G = nx.complete_graph(4) + >>> for path in nx.all_simple_paths(G, source=0, target=3): + ... print(path) + ... + [0, 1, 2, 3] + [0, 1, 3] + [0, 2, 1, 3] + [0, 2, 3] + [0, 3] + + You can generate only those paths that are shorter than a certain + length by using the `cutoff` keyword argument:: + + >>> paths = nx.all_simple_paths(G, source=0, target=3, cutoff=2) + >>> print(list(paths)) + [[0, 1, 3], [0, 2, 3], [0, 3]] + + To get each path as the corresponding list of edges, you can use the + :func:`networkx.utils.pairwise` helper function:: + + >>> paths = nx.all_simple_paths(G, source=0, target=3) + >>> for path in map(nx.utils.pairwise, paths): + ... print(list(path)) + [(0, 1), (1, 2), (2, 3)] + [(0, 1), (1, 3)] + [(0, 2), (2, 1), (1, 3)] + [(0, 2), (2, 3)] + [(0, 3)] + + Pass an iterable of nodes as target to generate all paths ending in any of several nodes:: + + >>> G = nx.complete_graph(4) + >>> for path in nx.all_simple_paths(G, source=0, target=[3, 2]): + ... print(path) + ... 
+ [0, 1, 2] + [0, 1, 2, 3] + [0, 1, 3] + [0, 1, 3, 2] + [0, 2] + [0, 2, 1, 3] + [0, 2, 3] + [0, 3] + [0, 3, 1, 2] + [0, 3, 2] + + The singleton path from ``source`` to itself is considered a simple path and is + included in the results: + + >>> G = nx.empty_graph(5) + >>> list(nx.all_simple_paths(G, source=0, target=0)) + [[0]] + + >>> G = nx.path_graph(3) + >>> list(nx.all_simple_paths(G, source=0, target={0, 1, 2})) + [[0], [0, 1], [0, 1, 2]] + + Iterate over each path from the root nodes to the leaf nodes in a + directed acyclic graph using a functional programming approach:: + + >>> from itertools import chain + >>> from itertools import product + >>> from itertools import starmap + >>> from functools import partial + >>> + >>> chaini = chain.from_iterable + >>> + >>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)]) + >>> roots = (v for v, d in G.in_degree() if d == 0) + >>> leaves = (v for v, d in G.out_degree() if d == 0) + >>> all_paths = partial(nx.all_simple_paths, G) + >>> list(chaini(starmap(all_paths, product(roots, leaves)))) + [[0, 1, 2], [0, 3, 2]] + + The same list computed using an iterative approach:: + + >>> G = nx.DiGraph([(0, 1), (1, 2), (0, 3), (3, 2)]) + >>> roots = (v for v, d in G.in_degree() if d == 0) + >>> leaves = (v for v, d in G.out_degree() if d == 0) + >>> all_paths = [] + >>> for root in roots: + ... for leaf in leaves: + ... paths = nx.all_simple_paths(G, root, leaf) + ... all_paths.extend(paths) + >>> all_paths + [[0, 1, 2], [0, 3, 2]] + + Iterate over each path from the root nodes to the leaf nodes in a + directed acyclic graph passing all leaves together to avoid unnecessary + compute:: + + >>> G = nx.DiGraph([(0, 1), (2, 1), (1, 3), (1, 4)]) + >>> roots = (v for v, d in G.in_degree() if d == 0) + >>> leaves = [v for v, d in G.out_degree() if d == 0] + >>> all_paths = [] + >>> for root in roots: + ... paths = nx.all_simple_paths(G, root, leaves) + ... all_paths.extend(paths) + >>> all_paths + [[0, 1, 3], [0, 1, 4], [2, 1, 3], [2, 1, 4]] + + If parallel edges offer multiple ways to traverse a given sequence of + nodes, this sequence of nodes will be returned multiple times: + + >>> G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 2)]) + >>> list(nx.all_simple_paths(G, 0, 2)) + [[0, 1, 2], [0, 1, 2]] + + Notes + ----- + This algorithm uses a modified depth-first search to generate the + paths [1]_. A single path can be found in $O(V+E)$ time but the + number of simple paths in a graph can be very large, e.g. $O(n!)$ in + the complete graph of order $n$. + + This function does not check that a path exists between `source` and + `target`. For large graphs, this may result in very long runtimes. + Consider using `has_path` to check that a path exists between `source` and + `target` before calling this function on large graphs. + + References + ---------- + .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms", + Addison Wesley Professional, 3rd ed., 2001. + + See Also + -------- + all_shortest_paths, shortest_path, has_path + + """ + for edge_path in all_simple_edge_paths(G, source, target, cutoff): + yield [source] + [edge[1] for edge in edge_path] + + +@nx._dispatchable +def all_simple_edge_paths(G, source, target, cutoff=None): + """Generate lists of edges for all simple paths in G from source to target. + + A simple path is a path with no repeated nodes. 
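+
+    This function is the edge-path counterpart of :func:`all_simple_paths`:
+    each path is yielded as its list of edges rather than its list of nodes.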
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Starting node for path
+
+    target : nodes
+        Single node or iterable of nodes at which to end path
+
+    cutoff : integer, optional
+        Depth to stop the search. Only paths of length <= cutoff are returned.
+
+    Returns
+    -------
+    path_generator: generator
+        A generator that produces lists of simple paths. If there are no paths
+        between the source and target within the given cutoff the generator
+        produces no output.
+        For multigraphs, the lists of edges have elements of the form
+        `(u, v, k)`, where `k` corresponds to the edge key.
+
+    Examples
+    --------
+
+    Print the simple path edges of a Graph::
+
+        >>> g = nx.Graph([(1, 2), (2, 4), (1, 3), (3, 4)])
+        >>> for path in sorted(nx.all_simple_edge_paths(g, 1, 4)):
+        ...     print(path)
+        [(1, 2), (2, 4)]
+        [(1, 3), (3, 4)]
+
+    Print the simple path edges of a MultiGraph. Returned edges come with
+    their associated keys::
+
+        >>> mg = nx.MultiGraph()
+        >>> mg.add_edge(1, 2, key="k0")
+        'k0'
+        >>> mg.add_edge(1, 2, key="k1")
+        'k1'
+        >>> mg.add_edge(2, 3, key="k0")
+        'k0'
+        >>> for path in sorted(nx.all_simple_edge_paths(mg, 1, 3)):
+        ...     print(path)
+        [(1, 2, 'k0'), (2, 3, 'k0')]
+        [(1, 2, 'k1'), (2, 3, 'k0')]
+
+    When ``source`` is one of the targets, the empty path starting and ending at
+    ``source`` without traversing any edge is considered a valid simple edge path
+    and is included in the results:
+
+    >>> G = nx.Graph()
+    >>> G.add_node(0)
+    >>> paths = list(nx.all_simple_edge_paths(G, 0, 0))
+    >>> for path in paths:
+    ...     print(path)
+    []
+    >>> len(paths)
+    1
+
+
+    Notes
+    -----
+    This algorithm uses a modified depth-first search to generate the
+    paths [1]_. A single path can be found in $O(V+E)$ time but the
+    number of simple paths in a graph can be very large, e.g. $O(n!)$ in
+    the complete graph of order $n$.
+
+    References
+    ----------
+    .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms",
+       Addison Wesley Professional, 3rd ed., 2001.
+
+    See Also
+    --------
+    all_shortest_paths, shortest_path, all_simple_paths
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"source node {source} not in graph")
+
+    if target in G:
+        targets = {target}
+    else:
+        try:
+            targets = set(target)
+        except TypeError as err:
+            raise nx.NodeNotFound(f"target node {target} not in graph") from err
+
+    cutoff = cutoff if cutoff is not None else len(G) - 1
+
+    if cutoff >= 0 and targets:
+        yield from _all_simple_edge_paths(G, source, targets, cutoff)
+
+
+def _all_simple_edge_paths(G, source, targets, cutoff):
+    # We simulate recursion with a stack, keeping the current path being explored
+    # and the outgoing edge iterators at each point in the stack.
+    # To avoid unnecessary checks, the loop is structured in a way such that a path
+    # is considered for yielding only after a new node/edge is added.
+    # We bootstrap the search by adding a dummy iterator to the stack that only yields
+    # a dummy edge to source (so that the trivial path has a chance of being included).
+
+    get_edges = (
+        (lambda node: G.edges(node, keys=True))
+        if G.is_multigraph()
+        else (lambda node: G.edges(node))
+    )
+
+    # The current_path is a dictionary that maps nodes in the path to the edge that was
+    # used to enter that node (instead of a list of edges) because we want both a fast
+    # membership test for nodes in the path and the preservation of insertion order.
+    current_path = {None: None}
+    stack = [iter([(None, source)])]
+
+    while stack:
+        # 1. Try to extend the current path.
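+        # (Descriptive note: stack[-1] iterates over the outgoing edges of the
+        # most recently added node; edges whose endpoint is already in
+        # current_path are filtered out so the generated path stays simple.)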
+        next_edge = next((e for e in stack[-1] if e[1] not in current_path), None)
+        if next_edge is None:
+            # All edges of the last node in the current path have been explored.
+            stack.pop()
+            current_path.popitem()
+            continue
+        previous_node, next_node, *_ = next_edge
+
+        # 2. Check if we've reached a target.
+        if next_node in targets:
+            # Strip the leading None marker and the dummy (None, source) edge.
+            yield (list(current_path.values()) + [next_edge])[2:]
+
+        # 3. Only expand the search through the next node if it makes sense.
+        if len(current_path) - 1 < cutoff and (
+            targets - current_path.keys() - {next_node}
+        ):
+            current_path[next_node] = next_edge
+            stack.append(iter(get_edges(next_node)))
+
+
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def shortest_simple_paths(G, source, target, weight=None):
+    """Generate all simple paths in the graph G from source to target,
+    starting from shortest ones.
+
+    A simple path is a path with no repeated nodes.
+
+    If a weighted shortest path search is to be used, no negative weights
+    are allowed.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Starting node for path
+
+    target : node
+        Ending node for path
+
+    weight : string or function
+        If it is a string, it is the name of the edge attribute to be
+        used as a weight.
+
+        If it is a function, the weight of an edge is the value returned
+        by the function. The function must accept exactly three positional
+        arguments: the two endpoints of an edge and the dictionary of edge
+        attributes for that edge. The function must return a number or None.
+        The weight function can be used to hide edges by returning None.
+        So ``weight = lambda u, v, d: 1 if d['color']=="red" else None``
+        will find the shortest red path.
+
+        If None all edges are considered to have unit weight. Default
+        value None.
+
+    Returns
+    -------
+    path_generator: generator
+        A generator that produces lists of simple paths, in order from
+        shortest to longest.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    NetworkXError
+        If source or target nodes are not in the input graph.
+
+    NetworkXNotImplemented
+        If the input graph is a Multi[Di]Graph.
+
+    Examples
+    --------
+
+    >>> G = nx.cycle_graph(7)
+    >>> paths = list(nx.shortest_simple_paths(G, 0, 3))
+    >>> print(paths)
+    [[0, 1, 2, 3], [0, 6, 5, 4, 3]]
+
+    You can use this function to efficiently compute the k shortest/best
+    paths between two nodes.
+
+    >>> from itertools import islice
+    >>> def k_shortest_paths(G, source, target, k, weight=None):
+    ...     return list(
+    ...         islice(nx.shortest_simple_paths(G, source, target, weight=weight), k)
+    ...     )
+    >>> for path in k_shortest_paths(G, 0, 3, 2):
+    ...     print(path)
+    [0, 1, 2, 3]
+    [0, 6, 5, 4, 3]
+
+    Notes
+    -----
+    This procedure is based on the algorithm by Jin Y. Yen [1]_. Finding
+    the first $K$ paths requires $O(KN^3)$ operations.
+
+    See Also
+    --------
+    all_shortest_paths
+    shortest_path
+    all_simple_paths
+
+    References
+    ----------
+    .. [1] Jin Y. Yen, "Finding the K Shortest Loopless Paths in a
+       Network", Management Science, Vol. 17, No. 11, Theory Series
+       (Jul., 1971), pp. 712-716.
+
+    """
+    if source not in G:
+        raise nx.NodeNotFound(f"source node {source} not in graph")
+
+    if target not in G:
+        raise nx.NodeNotFound(f"target node {target} not in graph")
+
+    if weight is None:
+        length_func = len
+        shortest_path_func = _bidirectional_shortest_path
+    else:
+        wt = _weight_function(G, weight)
+
+        def length_func(path):
+            return sum(
+                wt(u, v, G.get_edge_data(u, v)) for (u, v) in zip(path, path[1:])
+            )
+
+        shortest_path_func = _bidirectional_dijkstra
+
+    listA = []
+    listB = PathBuffer()
+    prev_path = None
+    while True:
+        if not prev_path:
+            length, path = shortest_path_func(G, source, target, weight=weight)
+            listB.push(length, path)
+        else:
+            ignore_nodes = set()
+            ignore_edges = set()
+            for i in range(1, len(prev_path)):
+                root = prev_path[:i]
+                root_length = length_func(root)
+                for path in listA:
+                    if path[:i] == root:
+                        ignore_edges.add((path[i - 1], path[i]))
+                try:
+                    length, spur = shortest_path_func(
+                        G,
+                        root[-1],
+                        target,
+                        ignore_nodes=ignore_nodes,
+                        ignore_edges=ignore_edges,
+                        weight=weight,
+                    )
+                    path = root[:-1] + spur
+                    listB.push(root_length + length, path)
+                except nx.NetworkXNoPath:
+                    pass
+                ignore_nodes.add(root[-1])
+
+        if listB:
+            path = listB.pop()
+            yield path
+            listA.append(path)
+            prev_path = path
+        else:
+            break
+
+
+class PathBuffer:
+    def __init__(self):
+        self.paths = set()
+        self.sortedpaths = []
+        self.counter = count()
+
+    def __len__(self):
+        return len(self.sortedpaths)
+
+    def push(self, cost, path):
+        hashable_path = tuple(path)
+        if hashable_path not in self.paths:
+            heappush(self.sortedpaths, (cost, next(self.counter), path))
+            self.paths.add(hashable_path)
+
+    def pop(self):
+        (cost, num, path) = heappop(self.sortedpaths)
+        hashable_path = tuple(path)
+        self.paths.remove(hashable_path)
+        return path
+
+
+def _bidirectional_shortest_path(
+    G, source, target, ignore_nodes=None, ignore_edges=None, weight=None
+):
+    """Returns the shortest path between source and target ignoring
+    nodes and edges in the containers ignore_nodes and ignore_edges.
+
+    This is a custom modification of the standard bidirectional shortest
+    path implementation at networkx.algorithms.unweighted
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        starting node for path
+
+    target : node
+        ending node for path
+
+    ignore_nodes : container of nodes
+        nodes to ignore, optional
+
+    ignore_edges : container of edges
+        edges to ignore, optional
+
+    weight : None
+        This function accepts a weight argument for convenience of
+        shortest_simple_paths function. It will be ignored.
+
+    Returns
+    -------
+    length : integer
+        Length of the path as counted by ``len(path)``, i.e. the
+        number of nodes (not edges) in the path.
+    path : list
+        List of nodes in a path from source to target.
+
+    Raises
+    ------
+    NetworkXNoPath
+        If no path exists between source and target.
+
+    See Also
+    --------
+    shortest_path
+
+    """
+    # call helper to do the real work
+    results = _bidirectional_pred_succ(G, source, target, ignore_nodes, ignore_edges)
+    pred, succ, w = results
+
+    # build path from pred+w+succ
+    path = []
+    # from w to target
+    while w is not None:
+        path.append(w)
+        w = succ[w]
+    # from source to w
+    w = pred[path[0]]
+    while w is not None:
+        path.insert(0, w)
+        w = pred[w]
+
+    return len(path), path
+
+
+def _bidirectional_pred_succ(G, source, target, ignore_nodes=None, ignore_edges=None):
+    """Bidirectional shortest path helper.
+    Returns (pred,succ,w) where
+    pred is a dictionary of predecessors from w to the source, and
+    succ is a dictionary of successors from w to the target.
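+    w is the node where the forward and backward searches met.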
+    """
+    # does BFS from both source and target and meets in the middle
+    if ignore_nodes and (source in ignore_nodes or target in ignore_nodes):
+        raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
+    if target == source:
+        return ({target: None}, {source: None}, source)
+
+    # handle either directed or undirected
+    if G.is_directed():
+        Gpred = G.predecessors
+        Gsucc = G.successors
+    else:
+        Gpred = G.neighbors
+        Gsucc = G.neighbors
+
+    # support optional nodes filter
+    if ignore_nodes:
+
+        def filter_iter(nodes):
+            def iterate(v):
+                for w in nodes(v):
+                    if w not in ignore_nodes:
+                        yield w
+
+            return iterate
+
+        Gpred = filter_iter(Gpred)
+        Gsucc = filter_iter(Gsucc)
+
+    # support optional edges filter
+    if ignore_edges:
+        if G.is_directed():
+
+            def filter_pred_iter(pred_iter):
+                def iterate(v):
+                    for w in pred_iter(v):
+                        if (w, v) not in ignore_edges:
+                            yield w
+
+                return iterate
+
+            def filter_succ_iter(succ_iter):
+                def iterate(v):
+                    for w in succ_iter(v):
+                        if (v, w) not in ignore_edges:
+                            yield w
+
+                return iterate
+
+            Gpred = filter_pred_iter(Gpred)
+            Gsucc = filter_succ_iter(Gsucc)
+
+        else:
+
+            def filter_iter(nodes):
+                def iterate(v):
+                    for w in nodes(v):
+                        if (v, w) not in ignore_edges and (w, v) not in ignore_edges:
+                            yield w
+
+                return iterate
+
+            Gpred = filter_iter(Gpred)
+            Gsucc = filter_iter(Gsucc)
+
+    # predecessor and successors in search
+    pred = {source: None}
+    succ = {target: None}
+
+    # initialize fringes, start with forward
+    forward_fringe = [source]
+    reverse_fringe = [target]
+
+    while forward_fringe and reverse_fringe:
+        if len(forward_fringe) <= len(reverse_fringe):
+            this_level = forward_fringe
+            forward_fringe = []
+            for v in this_level:
+                for w in Gsucc(v):
+                    if w not in pred:
+                        forward_fringe.append(w)
+                        pred[w] = v
+                    if w in succ:
+                        # found path
+                        return pred, succ, w
+        else:
+            this_level = reverse_fringe
+            reverse_fringe = []
+            for v in this_level:
+                for w in Gpred(v):
+                    if w not in succ:
+                        succ[w] = v
+                        reverse_fringe.append(w)
+                    if w in pred:
+                        # found path
+                        return pred, succ, w
+
+    raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
+
+
+def _bidirectional_dijkstra(
+    G, source, target, weight="weight", ignore_nodes=None, ignore_edges=None
+):
+    """Dijkstra's algorithm for shortest paths using bidirectional search.
+
+    This function returns the shortest path between source and target
+    ignoring nodes and edges in the containers ignore_nodes and
+    ignore_edges.
+
+    This is a custom modification of the standard Dijkstra bidirectional
+    shortest path implementation at networkx.algorithms.weighted
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Starting node.
+
+    target : node
+        Ending node.
+
+    weight: string, function, optional (default='weight')
+        Edge data key or weight function corresponding to the edge weight.
+        If this is a function, the weight of an edge is the value
+        returned by the function. The function must accept exactly three
+        positional arguments: the two endpoints of an edge and the
+        dictionary of edge attributes for that edge. The function must
+        return a number or None to indicate a hidden edge.
+
+    ignore_nodes : container of nodes
+        nodes to ignore, optional
+
+    ignore_edges : container of edges
+        edges to ignore, optional
+
+    Returns
+    -------
+    length : number
+        Shortest path length.
+    path : list
+        List of nodes in a shortest path from source to target.
+ + Raises + ------ + NetworkXNoPath + If no path exists between source and target. + + Notes + ----- + Edge weight attributes must be numerical. + Distances are calculated as sums of weighted edges traversed. + + The weight function can be used to hide edges by returning None. + So ``weight = lambda u, v, d: 1 if d['color']=="red" else None`` + will find the shortest red path. + + In practice bidirectional Dijkstra is much more than twice as fast as + ordinary Dijkstra. + + Ordinary Dijkstra expands nodes in a sphere-like manner from the + source. The radius of this sphere will eventually be the length + of the shortest path. Bidirectional Dijkstra will expand nodes + from both the source and the target, making two spheres of half + this radius. Volume of the first sphere is pi*r*r while the + others are 2*pi*r/2*r/2, making up half the volume. + + This algorithm is not guaranteed to work if edge weights + are negative or are floating point numbers + (overflows and roundoff errors can cause problems). + + See Also + -------- + shortest_path + shortest_path_length + """ + if ignore_nodes and (source in ignore_nodes or target in ignore_nodes): + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") + if source == target: + if source not in G: + raise nx.NodeNotFound(f"Node {source} not in graph") + return (0, [source]) + + # handle either directed or undirected + if G.is_directed(): + Gpred = G.predecessors + Gsucc = G.successors + else: + Gpred = G.neighbors + Gsucc = G.neighbors + + # support optional nodes filter + if ignore_nodes: + + def filter_iter(nodes): + def iterate(v): + for w in nodes(v): + if w not in ignore_nodes: + yield w + + return iterate + + Gpred = filter_iter(Gpred) + Gsucc = filter_iter(Gsucc) + + # support optional edges filter + if ignore_edges: + if G.is_directed(): + + def filter_pred_iter(pred_iter): + def iterate(v): + for w in pred_iter(v): + if (w, v) not in ignore_edges: + yield w + + return iterate + + def filter_succ_iter(succ_iter): + def iterate(v): + for w in succ_iter(v): + if (v, w) not in ignore_edges: + yield w + + return iterate + + Gpred = filter_pred_iter(Gpred) + Gsucc = filter_succ_iter(Gsucc) + + else: + + def filter_iter(nodes): + def iterate(v): + for w in nodes(v): + if (v, w) not in ignore_edges and (w, v) not in ignore_edges: + yield w + + return iterate + + Gpred = filter_iter(Gpred) + Gsucc = filter_iter(Gsucc) + + wt = _weight_function(G, weight) + # Init: Forward Backward + dists = [{}, {}] # dictionary of final distances + paths = [{source: [source]}, {target: [target]}] # dictionary of paths + fringe = [[], []] # heap of (distance, node) tuples for + # extracting next node to expand + seen = [{source: 0}, {target: 0}] # dictionary of distances to + # nodes seen + c = count() + # initialize fringe heap + heappush(fringe[0], (0, next(c), source)) + heappush(fringe[1], (0, next(c), target)) + # neighs for extracting correct neighbor information + neighs = [Gsucc, Gpred] + # variables to hold shortest discovered path + # finaldist = 1e30000 + finalpath = [] + dir = 1 + while fringe[0] and fringe[1]: + # choose direction + # dir == 0 is forward direction and dir == 1 is back + dir = 1 - dir + # extract closest to expand + (dist, _, v) = heappop(fringe[dir]) + if v in dists[dir]: + # Shortest path to v has already been found + continue + # update distance + dists[dir][v] = dist # equal to seen[dir][v] + if v in dists[1 - dir]: + # if we have scanned v in both directions we are done + # we have now discovered the shortest path + 
return (finaldist, finalpath) + + for w in neighs[dir](v): + if dir == 0: # forward + minweight = wt(v, w, G.get_edge_data(v, w)) + else: # back, must remember to change v,w->w,v + minweight = wt(w, v, G.get_edge_data(w, v)) + if minweight is None: + continue + vwLength = dists[dir][v] + minweight + + if w in dists[dir]: + if vwLength < dists[dir][w]: + raise ValueError("Contradictory paths found: negative weights?") + elif w not in seen[dir] or vwLength < seen[dir][w]: + # relaxing + seen[dir][w] = vwLength + heappush(fringe[dir], (vwLength, next(c), w)) + paths[dir][w] = paths[dir][v] + [w] + if w in seen[0] and w in seen[1]: + # see if this path is better than the already + # discovered shortest path + totaldist = seen[0][w] + seen[1][w] + if finalpath == [] or finaldist > totaldist: + finaldist = totaldist + revpath = paths[1][w][:] + revpath.reverse() + finalpath = paths[0][w] + revpath[1:] + raise nx.NetworkXNoPath(f"No path between {source} and {target}.") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py new file mode 100644 index 0000000..456a4ca --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/smallworld.py @@ -0,0 +1,404 @@ +"""Functions for estimating the small-world-ness of graphs. + +A small world network is characterized by a small average shortest path length, +and a large clustering coefficient. + +Small-worldness is commonly measured with the coefficient sigma or omega. + +Both coefficients compare the average clustering coefficient and shortest path +length of a given graph against the same quantities for an equivalent random +or lattice graph. + +For more information, see the Wikipedia article on small-world network [1]_. + +.. [1] Small-world network:: https://en.wikipedia.org/wiki/Small-world_network + +""" + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["random_reference", "lattice_reference", "sigma", "omega"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(3) +@nx._dispatchable(returns_graph=True) +def random_reference(G, niter=1, connectivity=True, seed=None): + """Compute a random graph by swapping edges of a given graph. + + Parameters + ---------- + G : graph + An undirected graph with 4 or more nodes. + + niter : integer (optional, default=1) + An edge is rewired approximately `niter` times. + + connectivity : boolean (optional, default=True) + When True, ensure connectivity for the randomized graph. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : graph + The randomized graph. + + Raises + ------ + NetworkXError + If there are fewer than 4 nodes or 2 edges in `G` + + Notes + ----- + The implementation is adapted from the algorithm by Maslov and Sneppen + (2002) [1]_. + + References + ---------- + .. [1] Maslov, Sergei, and Kim Sneppen. + "Specificity and stability in topology of protein networks." + Science 296.5569 (2002): 910-913. 
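+
+    Examples
+    --------
+    An illustrative sketch; the exact rewired edges depend on the seed, but
+    the degree sequence is always preserved:
+
+    >>> G = nx.connected_watts_strogatz_graph(10, 4, 0.1, seed=42)
+    >>> Gr = nx.random_reference(G, niter=1, seed=42)
+    >>> sorted(d for _, d in Gr.degree()) == sorted(d for _, d in G.degree())
+    True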
+    """
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer than 2 edges")
+
+    from networkx.utils import cumulative_distribution, discrete_sequence
+
+    local_conn = nx.connectivity.local_edge_connectivity
+
+    G = G.copy()
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = cumulative_distribution(degrees)  # cdf of degree
+    nnodes = len(G)
+    nedges = nx.number_of_edges(G)
+    niter = niter * nedges
+    ntries = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2))
+    swapcount = 0
+
+    for i in range(niter):
+        n = 0
+        while n < ntries:
+            # pick two random edges without creating edge list
+            # choose source node indices from discrete distribution
+            (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+            if ai == ci:
+                continue  # same source, skip
+            a = keys[ai]  # convert index to label
+            c = keys[ci]
+            # choose target uniformly from neighbors
+            b = seed.choice(list(G.neighbors(a)))
+            d = seed.choice(list(G.neighbors(c)))
+            if b in [a, c, d] or d in [a, b, c]:
+                continue  # all vertices should be different
+
+            # don't create parallel edges
+            if (d not in G[a]) and (b not in G[c]):
+                G.add_edge(a, d)
+                G.add_edge(c, b)
+                G.remove_edge(a, b)
+                G.remove_edge(c, d)
+
+                # Check if the graph is still connected
+                if connectivity and local_conn(G, a, b) == 0:
+                    # Not connected, revert the swap
+                    G.remove_edge(a, d)
+                    G.remove_edge(c, b)
+                    G.add_edge(a, b)
+                    G.add_edge(c, d)
+                else:
+                    swapcount += 1
+                    break
+            n += 1
+    return G
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@py_random_state(4)
+@nx._dispatchable(returns_graph=True)
+def lattice_reference(G, niter=5, D=None, connectivity=True, seed=None):
+    """Latticize the given graph by swapping edges.
+
+    Parameters
+    ----------
+    G : graph
+        An undirected graph.
+
+    niter : integer (optional, default=5)
+        An edge is rewired approximately niter times.
+
+    D : numpy.array (optional, default=None)
+        Distance to the diagonal matrix.
+
+    connectivity : boolean (optional, default=True)
+        Ensure connectivity for the latticized graph when set to True.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G : graph
+        The latticized graph.
+
+    Raises
+    ------
+    NetworkXError
+        If there are fewer than 4 nodes or 2 edges in `G`
+
+    Notes
+    -----
+    The implementation is adapted from the algorithm by Sporns et al. [1]_,
+    which is inspired by the original work by Maslov and Sneppen (2002) [2]_.
+
+    References
+    ----------
+    .. [1] Sporns, Olaf, and Jonathan D. Zwi.
+       "The small world of the cerebral cortex."
+       Neuroinformatics 2.2 (2004): 145-162.
+    .. [2] Maslov, Sergei, and Kim Sneppen.
+       "Specificity and stability in topology of protein networks."
+       Science 296.5569 (2002): 910-913.
+    """
+    import numpy as np
+
+    from networkx.utils import cumulative_distribution, discrete_sequence
+
+    local_conn = nx.connectivity.local_edge_connectivity
+
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer than 2 edges")
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
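+    # Sampling an endpoint with probability proportional to its degree is
+    # the same as picking a uniformly random edge endpoint, so edges are
+    # effectively drawn uniformly without building an explicit edge list.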
+ G = G.copy() + keys, degrees = zip(*G.degree()) # keys, degree + cdf = cumulative_distribution(degrees) # cdf of degree + + nnodes = len(G) + nedges = nx.number_of_edges(G) + if D is None: + D = np.zeros((nnodes, nnodes)) + un = np.arange(1, nnodes) + um = np.arange(nnodes - 1, 0, -1) + u = np.append((0,), np.where(un < um, un, um)) + + for v in range(int(np.ceil(nnodes / 2))): + D[nnodes - v - 1, :] = np.append(u[v + 1 :], u[: v + 1]) + D[v, :] = D[nnodes - v - 1, :][::-1] + + niter = niter * nedges + # maximal number of rewiring attempts per 'niter' + max_attempts = int(nnodes * nedges / (nnodes * (nnodes - 1) / 2)) + + for _ in range(niter): + n = 0 + while n < max_attempts: + # pick two random edges without creating edge list + # choose source node indices from discrete distribution + (ai, ci) = discrete_sequence(2, cdistribution=cdf, seed=seed) + if ai == ci: + continue # same source, skip + a = keys[ai] # convert index to label + c = keys[ci] + # choose target uniformly from neighbors + b = seed.choice(list(G.neighbors(a))) + d = seed.choice(list(G.neighbors(c))) + bi = keys.index(b) + di = keys.index(d) + + if b in [a, c, d] or d in [a, b, c]: + continue # all vertices should be different + + # don't create parallel edges + if (d not in G[a]) and (b not in G[c]): + if D[ai, bi] + D[ci, di] >= D[ai, ci] + D[bi, di]: + # only swap if we get closer to the diagonal + G.add_edge(a, d) + G.add_edge(c, b) + G.remove_edge(a, b) + G.remove_edge(c, d) + + # Check if the graph is still connected + if connectivity and local_conn(G, a, b) == 0: + # Not connected, revert the swap + G.remove_edge(a, d) + G.remove_edge(c, b) + G.add_edge(a, b) + G.add_edge(c, d) + else: + break + n += 1 + + return G + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(3) +@nx._dispatchable +def sigma(G, niter=100, nrand=10, seed=None): + """Returns the small-world coefficient (sigma) of the given graph. + + The small-world coefficient is defined as: + sigma = C/Cr / L/Lr + where C and L are respectively the average clustering coefficient and + average shortest path length of G. Cr and Lr are respectively the average + clustering coefficient and average shortest path length of an equivalent + random graph. + + A graph is commonly classified as small-world if sigma>1. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + niter : integer (optional, default=100) + Approximate number of rewiring per edge to compute the equivalent + random graph. + nrand : integer (optional, default=10) + Number of random graphs generated to compute the average clustering + coefficient (Cr) and average shortest path length (Lr). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + sigma : float + The small-world coefficient of G. + + Notes + ----- + The implementation is adapted from Humphries et al. [1]_ [2]_. + + References + ---------- + .. [1] The brainstem reticular formation is a small-world, not scale-free, + network M. D. Humphries, K. Gurney and T. J. Prescott, + Proc. Roy. Soc. B 2006 273, 503-511, doi:10.1098/rspb.2005.3354. + .. [2] Humphries and Gurney (2008). + "Network 'Small-World-Ness': A Quantitative Method for Determining + Canonical Network Equivalence". + PLoS One. 3 (4). PMID 18446219. doi:10.1371/journal.pone.0002051. 
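+
+    Examples
+    --------
+    A usage sketch (skipped under doctest because the reference graphs are
+    random and the computation is relatively expensive):
+
+    >>> G = nx.connected_watts_strogatz_graph(100, 6, 0.1, seed=42)
+    >>> s = nx.sigma(G, niter=5, nrand=5, seed=42)  # doctest: +SKIP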
+ """ + import numpy as np + + # Compute the mean clustering coefficient and average shortest path length + # for an equivalent random graph + randMetrics = {"C": [], "L": []} + for i in range(nrand): + Gr = random_reference(G, niter=niter, seed=seed) + randMetrics["C"].append(nx.transitivity(Gr)) + randMetrics["L"].append(nx.average_shortest_path_length(Gr)) + + C = nx.transitivity(G) + L = nx.average_shortest_path_length(G) + Cr = np.mean(randMetrics["C"]) + Lr = np.mean(randMetrics["L"]) + + sigma = (C / Cr) / (L / Lr) + + return float(sigma) + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(3) +@nx._dispatchable +def omega(G, niter=5, nrand=10, seed=None): + """Returns the small-world coefficient (omega) of a graph + + The small-world coefficient of a graph G is: + + omega = Lr/L - C/Cl + + where C and L are respectively the average clustering coefficient and + average shortest path length of G. Lr is the average shortest path length + of an equivalent random graph and Cl is the average clustering coefficient + of an equivalent lattice graph. + + The small-world coefficient (omega) measures how much G is like a lattice + or a random graph. Negative values mean G is similar to a lattice whereas + positive values mean G is a random graph. + Values close to 0 mean that G has small-world characteristics. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + niter: integer (optional, default=5) + Approximate number of rewiring per edge to compute the equivalent + random graph. + + nrand: integer (optional, default=10) + Number of random graphs generated to compute the maximal clustering + coefficient (Cr) and average shortest path length (Lr). + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + + Returns + ------- + omega : float + The small-world coefficient (omega) + + Notes + ----- + The implementation is adapted from the algorithm by Telesford et al. [1]_. + + References + ---------- + .. [1] Telesford, Joyce, Hayasaka, Burdette, and Laurienti (2011). + "The Ubiquity of Small-World Networks". + Brain Connectivity. 1 (0038): 367-75. PMC 3604768. PMID 22432451. + doi:10.1089/brain.2011.0038. 
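+
+    Examples
+    --------
+    A usage sketch (skipped under doctest because the reference graphs are
+    random and the computation is relatively expensive; values near 0
+    suggest small-world structure):
+
+    >>> G = nx.connected_watts_strogatz_graph(100, 6, 0.1, seed=42)
+    >>> w = nx.omega(G, niter=2, nrand=3, seed=42)  # doctest: +SKIP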
+ """ + import numpy as np + + # Compute the mean clustering coefficient and average shortest path length + # for an equivalent random graph + randMetrics = {"C": [], "L": []} + + # Calculate initial average clustering coefficient which potentially will + # get replaced by higher clustering coefficients from generated lattice + # reference graphs + Cl = nx.average_clustering(G) + + niter_lattice_reference = niter + niter_random_reference = niter * 2 + + for _ in range(nrand): + # Generate random graph + Gr = random_reference(G, niter=niter_random_reference, seed=seed) + randMetrics["L"].append(nx.average_shortest_path_length(Gr)) + + # Generate lattice graph + Gl = lattice_reference(G, niter=niter_lattice_reference, seed=seed) + + # Replace old clustering coefficient, if clustering is higher in + # generated lattice reference + Cl_temp = nx.average_clustering(Gl) + if Cl_temp > Cl: + Cl = Cl_temp + + C = nx.average_clustering(G) + L = nx.average_shortest_path_length(G) + Lr = np.mean(randMetrics["L"]) + + omega = (Lr / L) - (C / Cl) + + return float(omega) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py new file mode 100644 index 0000000..d985aa8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/smetric.py @@ -0,0 +1,30 @@ +import networkx as nx + +__all__ = ["s_metric"] + + +@nx._dispatchable +def s_metric(G): + """Returns the s-metric [1]_ of graph. + + The s-metric is defined as the sum of the products ``deg(u) * deg(v)`` + for every edge ``(u, v)`` in `G`. + + Parameters + ---------- + G : graph + The graph used to compute the s-metric. + + Returns + ------- + s : float + The s-metric of the graph. + + References + ---------- + .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger, + Towards a Theory of Scale-Free Graphs: + Definition, Properties, and Implications (Extended Version), 2005. + https://arxiv.org/abs/cond-mat/0501169 + """ + return float(sum(G.degree(u) * G.degree(v) for (u, v) in G.edges())) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py new file mode 100644 index 0000000..5932237 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/sparsifiers.py @@ -0,0 +1,296 @@ +"""Functions for computing sparsifiers of graphs.""" + +import math + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = ["spanner"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@py_random_state(3) +@nx._dispatchable(edge_attrs="weight", returns_graph=True) +def spanner(G, stretch, weight=None, seed=None): + """Returns a spanner of the given graph with the given stretch. + + A spanner of a graph G = (V, E) with stretch t is a subgraph + H = (V, E_S) such that E_S is a subset of E and the distance between + any pair of nodes in H is at most t times the distance between the + nodes in G. + + Parameters + ---------- + G : NetworkX graph + An undirected simple graph. + + stretch : float + The stretch of the spanner. + + weight : object + The edge attribute to use as distance. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + NetworkX graph + A spanner of the given graph with the given stretch. + + Raises + ------ + ValueError + If a stretch less than 1 is given. 
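+
+    Examples
+    --------
+    A small usage sketch; the exact edge set depends on the seed, but the
+    result is always a subgraph of `G` on the same node set:
+
+    >>> G = nx.complete_graph(20)
+    >>> H = nx.spanner(G, stretch=4, seed=42)
+    >>> H.number_of_nodes() == G.number_of_nodes()
+    True
+    >>> H.number_of_edges() <= G.number_of_edges()
+    True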
+ + Notes + ----- + This function implements the spanner algorithm by Baswana and Sen, + see [1]. + + This algorithm is a randomized las vegas algorithm: The expected + running time is O(km) where k = (stretch + 1) // 2 and m is the + number of edges in G. The returned graph is always a spanner of the + given graph with the specified stretch. For weighted graphs the + number of edges in the spanner is O(k * n^(1 + 1 / k)) where k is + defined as above and n is the number of nodes in G. For unweighted + graphs the number of edges is O(n^(1 + 1 / k) + kn). + + References + ---------- + [1] S. Baswana, S. Sen. A Simple and Linear Time Randomized + Algorithm for Computing Sparse Spanners in Weighted Graphs. + Random Struct. Algorithms 30(4): 532-563 (2007). + """ + if stretch < 1: + raise ValueError("stretch must be at least 1") + + k = (stretch + 1) // 2 + + # initialize spanner H with empty edge set + H = nx.empty_graph() + H.add_nodes_from(G.nodes) + + # phase 1: forming the clusters + # the residual graph has V' from the paper as its node set + # and E' from the paper as its edge set + residual_graph = _setup_residual_graph(G, weight) + # clustering is a dictionary that maps nodes in a cluster to the + # cluster center + clustering = {v: v for v in G.nodes} + sample_prob = math.pow(G.number_of_nodes(), -1 / k) + size_limit = 2 * math.pow(G.number_of_nodes(), 1 + 1 / k) + + i = 0 + while i < k - 1: + # step 1: sample centers + sampled_centers = set() + for center in set(clustering.values()): + if seed.random() < sample_prob: + sampled_centers.add(center) + + # combined loop for steps 2 and 3 + edges_to_add = set() + edges_to_remove = set() + new_clustering = {} + for v in residual_graph.nodes: + if clustering[v] in sampled_centers: + continue + + # step 2: find neighboring (sampled) clusters and + # lightest edges to them + lightest_edge_neighbor, lightest_edge_weight = _lightest_edge_dicts( + residual_graph, clustering, v + ) + neighboring_sampled_centers = ( + set(lightest_edge_weight.keys()) & sampled_centers + ) + + # step 3: add edges to spanner + if not neighboring_sampled_centers: + # connect to each neighboring center via lightest edge + for neighbor in lightest_edge_neighbor.values(): + edges_to_add.add((v, neighbor)) + # remove all incident edges + for neighbor in residual_graph.adj[v]: + edges_to_remove.add((v, neighbor)) + + else: # there is a neighboring sampled center + closest_center = min( + neighboring_sampled_centers, key=lightest_edge_weight.get + ) + closest_center_weight = lightest_edge_weight[closest_center] + closest_center_neighbor = lightest_edge_neighbor[closest_center] + + edges_to_add.add((v, closest_center_neighbor)) + new_clustering[v] = closest_center + + # connect to centers with edge weight less than + # closest_center_weight + for center, edge_weight in lightest_edge_weight.items(): + if edge_weight < closest_center_weight: + neighbor = lightest_edge_neighbor[center] + edges_to_add.add((v, neighbor)) + + # remove edges to centers with edge weight less than + # closest_center_weight + for neighbor in residual_graph.adj[v]: + nbr_cluster = clustering[neighbor] + nbr_weight = lightest_edge_weight[nbr_cluster] + if ( + nbr_cluster == closest_center + or nbr_weight < closest_center_weight + ): + edges_to_remove.add((v, neighbor)) + + # check whether iteration added too many edges to spanner, + # if so repeat + if len(edges_to_add) > size_limit: + # an iteration is repeated O(1) times on expectation + continue + + # iteration succeeded + i = i + 1 + + # actually 
add edges to spanner + for u, v in edges_to_add: + _add_edge_to_spanner(H, residual_graph, u, v, weight) + + # actually delete edges from residual graph + residual_graph.remove_edges_from(edges_to_remove) + + # copy old clustering data to new_clustering + for node, center in clustering.items(): + if center in sampled_centers: + new_clustering[node] = center + clustering = new_clustering + + # step 4: remove intra-cluster edges + for u in residual_graph.nodes: + for v in list(residual_graph.adj[u]): + if clustering[u] == clustering[v]: + residual_graph.remove_edge(u, v) + + # update residual graph node set + for v in list(residual_graph.nodes): + if v not in clustering: + residual_graph.remove_node(v) + + # phase 2: vertex-cluster joining + for v in residual_graph.nodes: + lightest_edge_neighbor, _ = _lightest_edge_dicts(residual_graph, clustering, v) + for neighbor in lightest_edge_neighbor.values(): + _add_edge_to_spanner(H, residual_graph, v, neighbor, weight) + + return H + + +def _setup_residual_graph(G, weight): + """Setup residual graph as a copy of G with unique edges weights. + + The node set of the residual graph corresponds to the set V' from + the Baswana-Sen paper and the edge set corresponds to the set E' + from the paper. + + This function associates distinct weights to the edges of the + residual graph (even for unweighted input graphs), as required by + the algorithm. + + Parameters + ---------- + G : NetworkX graph + An undirected simple graph. + + weight : object + The edge attribute to use as distance. + + Returns + ------- + NetworkX graph + The residual graph used for the Baswana-Sen algorithm. + """ + residual_graph = G.copy() + + # establish unique edge weights, even for unweighted graphs + for u, v in G.edges(): + if not weight: + residual_graph[u][v]["weight"] = (id(u), id(v)) + else: + residual_graph[u][v]["weight"] = (G[u][v][weight], id(u), id(v)) + + return residual_graph + + +def _lightest_edge_dicts(residual_graph, clustering, node): + """Find the lightest edge to each cluster. + + Searches for the minimum-weight edge to each cluster adjacent to + the given node. + + Parameters + ---------- + residual_graph : NetworkX graph + The residual graph used by the Baswana-Sen algorithm. + + clustering : dictionary + The current clustering of the nodes. + + node : node + The node from which the search originates. + + Returns + ------- + lightest_edge_neighbor, lightest_edge_weight : dictionary, dictionary + lightest_edge_neighbor is a dictionary that maps a center C to + a node v in the corresponding cluster such that the edge from + the given node to v is the lightest edge from the given node to + any node in cluster. lightest_edge_weight maps a center C to the + weight of the aforementioned edge. + + Notes + ----- + If a cluster has no node that is adjacent to the given node in the + residual graph then the center of the cluster is not a key in the + returned dictionaries. + """ + lightest_edge_neighbor = {} + lightest_edge_weight = {} + for neighbor in residual_graph.adj[node]: + nbr_center = clustering[neighbor] + weight = residual_graph[node][neighbor]["weight"] + if ( + nbr_center not in lightest_edge_weight + or weight < lightest_edge_weight[nbr_center] + ): + lightest_edge_neighbor[nbr_center] = neighbor + lightest_edge_weight[nbr_center] = weight + return lightest_edge_neighbor, lightest_edge_weight + + +def _add_edge_to_spanner(H, residual_graph, u, v, weight): + """Add the edge {u, v} to the spanner H and take weight from + the residual graph. 
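+
+    For weighted graphs, only the first component of the composite weight
+    stored in the residual graph (see ``_setup_residual_graph``) is copied
+    over; the ``id``-based tie-breaking components are dropped.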
+
+    Parameters
+    ----------
+    H : NetworkX graph
+        The spanner under construction.
+
+    residual_graph : NetworkX graph
+        The residual graph used by the Baswana-Sen algorithm. The weight
+        for the edge is taken from this graph.
+
+    u : node
+        One endpoint of the edge.
+
+    v : node
+        The other endpoint of the edge.
+
+    weight : object
+        The edge attribute to use as distance.
+    """
+    H.add_edge(u, v)
+    if weight:
+        H[u][v][weight] = residual_graph[u][v]["weight"][0]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py
new file mode 100644
index 0000000..f43c58c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/structuralholes.py
@@ -0,0 +1,374 @@
+"""Functions for computing measures of structural holes."""
+
+import networkx as nx
+
+__all__ = ["constraint", "local_constraint", "effective_size"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def mutual_weight(G, u, v, weight=None):
+    """Returns the sum of the weights of the edge from `u` to `v` and
+    the edge from `v` to `u` in `G`.
+
+    `weight` is the edge data key that represents the edge weight. If
+    the specified key is `None` or is not in the edge data for an edge,
+    that edge is assumed to have weight 1.
+
+    Pre-conditions: `u` and `v` must both be in `G`.
+
+    """
+    try:
+        a_uv = G[u][v].get(weight, 1)
+    except KeyError:
+        a_uv = 0
+    try:
+        a_vu = G[v][u].get(weight, 1)
+    except KeyError:
+        a_vu = 0
+    return a_uv + a_vu
+
+
+@nx._dispatchable(edge_attrs="weight")
+def normalized_mutual_weight(G, u, v, norm=sum, weight=None):
+    """Returns normalized mutual weight of the edges from `u` to `v`
+    with respect to the mutual weights of the neighbors of `u` in `G`.
+
+    `norm` specifies how the normalization factor is computed. It must
+    be a function that takes a single argument and returns a number.
+    The argument will be an iterable of mutual weights
+    of pairs ``(u, w)``, where ``w`` ranges over each (in- and
+    out-)neighbor of ``u``. Common values for `norm` are
+    ``sum`` and ``max``.
+
+    `weight` can be ``None`` or a string. If None, all edge weights
+    are considered equal; otherwise it holds the name of the edge
+    attribute used as weight.
+
+    """
+    scale = norm(mutual_weight(G, u, w, weight) for w in set(nx.all_neighbors(G, u)))
+    return 0 if scale == 0 else mutual_weight(G, u, v, weight) / scale
+
+
+@nx._dispatchable(edge_attrs="weight")
+def effective_size(G, nodes=None, weight=None):
+    r"""Returns the effective size of all nodes in the graph ``G``.
+
+    The *effective size* of a node's ego network is based on the concept
+    of redundancy. A person's ego network has redundancy to the extent
+    that her contacts are connected to each other as well. The
+    nonredundant part of a person's relationships is the effective
+    size of her ego network [1]_. Formally, the effective size of a
+    node $u$, denoted $e(u)$, is defined by
+
+    .. math::
+
+       e(u) = \sum_{v \in N(u) \setminus \{u\}}
+              \left(1 - \sum_{w \in N(v)} p_{uw} m_{vw}\right)
+
+    where $N(u)$ is the set of neighbors of $u$ and $p_{uw}$ is the
+    normalized mutual weight of the (directed or undirected) edges
+    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. And $m_{vw}$
+    is the mutual weight of $v$ and $w$ divided by $v$'s highest mutual
+    weight with any of its neighbors. The *mutual weight* of $u$ and $v$
+    is the sum of the weights of edges joining them (edge weights are
+    assumed to be one if the graph is unweighted).
+ + For the case of unweighted and undirected graphs, Borgatti proposed + a simplified formula to compute effective size [2]_ + + .. math:: + + e(u) = n - \frac{2t}{n} + + where `t` is the number of ties in the ego network (not including + ties to ego) and `n` is the number of nodes (excluding ego). + + Parameters + ---------- + G : NetworkX graph + The graph containing ``v``. Directed graphs are treated like + undirected graphs when computing neighbors of ``v``. + + nodes : container, optional + Container of nodes in the graph ``G`` to compute the effective size. + If None, the effective size of every node is computed. + + weight : None or string, optional + If None, all edge weights are considered equal. + Otherwise holds the name of the edge attribute used as weight. + + Returns + ------- + dict + Dictionary with nodes as keys and the effective size of the node as values. + + Notes + ----- + Isolated nodes, including nodes which only have self-loop edges, do not + have a well-defined effective size:: + + >>> G = nx.path_graph(3) + >>> G.add_edge(4, 4) + >>> nx.effective_size(G) + {0: 1.0, 1: 2.0, 2: 1.0, 4: nan} + + Burt also defined the related concept of *efficiency* of a node's ego + network, which is its effective size divided by the degree of that + node [1]_. So you can easily compute efficiency: + + >>> G = nx.DiGraph() + >>> G.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)]) + >>> esize = nx.effective_size(G) + >>> efficiency = {n: v / G.degree(n) for n, v in esize.items()} + + See also + -------- + constraint + + References + ---------- + .. [1] Burt, Ronald S. + *Structural Holes: The Social Structure of Competition.* + Cambridge: Harvard University Press, 1995. + + .. [2] Borgatti, S. + "Structural Holes: Unpacking Burt's Redundancy Measures" + CONNECTIONS 20(1):35-38. 
+           http://www.analytictech.com/connections/v20(1)/holes.htm
+
+    """
+
+    def redundancy(G, u, v, weight=None):
+        nmw = normalized_mutual_weight
+        r = sum(
+            nmw(G, u, w, weight=weight) * nmw(G, v, w, norm=max, weight=weight)
+            for w in set(nx.all_neighbors(G, u))
+        )
+        return 1 - r

+    # Check if scipy is available
+    try:
+        # Needed for errstate
+        import numpy as np
+
+        # make sure nx.adjacency_matrix will not raise
+        import scipy as sp
+
+        has_scipy = True
+    except ImportError:
+        has_scipy = False
+
+    if nodes is None and has_scipy:
+        # In order to compute constraint of all nodes,
+        # algorithms based on sparse matrices can be much faster
+
+        # Obtain the adjacency matrix
+        P = nx.adjacency_matrix(G, weight=weight)
+
+        # Calculate mutual weights
+        mutual_weights1 = P + P.T
+        mutual_weights2 = mutual_weights1.copy()
+
+        with np.errstate(divide="ignore"):
+            # Mutual_weights1 = Normalize mutual weights by row sums
+            mutual_weights1 /= mutual_weights1.sum(axis=1)[:, np.newaxis]
+
+            # Mutual_weights2 = Normalize mutual weights by row max
+            mutual_weights2 /= mutual_weights2.max(axis=1).toarray()
+
+        # Calculate effective sizes
+        r = 1 - (mutual_weights1 @ mutual_weights2.T).toarray()
+        effective_size = ((mutual_weights1 > 0) * r).sum(axis=1)
+
+        # Special treatment: isolated nodes (ignoring selfloops) marked with "nan"
+        sum_mutual_weights = mutual_weights1.sum(axis=1) - mutual_weights1.diagonal()
+        isolated_nodes = sum_mutual_weights == 0
+        effective_size[isolated_nodes] = float("nan")
+        # Use tolist() to automatically convert numpy scalars -> Python scalars
+        return dict(zip(G, effective_size.tolist()))
+
+    # Results for only requested nodes
+    effective_size = {}
+    if nodes is None:
+        nodes = G
+    # Use Borgatti's simplified formula for unweighted and undirected graphs
+    if not G.is_directed() and weight is None:
+        for v in nodes:
+            # Effective size is not defined for isolated nodes, including nodes
+            # with only self-edges
+            if all(u == v for u in G[v]):
+                effective_size[v] = float("nan")
+                continue
+            E = nx.ego_graph(G, v, center=False, undirected=True)
+            effective_size[v] = len(E) - (2 * E.size()) / len(E)
+    else:
+        for v in nodes:
+            # Effective size is not defined for isolated nodes, including nodes
+            # with only self-edges
+            if all(u == v for u in G[v]):
+                effective_size[v] = float("nan")
+                continue
+            effective_size[v] = sum(
+                redundancy(G, v, u, weight) for u in set(nx.all_neighbors(G, v))
+            )
+    return effective_size
+
+
+@nx._dispatchable(edge_attrs="weight")
+def constraint(G, nodes=None, weight=None):
+    r"""Returns the constraint on all nodes in the graph ``G``.
+
+    The *constraint* is a measure of the extent to which a node *v* is
+    invested in those nodes that are themselves invested in the
+    neighbors of *v*. Formally, the *constraint on v*, denoted `c(v)`,
+    is defined by
+
+    .. math::
+
+       c(v) = \sum_{w \in N(v) \setminus \{v\}} \ell(v, w)
+
+    where $N(v)$ is the subset of the neighbors of `v` that are either
+    predecessors or successors of `v` and $\ell(v, w)$ is the local
+    constraint on `v` with respect to `w` [1]_. For the definition of local
+    constraint, see :func:`local_constraint`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph containing ``v``. This can be either directed or undirected.
+
+    nodes : container, optional
+        Container of nodes in the graph ``G`` to compute the constraint. If
+        None, the constraint of every node is computed.
+
+    weight : None or string, optional
+        If None, all edge weights are considered equal.
+        Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    dict
+        Dictionary with nodes as keys and the constraint on the node as values.
+
+    See also
+    --------
+    local_constraint
+
+    References
+    ----------
+    .. [1] Burt, Ronald S.
+           "Structural holes and good ideas".
+           American Journal of Sociology (110): 349–399.
+
+    """
+
+    # Check if scipy is available
+    try:
+        # Needed for errstate
+        import numpy as np
+
+        # make sure nx.adjacency_matrix will not raise
+        import scipy as sp
+
+        has_scipy = True
+    except ImportError:
+        has_scipy = False
+
+    if nodes is None and has_scipy:
+        # In order to compute constraint of all nodes,
+        # algorithms based on sparse matrices can be much faster
+
+        # Obtain the adjacency matrix
+        P = nx.adjacency_matrix(G, weight=weight)
+
+        # Calculate mutual weights
+        mutual_weights = P + P.T
+
+        # Normalize mutual weights by row sums
+        sum_mutual_weights = mutual_weights.sum(axis=1)
+        with np.errstate(divide="ignore"):
+            mutual_weights /= sum_mutual_weights[:, np.newaxis]
+
+        # Calculate local constraints and constraints
+        local_constraints = (mutual_weights + mutual_weights @ mutual_weights) ** 2
+        constraints = ((mutual_weights > 0) * local_constraints).sum(axis=1)
+
+        # Special treatment: isolated nodes marked with "nan"
+        isolated_nodes = sum_mutual_weights - 2 * mutual_weights.diagonal() == 0
+        constraints[isolated_nodes] = float("nan")
+        # Use tolist() to automatically convert numpy scalars -> Python scalars
+        return dict(zip(G, constraints.tolist()))
+
+    # Result for only requested nodes
+    constraint = {}
+    if nodes is None:
+        nodes = G
+    for v in nodes:
+        # Constraint is not defined for isolated nodes
+        if len(G[v]) == 0:
+            constraint[v] = float("nan")
+            continue
+        constraint[v] = sum(
+            local_constraint(G, v, n, weight) for n in set(nx.all_neighbors(G, v))
+        )
+    return constraint
+
+
+@nx._dispatchable(edge_attrs="weight")
+def local_constraint(G, u, v, weight=None):
+    r"""Returns the local constraint on the node ``u`` with respect to
+    the node ``v`` in the graph ``G``.
+
+    Formally, the *local constraint on u with respect to v*, denoted
+    $\ell(u, v)$, is defined by
+
+    .. math::
+
+       \ell(u, v) = \left(p_{uv} + \sum_{w \in N(v)} p_{uw} p_{wv}\right)^2,
+
+    where $N(v)$ is the set of neighbors of $v$ and $p_{uv}$ is the
+    normalized mutual weight of the (directed or undirected) edges
+    joining $u$ and $v$, for each vertex $u$ and $v$ [1]_. The *mutual
+    weight* of $u$ and $v$ is the sum of the weights of edges joining
+    them (edge weights are assumed to be one if the graph is
+    unweighted).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph containing ``u`` and ``v``. This can be either
+        directed or undirected.
+
+    u : node
+        A node in the graph ``G``.
+
+    v : node
+        A node in the graph ``G``.
+
+    weight : None or string, optional
+        If None, all edge weights are considered equal.
+        Otherwise holds the name of the edge attribute used as weight.
+
+    Returns
+    -------
+    float
+        The local constraint on the node ``u`` with respect to the node
+        ``v`` in the graph ``G``.
+
+    See also
+    --------
+    constraint
+
+    References
+    ----------
+    .. [1] Burt, Ronald S.
+           "Structural holes and good ideas".
+           American Journal of Sociology (110): 349–399.
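+
+    Examples
+    --------
+    In the complete graph on three nodes each pair of nodes is directly
+    connected and shares exactly one common neighbor, so $p_{uv} = 0.5$,
+    the indirect term is $0.25$, and the local constraint is
+    $(0.5 + 0.25)^2 = 0.5625$:
+
+    >>> nx.local_constraint(nx.complete_graph(3), 0, 1)
+    0.5625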
+
+    """
+    nmw = normalized_mutual_weight
+    direct = nmw(G, u, v, weight=weight)
+    indirect = sum(
+        nmw(G, u, w, weight=weight) * nmw(G, w, v, weight=weight)
+        for w in set(nx.all_neighbors(G, u))
+    )
+    return (direct + indirect) ** 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py
new file mode 100644
index 0000000..23db8da
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/summarization.py
@@ -0,0 +1,564 @@
+"""
+Graph summarization finds smaller representations of graphs resulting in faster
+runtime of algorithms, reduced storage needs, and noise reduction.
+Summarization has applications in areas such as visualization, pattern mining,
+clustering and community detection, and more. Core graph summarization
+techniques are grouping/aggregation, bit-compression,
+simplification/sparsification, and influence based. Graph summarization
+algorithms often produce either summary graphs in the form of supergraphs or
+sparsified graphs, or a list of independent structures. Supergraphs are the
+most common product, which consist of supernodes and original nodes and are
+connected by edges and superedges, which represent aggregate edges between
+nodes and supernodes.
+
+Grouping/aggregation based techniques compress graphs by representing
+close/connected nodes and edges in a graph by a single node/edge in a
+supergraph. Nodes can be grouped together into supernodes based on their
+structural similarities or proximity within a graph to reduce the total number
+of nodes in a graph. Edge-grouping techniques group edges into lossy/lossless
+nodes called compressor or virtual nodes to reduce the total number of edges in
+a graph. Edge-grouping techniques can be lossless, meaning that they can be
+used to re-create the original graph, or techniques can be lossy, requiring
+less space to store the summary graph, but at the expense of lower
+reconstruction accuracy of the original graph.
+
+Bit-compression techniques minimize the amount of information needed to
+describe the original graph, while revealing structural patterns in the
+original graph. The two-part minimum description length (MDL) is often used to
+represent the model and the original graph in terms of the model. A key
+difference between graph compression and graph summarization is that graph
+summarization focuses on finding structural patterns within the original graph,
+whereas graph compression focuses on compressing the original graph to be as
+small as possible. **NOTE**: Some bit-compression methods exist solely to
+compress a graph without creating a summary graph or finding comprehensible
+structural patterns.
+
+Simplification/Sparsification techniques attempt to create a sparse
+representation of a graph by removing unimportant nodes and edges from the
+graph. Sparsified graphs differ from supergraphs created by
+grouping/aggregation by only containing a subset of the original nodes and
+edges of the original graph.
+
+Influence based techniques aim to find a high-level description of influence
+propagation in a large graph. These methods are scarce and have been mostly
+applied to social graphs.
+
+*dedensification* is a grouping/aggregation based technique to compress the
+neighborhoods around high-degree nodes in unweighted graphs by adding
+compressor nodes that summarize multiple edges of the same type to
+high-degree nodes (nodes with a degree greater than a given threshold).
+Dedensification was developed for the purpose of increasing performance of
+query processing around high-degree nodes in graph databases and enables direct
+operations on the compressed graph. The structural patterns surrounding
+high-degree nodes in the original graph are preserved while using fewer edges
+and adding a small number of compressor nodes. The degree of nodes present in
+the original graph is also preserved. The current implementation of
+dedensification supports graphs with one edge type.
+
+For more information on graph summarization, see `Graph Summarization Methods
+and Applications: A Survey `_
+"""
+
+from collections import Counter, defaultdict
+
+import networkx as nx
+
+__all__ = ["dedensify", "snap_aggregation"]
+
+
+@nx._dispatchable(mutates_input={"not copy": 3}, returns_graph=True)
+def dedensify(G, threshold, prefix=None, copy=True):
+    """Compresses neighborhoods around high-degree nodes
+
+    Reduces the number of edges to high-degree nodes by adding compressor nodes
+    that summarize multiple edges of the same type to high-degree nodes (nodes
+    with a degree greater than a given threshold). Dedensification also has
+    the added benefit of reducing the number of edges around high-degree nodes.
+    The implementation currently supports graphs with a single edge type.
+
+    Parameters
+    ----------
+    G: graph
+        A networkx graph
+    threshold: int
+        Minimum degree threshold of a node to be considered a high degree node.
+        The threshold must be greater than or equal to 2.
+    prefix: str or None, optional (default: None)
+        An optional prefix for denoting compressor nodes
+    copy: bool, optional (default: True)
+        If True, dedensification is performed on a copy of `G`;
+        if False, `G` is modified in place.
+
+    Returns
+    -------
+    dedensified networkx graph : (graph, set)
+        2-tuple of the dedensified graph and set of compressor nodes
+
+    Notes
+    -----
+    According to the algorithm in [1]_, removes edges in a graph by
+    compressing/decompressing the neighborhoods around high degree nodes by
+    adding compressor nodes that summarize multiple edges of the same type
+    to high-degree nodes. Dedensification will only add a compressor node when
+    doing so will reduce the total number of edges in the given graph. This
+    implementation currently supports graphs with a single edge type.
+
+    Examples
+    --------
+    Dedensification will only add compressor nodes when doing so would result
+    in fewer edges::
+
+        >>> original_graph = nx.DiGraph()
+        >>> original_graph.add_nodes_from(
+        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
+        ... )
+        >>> original_graph.add_edges_from(
+        ...     [
+        ...         ("1", "C"), ("1", "B"),
+        ...         ("2", "C"), ("2", "B"), ("2", "A"),
+        ...         ("3", "B"), ("3", "A"), ("3", "6"),
+        ...         ("4", "C"), ("4", "B"), ("4", "A"),
+        ...         ("5", "B"), ("5", "A"),
+        ...         ("6", "5"),
+        ...         ("A", "6")
+        ...     ]
+        ... )
+        >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2)
+        >>> original_graph.number_of_edges()
+        15
+        >>> c_graph.number_of_edges()
+        14
+
+    A dedensified, directed graph can be "densified" to reconstruct the
+    original graph::
+
+        >>> original_graph = nx.DiGraph()
+        >>> original_graph.add_nodes_from(
+        ...     ["1", "2", "3", "4", "5", "6", "A", "B", "C"]
+        ... )
+        >>> original_graph.add_edges_from(
+        ...     [
+        ...         ("1", "C"), ("1", "B"),
+        ...         ("2", "C"), ("2", "B"), ("2", "A"),
+        ...         ("3", "B"), ("3", "A"), ("3", "6"),
+        ...         ("4", "C"), ("4", "B"), ("4", "A"),
+        ...         ("5", "B"), ("5", "A"),
+        ...         ("6", "5"),
+        ...         ("A", "6")
+        ...     ]
+        ...
) + >>> c_graph, c_nodes = nx.dedensify(original_graph, threshold=2) + >>> # re-densifies the compressed graph into the original graph + >>> for c_node in c_nodes: + ... all_neighbors = set(nx.all_neighbors(c_graph, c_node)) + ... out_neighbors = set(c_graph.neighbors(c_node)) + ... for out_neighbor in out_neighbors: + ... c_graph.remove_edge(c_node, out_neighbor) + ... in_neighbors = all_neighbors - out_neighbors + ... for in_neighbor in in_neighbors: + ... c_graph.remove_edge(in_neighbor, c_node) + ... for out_neighbor in out_neighbors: + ... c_graph.add_edge(in_neighbor, out_neighbor) + ... c_graph.remove_node(c_node) + ... + >>> nx.is_isomorphic(original_graph, c_graph) + True + + References + ---------- + .. [1] Maccioni, A., & Abadi, D. J. (2016, August). + Scalable pattern matching over compressed graphs via dedensification. + In Proceedings of the 22nd ACM SIGKDD International Conference on + Knowledge Discovery and Data Mining (pp. 1755-1764). + http://www.cs.umd.edu/~abadi/papers/graph-dedense.pdf + """ + if threshold < 2: + raise nx.NetworkXError("The degree threshold must be >= 2") + + degrees = G.in_degree if G.is_directed() else G.degree + # Group nodes based on degree threshold + high_degree_nodes = {n for n, d in degrees if d > threshold} + low_degree_nodes = G.nodes() - high_degree_nodes + + auxiliary = {} + for node in G: + high_degree_nbrs = frozenset(high_degree_nodes & set(G[node])) + if high_degree_nbrs: + if high_degree_nbrs in auxiliary: + auxiliary[high_degree_nbrs].add(node) + else: + auxiliary[high_degree_nbrs] = {node} + + if copy: + G = G.copy() + + compressor_nodes = set() + for index, (high_degree_nodes, low_degree_nodes) in enumerate(auxiliary.items()): + low_degree_node_count = len(low_degree_nodes) + high_degree_node_count = len(high_degree_nodes) + old_edges = high_degree_node_count * low_degree_node_count + new_edges = high_degree_node_count + low_degree_node_count + if old_edges <= new_edges: + continue + compression_node = "".join(str(node) for node in high_degree_nodes) + if prefix: + compression_node = str(prefix) + compression_node + for node in low_degree_nodes: + for high_node in high_degree_nodes: + if G.has_edge(node, high_node): + G.remove_edge(node, high_node) + + G.add_edge(node, compression_node) + for node in high_degree_nodes: + G.add_edge(compression_node, node) + compressor_nodes.add(compression_node) + return G, compressor_nodes + + +def _snap_build_graph( + G, + groups, + node_attributes, + edge_attributes, + neighbor_info, + edge_types, + prefix, + supernode_attribute, + superedge_attribute, +): + """ + Build the summary graph from the data structures produced in the SNAP aggregation algorithm + + Used in the SNAP aggregation algorithm to build the output summary graph and supernode + lookup dictionary. 
This process uses the original graph and the data structures to
+    create the supernodes with the correct node attributes, and the superedges with the correct
+    edge attributes.
+
+    Parameters
+    ----------
+    G: networkx.Graph
+        the original graph to be summarized
+    groups: dict
+        A dictionary of unique group IDs and their corresponding node groups
+    node_attributes: iterable
+        An iterable of the node attributes considered in the summarization process
+    edge_attributes: iterable
+        An iterable of the edge attributes considered in the summarization process
+    neighbor_info: dict
+        A data structure indicating the number of edges a node has with the
+        groups in the current summarization of each edge type
+    edge_types: dict
+        dictionary of edges in the graph and their corresponding attributes recognized
+        in the summarization
+    prefix: string
+        The prefix to be added to all supernodes
+    supernode_attribute: str
+        The node attribute for recording the supernode groupings of nodes
+    superedge_attribute: str
+        The edge attribute for recording the edge types represented by superedges
+
+    Returns
+    -------
+    summary graph: NetworkX graph
+    """
+    output = G.__class__()
+    node_label_lookup = {}
+    for index, group_id in enumerate(groups):
+        group_set = groups[group_id]
+        supernode = f"{prefix}{index}"
+        node_label_lookup[group_id] = supernode
+        supernode_attributes = {
+            attr: G.nodes[next(iter(group_set))][attr] for attr in node_attributes
+        }
+        supernode_attributes[supernode_attribute] = group_set
+        output.add_node(supernode, **supernode_attributes)
+
+    for group_id in groups:
+        group_set = groups[group_id]
+        source_supernode = node_label_lookup[group_id]
+        for other_group, group_edge_types in neighbor_info[
+            next(iter(group_set))
+        ].items():
+            if group_edge_types:
+                target_supernode = node_label_lookup[other_group]
+                summary_graph_edge = (source_supernode, target_supernode)
+
+                edge_types = [
+                    dict(zip(edge_attributes, edge_type))
+                    for edge_type in group_edge_types
+                ]
+
+                has_edge = output.has_edge(*summary_graph_edge)
+                if output.is_multigraph():
+                    if not has_edge:
+                        for edge_type in edge_types:
+                            output.add_edge(*summary_graph_edge, **edge_type)
+                elif not output.is_directed():
+                    existing_edge_data = output.get_edge_data(*summary_graph_edge)
+                    for edge_type in edge_types:
+                        if edge_type not in existing_edge_data.values():
+                            output.add_edge(*summary_graph_edge, **edge_type)
+                else:
+                    superedge_attributes = {superedge_attribute: edge_types}
+                    output.add_edge(*summary_graph_edge, **superedge_attributes)
+
+    return output
+
+
+def _snap_eligible_group(G, groups, group_lookup, edge_types):
+    """
+    Determines if a group is eligible to be split.
+
+    A group is eligible to be split if its nodes do not all have edges of the
+    same type(s) with the same other groups.
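+
+    For example, with a single edge attribute ``"type"``, a group in which
+    one node has only ``"Strong"`` edges to group X while another node also
+    has a ``"Weak"`` edge to group Y is not homogeneous, so it is eligible
+    to be split.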
+ + Parameters + ---------- + G: graph + graph to be summarized + groups: dict + A dictionary of unique group IDs and their corresponding node groups + group_lookup: dict + dictionary of nodes and their current corresponding group ID + edge_types: dict + dictionary of edges in the graph and their corresponding attributes recognized + in the summarization + + Returns + ------- + tuple: group ID to split, and neighbor-groups participation_counts data structure + """ + nbr_info = {node: {gid: Counter() for gid in groups} for node in group_lookup} + for group_id in groups: + current_group = groups[group_id] + + # build nbr_info for nodes in group + for node in current_group: + nbr_info[node] = {group_id: Counter() for group_id in groups} + edges = G.edges(node, keys=True) if G.is_multigraph() else G.edges(node) + for edge in edges: + neighbor = edge[1] + edge_type = edge_types[edge] + neighbor_group_id = group_lookup[neighbor] + nbr_info[node][neighbor_group_id][edge_type] += 1 + + # check if group_id is eligible to be split + group_size = len(current_group) + for other_group_id in groups: + edge_counts = Counter() + for node in current_group: + edge_counts.update(nbr_info[node][other_group_id].keys()) + + if not all(count == group_size for count in edge_counts.values()): + # only the nbr_info of the returned group_id is required for handling group splits + return group_id, nbr_info + + # if no eligible groups, complete nbr_info is calculated + return None, nbr_info + + +def _snap_split(groups, neighbor_info, group_lookup, group_id): + """ + Splits a group based on edge types and updates the groups accordingly + + Splits the group with the given group_id based on the edge types + of the nodes so that each new grouping will all have the same + edges with other nodes. + + Parameters + ---------- + groups: dict + A dictionary of unique group IDs and their corresponding node groups + neighbor_info: dict + A data structure indicating the number of edges a node has with the + groups in the current summarization of each edge type + edge_types: dict + dictionary of edges in the graph and their corresponding attributes recognized + in the summarization + group_lookup: dict + dictionary of nodes and their current corresponding group ID + group_id: object + ID of group to be split + + Returns + ------- + dict + The updated groups based on the split + """ + new_group_mappings = defaultdict(set) + for node in groups[group_id]: + signature = tuple( + frozenset(edge_types) for edge_types in neighbor_info[node].values() + ) + new_group_mappings[signature].add(node) + + # leave the biggest new_group as the original group + new_groups = sorted(new_group_mappings.values(), key=len) + for new_group in new_groups[:-1]: + # Assign unused integer as the new_group_id + # ids are tuples, so will not interact with the original group_ids + new_group_id = len(groups) + groups[new_group_id] = new_group + groups[group_id] -= new_group + for node in new_group: + group_lookup[node] = new_group_id + + return groups + + +@nx._dispatchable( + node_attrs="[node_attributes]", edge_attrs="[edge_attributes]", returns_graph=True +) +def snap_aggregation( + G, + node_attributes, + edge_attributes=(), + prefix="Supernode-", + supernode_attribute="group", + superedge_attribute="types", +): + """Creates a summary graph based on attributes and connectivity. 
+
+    This function uses the Summarization by Grouping Nodes on Attributes
+    and Pairwise edges (SNAP) algorithm for summarizing a given
+    graph by grouping nodes by node attributes and their edge attributes
+    into supernodes in a summary graph. This name SNAP should not be
+    confused with the Stanford Network Analysis Project (SNAP).
+
+    Here is a high-level view of how this algorithm works:
+
+    1) Group nodes by node attribute values.
+
+    2) Iteratively split groups until all nodes in each group have edges
+    to nodes in the same groups. That is, until all the groups are homogeneous
+    in their member nodes' edges to other groups. For example,
+    if all the nodes in group A only have edges to nodes in group B, then the
+    group is homogeneous and does not need to be split. If all nodes in group B
+    have edges with nodes in groups {A, C}, but some also have edges with other
+    nodes in B, then group B is not homogeneous and needs to be split into
+    a group of nodes having edges with {A, C} and a group of nodes having
+    edges with {A, B, C}. This way, viewers of the summary graph can
+    assume that all nodes in the group have the exact same node attributes and
+    the exact same edges.
+
+    3) Build the output summary graph, where the groups are represented by
+    super-nodes. Edges represent the edges shared between all the nodes in each
+    respective group.
+
+    A SNAP summary graph can be used to visualize graphs that are too large to display
+    or visually analyze, or to efficiently identify sets of similar nodes with similar connectivity
+    patterns to other sets of similar nodes based on specified node and/or edge attributes in a graph.
+
+    Parameters
+    ----------
+    G: graph
+        Networkx Graph to be summarized
+    node_attributes: iterable, required
+        An iterable of the node attributes used to group nodes in the summarization process. Nodes
+        with the same values for these attributes will be grouped together in the summary graph.
+    edge_attributes: iterable, optional
+        An iterable of the edge attributes considered in the summarization process. If provided, unique
+        combinations of the attribute values found in the graph are used to
+        determine the edge types in the graph. If not provided, all edges
+        are considered to be of the same type.
+    prefix: str
+        The prefix used to denote supernodes in the summary graph. Defaults to 'Supernode-'.
+    supernode_attribute: str
+        The node attribute for recording the supernode groupings of nodes. Defaults to 'group'.
+    superedge_attribute: str
+        The edge attribute for recording the edge types of multiple edges. Defaults to 'types'.
+
+    Returns
+    -------
+    networkx.Graph: summary graph
+
+    Examples
+    --------
+    SNAP aggregation takes a graph and summarizes it in the context of user-provided
+    node and edge attributes such that a viewer can more easily extract and
+    analyze the information represented by the graph
+
+    >>> nodes = {
+    ...     "A": dict(color="Red"),
+    ...     "B": dict(color="Red"),
+    ...     "C": dict(color="Red"),
+    ...     "D": dict(color="Red"),
+    ...     "E": dict(color="Blue"),
+    ...     "F": dict(color="Blue"),
+    ... }
+    >>> edges = [
+    ...     ("A", "E", "Strong"),
+    ...     ("B", "F", "Strong"),
+    ...     ("C", "E", "Weak"),
+    ...     ("D", "F", "Weak"),
+    ... ]
+    >>> G = nx.Graph()
+    >>> for node in nodes:
+    ...     attributes = nodes[node]
+    ...     G.add_node(node, **attributes)
+    >>> for source, target, type in edges:
+    ...     G.add_edge(source, target, type=type)
+    >>> node_attributes = ("color",)
+    >>> edge_attributes = ("type",)
+    >>> summary_graph = nx.snap_aggregation(
+    ...
+    ...     G, node_attributes=node_attributes, edge_attributes=edge_attributes
+    ... )
+
+    Notes
+    -----
+    The summary graph produced is called a maximum Attribute-edge
+    compatible (AR-compatible) grouping. According to [1]_, an
+    AR-compatible grouping means that all nodes in each group have exactly
+    the same node attribute values and exactly the same edges and
+    edge types to one or more nodes in the same groups. The maximum
+    AR-compatible grouping is the grouping with the minimal cardinality.
+
+    The AR-compatible grouping is the most detailed grouping provided by
+    any of the SNAP algorithms.
+
+    References
+    ----------
+    .. [1] Y. Tian, R. A. Hankins, and J. M. Patel. Efficient aggregation
+       for graph summarization. In Proc. 2008 ACM-SIGMOD Int. Conf.
+       Management of Data (SIGMOD’08), pages 567–580, Vancouver, Canada,
+       June 2008.
+    """
+    edge_types = {
+        edge: tuple(attrs.get(attr) for attr in edge_attributes)
+        for edge, attrs in G.edges.items()
+    }
+    if not G.is_directed():
+        if G.is_multigraph():
+            # list is needed to avoid mutating while iterating
+            edges = [((v, u, k), etype) for (u, v, k), etype in edge_types.items()]
+        else:
+            # list is needed to avoid mutating while iterating
+            edges = [((v, u), etype) for (u, v), etype in edge_types.items()]
+        edge_types.update(edges)
+
+    group_lookup = {
+        node: tuple(attrs[attr] for attr in node_attributes)
+        for node, attrs in G.nodes.items()
+    }
+    groups = defaultdict(set)
+    for node, node_type in group_lookup.items():
+        groups[node_type].add(node)
+
+    eligible_group_id, nbr_info = _snap_eligible_group(
+        G, groups, group_lookup, edge_types
+    )
+    while eligible_group_id:
+        groups = _snap_split(groups, nbr_info, group_lookup, eligible_group_id)
+        eligible_group_id, nbr_info = _snap_eligible_group(
+            G, groups, group_lookup, edge_types
+        )
+    return _snap_build_graph(
+        G,
+        groups,
+        node_attributes,
+        edge_attributes,
+        nbr_info,
+        edge_types,
+        prefix,
+        supernode_attribute,
+        superedge_attribute,
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py
new file mode 100644
index 0000000..cb3cc1c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/swap.py
@@ -0,0 +1,406 @@
+"""Swap edges in a graph."""
+
+import math
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = ["double_edge_swap", "connected_double_edge_swap", "directed_edge_swap"]
+
+
+@nx.utils.not_implemented_for("undirected")
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True, returns_graph=True)
+def directed_edge_swap(G, *, nswap=1, max_tries=100, seed=None):
+    """Swap three edges in a directed graph while keeping the node degrees fixed.
+
+    A directed edge swap swaps three edges such that a -> b -> c -> d becomes
+    a -> c -> b -> d. This pattern of swapping allows all possible states with the
+    same in- and out-degree distribution in a directed graph to be reached.
+
+    If the swap would create parallel edges (e.g. if a -> c already existed in the
+    previous example), another attempt is made to find a suitable trio of edges.
+
+    Parameters
+    ----------
+    G : DiGraph
+       A directed graph
+
+    nswap : integer (optional, default=1)
+       Number of three-edge (directed) swaps to perform
+
+    max_tries : integer (optional, default=100)
+       Maximum number of attempts to swap edges
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G : DiGraph
+       The graph after the edges are swapped.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is not directed, or
+        If `nswap` > `max_tries`, or
+        If there are fewer than 4 nodes or 3 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
+    Notes
+    -----
+    Does not enforce any connectivity constraints.
+
+    The graph G is modified in place.
+
+    A later swap is allowed to undo a previous swap.
+
+    References
+    ----------
+    .. [1] Erdős, Péter L., et al. “A Simple Havel-Hakimi Type Algorithm to Realize
+           Graphical Degree Sequences of Directed Graphs.” ArXiv:0905.4913 [Math],
+           Jan. 2010. https://doi.org/10.48550/arXiv.0905.4913.
+           Published 2010 in Elec. J. Combinatorics (17(1)). R66.
+           http://www.combinatorics.org/Volume_17/PDF/v17i1r66.pdf
+    .. [2] “Combinatorics - Reaching All Possible Simple Directed Graphs with a given
+           Degree Sequence with 2-Edge Swaps.” Mathematics Stack Exchange,
+           https://math.stackexchange.com/questions/22272/. Accessed 30 May 2022.
+    """
+    if nswap > max_tries:
+        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
+    if len(G) < 4:
+        raise nx.NetworkXError("DiGraph has fewer than four nodes.")
+    if len(G.edges) < 3:
+        raise nx.NetworkXError("DiGraph has fewer than 3 edges")
+
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
+    tries = 0
+    swapcount = 0
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
+    discrete_sequence = nx.utils.discrete_sequence
+
+    while swapcount < nswap:
+        # choose source node index from discrete distribution
+        start_index = discrete_sequence(1, cdistribution=cdf, seed=seed)[0]
+        start = keys[start_index]
+        tries += 1
+
+        if tries > max_tries:
+            msg = f"Maximum number of swap attempts ({tries}) exceeded before desired swaps achieved ({nswap})."
+            raise nx.NetworkXAlgorithmError(msg)
+
+        # If the given node doesn't have any out edges, then there isn't anything to swap
+        if G.out_degree(start) == 0:
+            continue
+        second = seed.choice(list(G.succ[start]))
+        if start == second:
+            continue
+
+        if G.out_degree(second) == 0:
+            continue
+        third = seed.choice(list(G.succ[second]))
+        if second == third:
+            continue
+
+        if G.out_degree(third) == 0:
+            continue
+        fourth = seed.choice(list(G.succ[third]))
+        if third == fourth:
+            continue
+
+        if (
+            third not in G.succ[start]
+            and fourth not in G.succ[second]
+            and second not in G.succ[third]
+        ):
+            # Perform the three-edge swap
+            G.add_edge(start, third)
+            G.add_edge(third, second)
+            G.add_edge(second, fourth)
+            G.remove_edge(start, second)
+            G.remove_edge(second, third)
+            G.remove_edge(third, fourth)
+            swapcount += 1
+
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True, returns_graph=True)
+def double_edge_swap(G, nswap=1, max_tries=100, seed=None):
+    """Swap two edges in the graph while keeping the node degrees fixed.
+
+    A double-edge swap removes two randomly chosen edges u-v and x-y
+    and creates the new edges u-x and v-y::
+
+             u--v            u  v
+                    becomes  |  |
+             x--y            x  y
+
+    If either of the edges u-x or v-y already exists, no swap is performed
+    and another attempt is made to find a suitable edge pair.
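+
+    For example, here is a sketch of a single swap on a 4-cycle (which
+    particular edges get swapped depends on `seed`; the chosen seed value
+    below is arbitrary, but the degree sequence is always preserved)::
+
+        >>> G = nx.cycle_graph(4)
+        >>> G = nx.double_edge_swap(G, nswap=1, max_tries=100, seed=1)
+        >>> sorted(d for _, d in G.degree())
+        [2, 2, 2, 2]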
+
+    Parameters
+    ----------
+    G : graph
+       An undirected graph
+
+    nswap : integer (optional, default=1)
+       Number of double-edge swaps to perform
+
+    max_tries : integer (optional, default=100)
+       Maximum number of attempts to swap edges
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G : graph
+       The graph after double edge swaps.
+
+    Raises
+    ------
+    NetworkXError
+        If `G` is directed, or
+        If `nswap` > `max_tries`, or
+        If there are fewer than 4 nodes or 2 edges in `G`.
+    NetworkXAlgorithmError
+        If the number of swap attempts exceeds `max_tries` before `nswap` swaps are made
+
+    Notes
+    -----
+    Does not enforce any connectivity constraints.
+
+    The graph G is modified in place.
+    """
+    if G.is_directed():
+        raise nx.NetworkXError(
+            "double_edge_swap() not defined for directed graphs. Use directed_edge_swap instead."
+        )
+    if nswap > max_tries:
+        raise nx.NetworkXError("Number of swaps > number of tries allowed.")
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    if len(G.edges) < 2:
+        raise nx.NetworkXError("Graph has fewer than 2 edges")
+    # Instead of choosing uniformly at random from a generated edge list,
+    # this algorithm chooses nonuniformly from the set of nodes with
+    # probability weighted by degree.
+    n = 0
+    swapcount = 0
+    keys, degrees = zip(*G.degree())  # keys, degree
+    cdf = nx.utils.cumulative_distribution(degrees)  # cdf of degree
+    discrete_sequence = nx.utils.discrete_sequence
+    while swapcount < nswap:
+        # if random.random() < 0.5: continue # trick to avoid periodicities?
+        # pick two random edges without creating edge list
+        # choose source node indices from discrete distribution
+        (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+        if ui == xi:
+            continue  # same source, skip
+        u = keys[ui]  # convert index to label
+        x = keys[xi]
+        # choose target uniformly from neighbors
+        v = seed.choice(list(G[u]))
+        y = seed.choice(list(G[x]))
+        if v == y:
+            continue  # same target, skip
+        if (x not in G[u]) and (y not in G[v]):  # don't create parallel edges
+            G.add_edge(u, x)
+            G.add_edge(v, y)
+            G.remove_edge(u, v)
+            G.remove_edge(x, y)
+            swapcount += 1
+        if n >= max_tries:
+            e = (
+                f"Maximum number of swap attempts ({n}) exceeded "
+                f"before desired swaps achieved ({nswap})."
+            )
+            raise nx.NetworkXAlgorithmError(e)
+        n += 1
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(mutates_input=True)
+def connected_double_edge_swap(G, nswap=1, _window_threshold=3, seed=None):
+    """Attempts the specified number of double-edge swaps in the graph `G`.
+
+    A double-edge swap removes two randomly chosen edges `(u, v)` and `(x,
+    y)` and creates the new edges `(u, x)` and `(v, y)`::
+
+             u--v            u  v
+                    becomes  |  |
+             x--y            x  y
+
+    If either `(u, x)` or `(v, y)` already exists, then no swap is performed,
+    so the actual number of swapped edges is always *at most* `nswap`.
+
+    Parameters
+    ----------
+    G : graph
+       An undirected graph
+
+    nswap : integer (optional, default=1)
+       Number of double-edge swaps to perform
+
+    _window_threshold : integer
+       The window size below which connectedness of the graph will be checked
+       after each swap.
+
+       The "window" in this function is a dynamically updated integer that
+       represents the number of swap attempts to make before checking if the
+       graph remains connected. It is an optimization used to decrease the
+       running time of the algorithm in exchange for increased complexity of
+       implementation.
+
+       If the window size is below this threshold, then the algorithm checks
+       after each swap if the graph remains connected by checking if there is a
+       path joining the two nodes whose edge was just removed. If the window
+       size is above this threshold, then the algorithm performs all the
+       swaps in the window and only then checks if the graph is still connected.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    int
+       The number of successful swaps
+
+    Raises
+    ------
+    NetworkXError
+       If the input graph is not connected, or if the graph has fewer than four
+       nodes.
+
+    Notes
+    -----
+    The initial graph `G` must be connected, and the resulting graph is
+    connected. The graph `G` is modified in place.
+
+    References
+    ----------
+    .. [1] C. Gkantsidis and M. Mihail and E. Zegura,
+           The Markov chain simulation method for generating connected
+           power law random graphs, 2003.
+           http://citeseer.ist.psu.edu/gkantsidis03markov.html
+    """
+    if not nx.is_connected(G):
+        raise nx.NetworkXError("Graph not connected")
+    if len(G) < 4:
+        raise nx.NetworkXError("Graph has fewer than four nodes.")
+    n = 0
+    swapcount = 0
+    deg = G.degree()
+    # Label key for nodes
+    dk = [n for n, d in G.degree()]
+    cdf = nx.utils.cumulative_distribution([d for n, d in G.degree()])
+    discrete_sequence = nx.utils.discrete_sequence
+    window = 1
+    while n < nswap:
+        wcount = 0
+        swapped = []
+        # If the window is small, we check after each swap whether the two
+        # nodes that were just separated are still connected.
+        if window < _window_threshold:
+            # This Boolean keeps track of whether there was a failure or not.
+            fail = False
+            while wcount < window and n < nswap:
+                # Pick two random edges without creating the edge list. Choose
+                # source nodes from the discrete degree distribution.
+                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+                # If the source nodes are the same, skip this pair.
+                if ui == xi:
+                    continue
+                # Convert an index to a node label.
+                u = dk[ui]
+                x = dk[xi]
+                # Choose targets uniformly from neighbors.
+                v = seed.choice(list(G.neighbors(u)))
+                y = seed.choice(list(G.neighbors(x)))
+                # If the target nodes are the same, skip this pair.
+                if v == y:
+                    continue
+                if x not in G[u] and y not in G[v]:
+                    G.remove_edge(u, v)
+                    G.remove_edge(x, y)
+                    G.add_edge(u, x)
+                    G.add_edge(v, y)
+                    swapped.append((u, v, x, y))
+                    swapcount += 1
+                n += 1
+                # If G remains connected...
+                if nx.has_path(G, u, v):
+                    wcount += 1
+                # Otherwise, undo the changes.
+                else:
+                    G.add_edge(u, v)
+                    G.add_edge(x, y)
+                    G.remove_edge(u, x)
+                    G.remove_edge(v, y)
+                    swapcount -= 1
+                    fail = True
+            # If one of the swaps failed, reduce the window size.
+            if fail:
+                window = math.ceil(window / 2)
+            else:
+                window += 1
+        # If the window is large, then there is a good chance that a bunch of
+        # swaps will work. It's quicker to do all those swaps first and then
+        # check if the graph remains connected.
+        else:
+            while wcount < window and n < nswap:
+                # Pick two random edges without creating the edge list. Choose
+                # source nodes from the discrete degree distribution.
+                (ui, xi) = discrete_sequence(2, cdistribution=cdf, seed=seed)
+                # If the source nodes are the same, skip this pair.
+                if ui == xi:
+                    continue
+                # Convert an index to a node label.
+                u = dk[ui]
+                x = dk[xi]
+                # Choose targets uniformly from neighbors.
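+                # (Unlike the small-window branch, nothing is validated per
+                # swap here; connectivity is checked once after the whole
+                # window, and a failed window is rolled back in bulk below.)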
+ v = seed.choice(list(G.neighbors(u))) + y = seed.choice(list(G.neighbors(x))) + # If the target nodes are the same, skip this pair. + if v == y: + continue + if x not in G[u] and y not in G[v]: + G.remove_edge(u, v) + G.remove_edge(x, y) + G.add_edge(u, x) + G.add_edge(v, y) + swapped.append((u, v, x, y)) + swapcount += 1 + n += 1 + wcount += 1 + # If the graph remains connected, increase the window size. + if nx.is_connected(G): + window += 1 + # Otherwise, undo the changes from the previous window and decrease + # the window size. + else: + while swapped: + (u, v, x, y) = swapped.pop() + G.add_edge(u, v) + G.add_edge(x, y) + G.remove_edge(u, x) + G.remove_edge(v, y) + swapcount -= 1 + window = math.ceil(window / 2) + return swapcount diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..25ea490 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-312.pyc new file mode 100644 index 0000000..e3df714 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_asteroidal.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-312.pyc new file mode 100644 index 0000000..c16a133 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_boundary.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-312.pyc new file mode 100644 index 0000000..5c2625c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_bridges.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-312.pyc new file mode 100644 index 0000000..2336ed1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_broadcasting.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-312.pyc new file mode 100644 index 0000000..b52e032 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chains.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-312.pyc new file mode 100644 index 0000000..48dc525 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_chordal.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-312.pyc new file mode 100644 index 0000000..6f3f59e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_clique.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-312.pyc new file mode 100644 index 0000000..cbc50ad Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cluster.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-312.pyc new file mode 100644 index 0000000..df2b831 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_communicability.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-312.pyc new file mode 100644 index 0000000..d4a19af Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_core.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-312.pyc new file mode 100644 index 0000000..f28bda5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_covering.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-312.pyc new file mode 100644 index 0000000..11a3b05 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cuts.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-312.pyc new file mode 100644 index 0000000..a824759 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_cycles.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-312.pyc new file mode 100644 index 0000000..5ccece3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_d_separation.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-312.pyc new file mode 100644 index 0000000..50a61b9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dag.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-312.pyc new file mode 100644 index 0000000..1998a7d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_measures.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-312.pyc new file mode 100644 index 0000000..8e2cae4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_distance_regular.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-312.pyc new file mode 100644 index 0000000..5078529 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominance.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-312.pyc new file mode 100644 index 0000000..ee9904b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_dominating.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-312.pyc new file mode 100644 index 0000000..0267338 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_efficiency.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-312.pyc new file mode 100644 index 0000000..8dfca69 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_euler.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-312.pyc new file mode 100644 index 0000000..27d6791 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graph_hashing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-312.pyc new file mode 100644 index 0000000..82e90b1 Binary 
files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_graphical.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-312.pyc new file mode 100644 index 0000000..2ef0b98 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hierarchy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hybrid.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hybrid.cpython-312.pyc new file mode 100644 index 0000000..34b0609 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_hybrid.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-312.pyc new file mode 100644 index 0000000..f6dbe70 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_isolate.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-312.pyc new file mode 100644 index 0000000..036ca25 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_link_prediction.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-312.pyc new file mode 100644 index 0000000..b53f0f0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_lowest_common_ancestors.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-312.pyc new file mode 100644 index 0000000..ed289e2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_matching.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-312.pyc new file mode 100644 index 0000000..0160b87 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_max_weight_clique.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-312.pyc new file mode 100644 index 0000000..3fd4a5d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_mis.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-312.pyc new file mode 100644 index 0000000..ead4274 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_moral.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-312.pyc new file mode 100644 index 0000000..5cd51d1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_node_classification.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-312.pyc new file mode 100644 index 0000000..33b4a24 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_non_randomness.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-312.pyc new file mode 100644 index 0000000..915a7cc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planar_drawing.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-312.pyc new file mode 100644 index 0000000..23ccff9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_planarity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-312.pyc new file mode 100644 index 0000000..c02bb79 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_polynomials.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-312.pyc new file mode 100644 index 0000000..a823642 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_reciprocity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-312.pyc new file mode 100644 index 0000000..c074ee0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_regular.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-312.pyc new file mode 100644 index 0000000..43abf94 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_richclub.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-312.pyc new file mode 100644 index 0000000..91db20d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_similarity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-312.pyc new file mode 100644 index 0000000..854c485 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_simple_paths.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-312.pyc new file mode 100644 index 0000000..399fde5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smallworld.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-312.pyc new file mode 100644 index 0000000..d492824 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_smetric.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-312.pyc new file mode 100644 index 0000000..a0a990e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_sparsifiers.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-312.pyc new file mode 100644 index 0000000..efa1cdd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_structuralholes.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-312.pyc new file mode 100644 index 0000000..f13d9f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_summarization.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-312.pyc new file mode 100644 index 0000000..4df47a2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_swap.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-312.pyc new file mode 100644 index 0000000..b8af104 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_threshold.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-312.pyc new file mode 100644 index 0000000..ef87e96 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_time_dependent.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-312.pyc new file mode 100644 index 0000000..ee21ad6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_tournament.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_triads.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_triads.cpython-312.pyc new file mode 100644 index 0000000..bec45fb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_triads.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-312.pyc new file mode 100644 index 0000000..15177ac Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_vitality.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-312.pyc new file mode 100644 index 0000000..c814fbb Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_voronoi.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-312.pyc new file mode 100644 index 0000000..ee471b6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_walks.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-312.pyc new file mode 100644 index 0000000..47042af Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/__pycache__/test_wiener.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py new file mode 100644 index 0000000..67131b2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_asteroidal.py @@ -0,0 +1,23 @@ +import networkx as nx + + +def test_is_at_free(): + is_at_free = nx.asteroidal.is_at_free + + cycle 
= nx.cycle_graph(6)
+    assert not is_at_free(cycle)
+
+    path = nx.path_graph(6)
+    assert is_at_free(path)
+
+    small_graph = nx.complete_graph(2)
+    assert is_at_free(small_graph)
+
+    petersen = nx.petersen_graph()
+    assert not is_at_free(petersen)
+
+    clique = nx.complete_graph(6)
+    assert is_at_free(clique)
+
+    line_clique = nx.line_graph(clique)
+    assert not is_at_free(line_clique)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py
new file mode 100644
index 0000000..856be46
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_boundary.py
@@ -0,0 +1,154 @@
+"""Unit tests for the :mod:`networkx.algorithms.boundary` module."""
+
+from itertools import combinations
+
+import pytest
+
+import networkx as nx
+from networkx import convert_node_labels_to_integers as cnlti
+from networkx.utils import edges_equal
+
+
+class TestNodeBoundary:
+    """Unit tests for the :func:`~networkx.node_boundary` function."""
+
+    def test_null_graph(self):
+        """Tests that the null graph has empty node boundaries."""
+        null = nx.null_graph()
+        assert nx.node_boundary(null, []) == set()
+        assert nx.node_boundary(null, [], []) == set()
+        assert nx.node_boundary(null, [1, 2, 3]) == set()
+        assert nx.node_boundary(null, [1, 2, 3], [4, 5, 6]) == set()
+        assert nx.node_boundary(null, [1, 2, 3], [3, 4, 5]) == set()
+
+    def test_path_graph(self):
+        P10 = cnlti(nx.path_graph(10), first_label=1)
+        assert nx.node_boundary(P10, []) == set()
+        assert nx.node_boundary(P10, [], []) == set()
+        assert nx.node_boundary(P10, [1, 2, 3]) == {4}
+        assert nx.node_boundary(P10, [4, 5, 6]) == {3, 7}
+        assert nx.node_boundary(P10, [3, 4, 5, 6, 7]) == {2, 8}
+        assert nx.node_boundary(P10, [8, 9, 10]) == {7}
+        assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set()
+
+    def test_complete_graph(self):
+        K10 = cnlti(nx.complete_graph(10), first_label=1)
+        assert nx.node_boundary(K10, []) == set()
+        assert nx.node_boundary(K10, [], []) == set()
+        assert nx.node_boundary(K10, [1, 2, 3]) == {4, 5, 6, 7, 8, 9, 10}
+        assert nx.node_boundary(K10, [4, 5, 6]) == {1, 2, 3, 7, 8, 9, 10}
+        assert nx.node_boundary(K10, [3, 4, 5, 6, 7]) == {1, 2, 8, 9, 10}
+        assert nx.node_boundary(K10, [4, 5, 6], []) == set()
+        assert nx.node_boundary(K10, K10) == set()
+        assert nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]) == {4, 5}
+
+    def test_petersen(self):
+        """Check boundaries in the Petersen graph
+
+        cheeger(G,k)=min(|bdy(S)|/|S| for |S|=k, 0<k<=|V(G)|/2)
+        """
+
+        def cheeger(G, k):
+            return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k))
+
+        P = nx.petersen_graph()
+        assert cheeger(P, 1) == pytest.approx(3.00, abs=1e-2)
+        assert cheeger(P, 2) == pytest.approx(2.00, abs=1e-2)
+        assert cheeger(P, 3) == pytest.approx(1.67, abs=1e-2)
+        assert cheeger(P, 4) == pytest.approx(1.50, abs=1e-2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py
new file mode 100644
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chains.py
+"""Unit tests for the chain decomposition functions."""
+
+from itertools import cycle, islice
+
+import pytest
+
+import networkx as nx
+
+
+def cycles(seq):
+    """Yields cyclic permutations of the given sequence.
+
+    For example::
+
+        >>> list(cycles("abc"))
+        [('a', 'b', 'c'), ('b', 'c', 'a'), ('c', 'a', 'b')]
+
+    """
+    n = len(seq)
+    cycled_seq = cycle(seq)
+    for x in seq:
+        yield tuple(islice(cycled_seq, n))
+        next(cycled_seq)
+
+
+def cyclic_equals(seq1, seq2):
+    """Decide whether two sequences are equal up to cyclic permutations.
+
+    For example::
+
+        >>> cyclic_equals("xyz", "zxy")
+        True
+        >>> cyclic_equals("xyz", "zyx")
+        False
+
+    """
+    # Cast seq2 to a tuple since `cycles()` yields tuples.
+    seq2 = tuple(seq2)
+    return any(x == seq2 for x in cycles(seq1))
+
+
+class TestChainDecomposition:
+    """Unit tests for the chain decomposition function."""
+
+    def assertContainsChain(self, chain, expected):
+        # A cycle could be expressed in two different orientations, one
+        # forward and one backward, so we need to check for cyclic
+        # equality in both orientations.
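+        # Reversing a chain reverses both the order of its edges and the
+        # orientation of each individual edge.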
+ reversed_chain = list(reversed([tuple(reversed(e)) for e in chain])) + for candidate in expected: + if cyclic_equals(chain, candidate): + break + if cyclic_equals(reversed_chain, candidate): + break + else: + self.fail("chain not found") + + def test_decomposition(self): + edges = [ + # DFS tree edges. + (1, 2), + (2, 3), + (3, 4), + (3, 5), + (5, 6), + (6, 7), + (7, 8), + (5, 9), + (9, 10), + # Nontree edges. + (1, 3), + (1, 4), + (2, 5), + (5, 10), + (6, 8), + ] + G = nx.Graph(edges) + expected = [ + [(1, 3), (3, 2), (2, 1)], + [(1, 4), (4, 3)], + [(2, 5), (5, 3)], + [(5, 10), (10, 9), (9, 5)], + [(6, 8), (8, 7), (7, 6)], + ] + chains = list(nx.chain_decomposition(G, root=1)) + assert len(chains) == len(expected) + + def test_barbell_graph(self): + # The (3, 0) barbell graph has two triangles joined by a single edge. + G = nx.barbell_graph(3, 0) + chains = list(nx.chain_decomposition(G, root=0)) + expected = [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]] + assert len(chains) == len(expected) + for chain in chains: + self.assertContainsChain(chain, expected) + + def test_disconnected_graph(self): + """Test for a graph with multiple connected components.""" + G = nx.barbell_graph(3, 0) + H = nx.barbell_graph(3, 0) + mapping = dict(zip(range(6), "abcdef")) + nx.relabel_nodes(H, mapping, copy=False) + G = nx.union(G, H) + chains = list(nx.chain_decomposition(G)) + expected = [ + [(0, 1), (1, 2), (2, 0)], + [(3, 4), (4, 5), (5, 3)], + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], + ] + assert len(chains) == len(expected) + for chain in chains: + self.assertContainsChain(chain, expected) + + def test_disconnected_graph_root_node(self): + """Test for a single component of a disconnected graph.""" + G = nx.barbell_graph(3, 0) + H = nx.barbell_graph(3, 0) + mapping = dict(zip(range(6), "abcdef")) + nx.relabel_nodes(H, mapping, copy=False) + G = nx.union(G, H) + chains = list(nx.chain_decomposition(G, root="a")) + expected = [ + [("a", "b"), ("b", "c"), ("c", "a")], + [("d", "e"), ("e", "f"), ("f", "d")], + ] + assert len(chains) == len(expected) + for chain in chains: + self.assertContainsChain(chain, expected) + + def test_chain_decomposition_root_not_in_G(self): + """Test chain decomposition when root is not in graph""" + G = nx.Graph() + G.add_nodes_from([1, 2, 3]) + with pytest.raises(nx.NodeNotFound): + nx.has_bridges(G, root=6) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py new file mode 100644 index 0000000..148b22f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_chordal.py @@ -0,0 +1,129 @@ +import pytest + +import networkx as nx + + +class TestMCS: + @classmethod + def setup_class(cls): + # simple graph + connected_chordal_G = nx.Graph() + connected_chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + ] + ) + cls.connected_chordal_G = connected_chordal_G + + chordal_G = nx.Graph() + chordal_G.add_edges_from( + [ + (1, 2), + (1, 3), + (2, 3), + (2, 4), + (3, 4), + (3, 5), + (3, 6), + (4, 5), + (4, 6), + (5, 6), + (7, 8), + ] + ) + chordal_G.add_node(9) + cls.chordal_G = chordal_G + + non_chordal_G = nx.Graph() + non_chordal_G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5), (3, 4), (3, 5)]) + cls.non_chordal_G = non_chordal_G + + self_loop_G = nx.Graph() + self_loop_G.add_edges_from([(1, 1)]) + cls.self_loop_G = 
self_loop_G + + @pytest.mark.parametrize("G", (nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph())) + def test_is_chordal_not_implemented(self, G): + with pytest.raises(nx.NetworkXNotImplemented): + nx.is_chordal(G) + + def test_is_chordal(self): + assert not nx.is_chordal(self.non_chordal_G) + assert nx.is_chordal(self.chordal_G) + assert nx.is_chordal(self.connected_chordal_G) + assert nx.is_chordal(nx.Graph()) + assert nx.is_chordal(nx.complete_graph(3)) + assert nx.is_chordal(nx.cycle_graph(3)) + assert not nx.is_chordal(nx.cycle_graph(5)) + assert nx.is_chordal(self.self_loop_G) + + def test_induced_nodes(self): + G = nx.generators.classic.path_graph(10) + Induced_nodes = nx.find_induced_nodes(G, 1, 9, 2) + assert Induced_nodes == {1, 2, 3, 4, 5, 6, 7, 8, 9} + pytest.raises( + nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes, G, 1, 9, 1 + ) + Induced_nodes = nx.find_induced_nodes(self.chordal_G, 1, 6) + assert Induced_nodes == {1, 2, 4, 6} + pytest.raises(nx.NetworkXError, nx.find_induced_nodes, self.non_chordal_G, 1, 5) + + def test_graph_treewidth(self): + with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"): + nx.chordal_graph_treewidth(self.non_chordal_G) + + def test_chordal_find_cliques(self): + cliques = { + frozenset([9]), + frozenset([7, 8]), + frozenset([1, 2, 3]), + frozenset([2, 3, 4]), + frozenset([3, 4, 5, 6]), + } + assert set(nx.chordal_graph_cliques(self.chordal_G)) == cliques + with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"): + set(nx.chordal_graph_cliques(self.non_chordal_G)) + with pytest.raises(nx.NetworkXError, match="Input graph is not chordal"): + set(nx.chordal_graph_cliques(self.self_loop_G)) + + def test_chordal_find_cliques_path(self): + G = nx.path_graph(10) + cliqueset = nx.chordal_graph_cliques(G) + for u, v in G.edges(): + assert frozenset([u, v]) in cliqueset or frozenset([v, u]) in cliqueset + + def test_chordal_find_cliquesCC(self): + cliques = {frozenset([1, 2, 3]), frozenset([2, 3, 4]), frozenset([3, 4, 5, 6])} + cgc = nx.chordal_graph_cliques + assert set(cgc(self.connected_chordal_G)) == cliques + + def test_complete_to_chordal_graph(self): + fgrg = nx.fast_gnp_random_graph + test_graphs = [ + nx.barbell_graph(6, 2), + nx.cycle_graph(15), + nx.wheel_graph(20), + nx.grid_graph([10, 4]), + nx.ladder_graph(15), + nx.star_graph(5), + nx.bull_graph(), + fgrg(20, 0.3, seed=1), + ] + for G in test_graphs: + H, a = nx.complete_to_chordal_graph(G) + assert nx.is_chordal(H) + assert len(a) == H.number_of_nodes() + if nx.is_chordal(G): + assert G.number_of_edges() == H.number_of_edges() + assert set(a.values()) == {0} + else: + assert len(set(a.values())) == H.number_of_nodes() diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py new file mode 100644 index 0000000..3bee210 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_clique.py @@ -0,0 +1,291 @@ +import pytest + +import networkx as nx +from networkx import convert_node_labels_to_integers as cnlti + + +class TestCliques: + def setup_method(self): + z = [3, 4, 3, 4, 2, 4, 2, 1, 1, 1, 1] + self.G = cnlti(nx.generators.havel_hakimi_graph(z), first_label=1) + self.cl = list(nx.find_cliques(self.G)) + H = nx.complete_graph(6) + H = nx.relabel_nodes(H, {i: i + 1 for i in range(6)}) + H.remove_edges_from([(2, 6), (2, 5), (2, 4), (1, 3), (5, 3)]) + self.H = H + + def test_find_cliques1(self): + cl = 
list(nx.find_cliques(self.G)) + rcl = nx.find_cliques_recursive(self.G) + expected = [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]] + assert sorted(map(sorted, cl)) == sorted(map(sorted, rcl)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) + + def test_selfloops(self): + self.G.add_edge(1, 1) + cl = list(nx.find_cliques(self.G)) + rcl = list(nx.find_cliques_recursive(self.G)) + assert set(map(frozenset, cl)) == set(map(frozenset, rcl)) + answer = [{2, 6, 1, 3}, {2, 6, 4}, {5, 4, 7}, {8, 9}, {10, 11}] + assert len(answer) == len(cl) + assert all(set(c) in answer for c in cl) + + def test_find_cliques2(self): + hcl = list(nx.find_cliques(self.H)) + assert sorted(map(sorted, hcl)) == [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]] + + def test_find_cliques3(self): + # all cliques are [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]] + + cl = list(nx.find_cliques(self.G, [2])) + rcl = nx.find_cliques_recursive(self.G, [2]) + expected = [[2, 6, 1, 3], [2, 6, 4]] + assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) + + cl = list(nx.find_cliques(self.G, [2, 3])) + rcl = nx.find_cliques_recursive(self.G, [2, 3]) + expected = [[2, 6, 1, 3]] + assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) + + cl = list(nx.find_cliques(self.G, [2, 6, 4])) + rcl = nx.find_cliques_recursive(self.G, [2, 6, 4]) + expected = [[2, 6, 4]] + assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) + + cl = list(nx.find_cliques(self.G, [2, 6, 4])) + rcl = nx.find_cliques_recursive(self.G, [2, 6, 4]) + expected = [[2, 6, 4]] + assert sorted(map(sorted, rcl)) == sorted(map(sorted, expected)) + assert sorted(map(sorted, cl)) == sorted(map(sorted, expected)) + + with pytest.raises(ValueError): + list(nx.find_cliques(self.G, [2, 6, 4, 1])) + + with pytest.raises(ValueError): + list(nx.find_cliques_recursive(self.G, [2, 6, 4, 1])) + + def test_number_of_cliques(self): + G = self.G + assert nx.number_of_cliques(G, 1) == 1 + assert list(nx.number_of_cliques(G, [1]).values()) == [1] + assert list(nx.number_of_cliques(G, [1, 2]).values()) == [1, 2] + assert nx.number_of_cliques(G, [1, 2]) == {1: 1, 2: 2} + assert nx.number_of_cliques(G, 2) == 2 + assert nx.number_of_cliques(G) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=list(G)) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, nodes=[2, 3, 4]) == {2: 2, 3: 1, 4: 2} + assert nx.number_of_cliques(G, cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + assert nx.number_of_cliques(G, list(G), cliques=self.cl) == { + 1: 1, + 2: 2, + 3: 1, + 4: 2, + 5: 1, + 6: 2, + 7: 1, + 8: 1, + 9: 1, + 10: 1, + 11: 1, + } + + def test_node_clique_number(self): + G = self.G + assert nx.node_clique_number(G, 1) == 4 + assert list(nx.node_clique_number(G, [1]).values()) == [4] + assert list(nx.node_clique_number(G, [1, 2]).values()) == [4, 4] + assert nx.node_clique_number(G, [1, 2]) == {1: 4, 2: 4} + assert nx.node_clique_number(G, 1) == 4 + assert nx.node_clique_number(G) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } + assert nx.node_clique_number(G, 
cliques=self.cl) == { + 1: 4, + 2: 4, + 3: 4, + 4: 3, + 5: 3, + 6: 4, + 7: 3, + 8: 2, + 9: 2, + 10: 2, + 11: 2, + } + assert nx.node_clique_number(G, [1, 2], cliques=self.cl) == {1: 4, 2: 4} + assert nx.node_clique_number(G, 1, cliques=self.cl) == 4 + + def test_make_clique_bipartite(self): + G = self.G + B = nx.make_clique_bipartite(G) + assert sorted(B) == [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + # Project onto the nodes of the original graph. + H = nx.projected_graph(B, range(1, 12)) + assert H.adj == G.adj + # Project onto the nodes representing the cliques. + H1 = nx.projected_graph(B, range(-5, 0)) + # Relabel the negative numbers as positive ones. + H1 = nx.relabel_nodes(H1, {-v: v for v in range(1, 6)}) + assert sorted(H1) == [1, 2, 3, 4, 5] + + def test_make_max_clique_graph(self): + """Tests that the maximal clique graph is the same as the bipartite + clique graph after being projected onto the nodes representing the + cliques. + + """ + G = self.G + B = nx.make_clique_bipartite(G) + # Project onto the nodes representing the cliques. + H1 = nx.projected_graph(B, range(-5, 0)) + # Relabel the negative numbers as nonnegative ones, starting at + # 0. + H1 = nx.relabel_nodes(H1, {-v: v - 1 for v in range(1, 6)}) + H2 = nx.make_max_clique_graph(G) + assert H1.adj == H2.adj + + def test_directed(self): + with pytest.raises(nx.NetworkXNotImplemented): + next(nx.find_cliques(nx.DiGraph())) + + def test_find_cliques_trivial(self): + G = nx.Graph() + assert sorted(nx.find_cliques(G)) == [] + assert sorted(nx.find_cliques_recursive(G)) == [] + + def test_make_max_clique_graph_create_using(self): + G = nx.Graph([(1, 2), (3, 1), (4, 1), (5, 6)]) + E = nx.Graph([(0, 1), (0, 2), (1, 2)]) + E.add_node(3) + assert nx.is_isomorphic(nx.make_max_clique_graph(G, create_using=nx.Graph), E) + + +class TestEnumerateAllCliques: + def test_paper_figure_4(self): + # Same graph as given in Fig. 4 of paper enumerate_all_cliques is + # based on. 
+ # http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=1559964&isnumber=33129 + G = nx.Graph() + edges_fig_4 = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("b", "c"), + ("b", "d"), + ("b", "e"), + ("c", "d"), + ("c", "e"), + ("d", "e"), + ("f", "b"), + ("f", "c"), + ("f", "g"), + ("g", "f"), + ("g", "c"), + ("g", "d"), + ("g", "e"), + ] + G.add_edges_from(edges_fig_4) + + cliques = list(nx.enumerate_all_cliques(G)) + clique_sizes = list(map(len, cliques)) + assert sorted(clique_sizes) == clique_sizes + + expected_cliques = [ + ["a"], + ["b"], + ["c"], + ["d"], + ["e"], + ["f"], + ["g"], + ["a", "b"], + ["a", "b", "d"], + ["a", "b", "d", "e"], + ["a", "b", "e"], + ["a", "c"], + ["a", "c", "d"], + ["a", "c", "d", "e"], + ["a", "c", "e"], + ["a", "d"], + ["a", "d", "e"], + ["a", "e"], + ["b", "c"], + ["b", "c", "d"], + ["b", "c", "d", "e"], + ["b", "c", "e"], + ["b", "c", "f"], + ["b", "d"], + ["b", "d", "e"], + ["b", "e"], + ["b", "f"], + ["c", "d"], + ["c", "d", "e"], + ["c", "d", "e", "g"], + ["c", "d", "g"], + ["c", "e"], + ["c", "e", "g"], + ["c", "f"], + ["c", "f", "g"], + ["c", "g"], + ["d", "e"], + ["d", "e", "g"], + ["d", "g"], + ["e", "g"], + ["f", "g"], + ["a", "b", "c"], + ["a", "b", "c", "d"], + ["a", "b", "c", "d", "e"], + ["a", "b", "c", "e"], + ] + + assert sorted(map(sorted, cliques)) == sorted(map(sorted, expected_cliques)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py new file mode 100644 index 0000000..6d2a410 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cluster.py @@ -0,0 +1,584 @@ +import pytest + +import networkx as nx + + +def test_square_clustering_adjacent_squares(): + G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (4, 6), (5, 6)]) + # Corner nodes: C_4 == 0.5, central face nodes: C_4 = 1 / 3 + expected = {1: 0.5, 2: 0.5, 3: 1 / 3, 4: 1 / 3, 5: 0.5, 6: 0.5} + assert nx.square_clustering(G) == expected + + +def test_square_clustering_2d_grid(): + G = nx.grid_2d_graph(3, 3) + # Central node: 4 squares out of 20 potential + expected = { + (0, 0): 1 / 3, + (0, 1): 0.25, + (0, 2): 1 / 3, + (1, 0): 0.25, + (1, 1): 0.2, + (1, 2): 0.25, + (2, 0): 1 / 3, + (2, 1): 0.25, + (2, 2): 1 / 3, + } + assert nx.square_clustering(G) == expected + + +def test_square_clustering_multiple_squares_non_complete(): + """An example where all nodes are part of all squares, but not every node + is connected to every other.""" + G = nx.Graph([(0, 1), (0, 2), (1, 3), (2, 3), (1, 4), (2, 4), (1, 5), (2, 5)]) + expected = dict.fromkeys(G, 1) + assert nx.square_clustering(G) == expected + + +class TestTriangles: + def test_empty(self): + G = nx.Graph() + assert list(nx.triangles(G).values()) == [] + + def test_path(self): + G = nx.path_graph(10) + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + assert nx.triangles(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + + def test_cubical(self): + G = nx.cubical_graph() + assert list(nx.triangles(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] + assert nx.triangles(G, 1) == 0 + assert list(nx.triangles(G, [1, 2]).values()) == [0, 0] + assert nx.triangles(G, 1) == 0 + assert nx.triangles(G, [1, 2]) == {1: 0, 2: 0} + + def test_k5(self): + G = nx.complete_graph(5) + assert list(nx.triangles(G).values()) == [6, 6, 6, 6, 6] + assert sum(nx.triangles(G).values()) / 3 == 10 + assert nx.triangles(G, 1) == 6 + 
G.remove_edge(1, 2) + assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5] + assert nx.triangles(G, 1) == 3 + G.add_edge(3, 3) # ignore self-edges + assert list(nx.triangles(G).values()) == [5, 3, 3, 5, 5] + assert nx.triangles(G, 3) == 5 + + +class TestDirectedClustering: + def test_clustering(self): + G = nx.DiGraph() + assert list(nx.clustering(G).values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10, create_using=nx.DiGraph()) + assert list(nx.clustering(G).values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + assert nx.clustering(G, 0) == 0 + + def test_k5(self): + G = nx.complete_graph(5, create_using=nx.DiGraph()) + assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G) == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G).values()) == [ + 11 / 12, + 1, + 1, + 11 / 12, + 11 / 12, + ] + assert nx.clustering(G, [1, 4]) == {1: 1, 4: 11 / 12} + G.remove_edge(2, 1) + assert list(nx.clustering(G).values()) == [ + 5 / 6, + 1, + 1, + 5 / 6, + 5 / 6, + ] + assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337} + assert nx.clustering(G, 4) == 5 / 6 + + def test_triangle_and_edge(self): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(0, 4) + assert nx.clustering(G)[0] == 1 / 6 + + +class TestDirectedWeightedClustering: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def test_clustering(self): + G = nx.DiGraph() + assert list(nx.clustering(G, weight="weight").values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10, create_using=nx.DiGraph()) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + + def test_k5(self): + G = nx.complete_graph(5, create_using=nx.DiGraph()) + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G, weight="weight").values()) == [ + 11 / 12, + 1, + 1, + 11 / 12, + 11 / 12, + ] + assert nx.clustering(G, [1, 4], weight="weight") == {1: 1, 4: 11 / 12} + G.remove_edge(2, 1) + assert list(nx.clustering(G, weight="weight").values()) == [ + 5 / 6, + 1, + 1, + 5 / 6, + 5 / 6, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1, + 4: 0.83333333333333337, + } + + def test_triangle_and_edge(self): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(0, 4, weight=2) + assert nx.clustering(G)[0] == 1 / 6 + # Relaxed comparisons to allow graphblas-algorithms to pass tests + np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 12) + np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 12) + + +class TestWeightedClustering: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def test_clustering(self): + G = nx.Graph() + assert list(nx.clustering(G, weight="weight").values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10) + assert list(nx.clustering(G, weight="weight").values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G, weight="weight") == { + 0: 0, + 1: 0, + 2: 0, + 
3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + + def test_cubical(self): + G = nx.cubical_graph() + assert list(nx.clustering(G, weight="weight").values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G, 1) == 0 + assert list(nx.clustering(G, [1, 2], weight="weight").values()) == [0, 0] + assert nx.clustering(G, 1, weight="weight") == 0 + assert nx.clustering(G, [1, 2], weight="weight") == {1: 0, 2: 0} + + def test_k5(self): + G = nx.complete_graph(5) + assert list(nx.clustering(G, weight="weight").values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G, weight="weight") == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G, weight="weight").values()) == [ + 5 / 6, + 1, + 1, + 5 / 6, + 5 / 6, + ] + assert nx.clustering(G, [1, 4], weight="weight") == { + 1: 1, + 4: 0.83333333333333337, + } + + def test_triangle_and_edge(self): + G = nx.cycle_graph(3) + G.add_edge(0, 4, weight=2) + assert nx.clustering(G)[0] == 1 / 3 + np.testing.assert_allclose(nx.clustering(G, weight="weight")[0], 1 / 6) + np.testing.assert_allclose(nx.clustering(G, 0, weight="weight"), 1 / 6) + + def test_triangle_and_signed_edge(self): + G = nx.cycle_graph(3) + G.add_edge(0, 1, weight=-1) + G.add_edge(3, 0, weight=0) + assert nx.clustering(G)[0] == 1 / 3 + assert nx.clustering(G, weight="weight")[0] == -1 / 3 + + +class TestClustering: + @classmethod + def setup_class(cls): + pytest.importorskip("numpy") + + def test_clustering(self): + G = nx.Graph() + assert list(nx.clustering(G).values()) == [] + assert nx.clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10) + assert list(nx.clustering(G).values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert nx.clustering(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + + def test_cubical(self): + G = nx.cubical_graph() + assert list(nx.clustering(G).values()) == [0, 0, 0, 0, 0, 0, 0, 0] + assert nx.clustering(G, 1) == 0 + assert list(nx.clustering(G, [1, 2]).values()) == [0, 0] + assert nx.clustering(G, 1) == 0 + assert nx.clustering(G, [1, 2]) == {1: 0, 2: 0} + + def test_k5(self): + G = nx.complete_graph(5) + assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G) == 1 + G.remove_edge(1, 2) + assert list(nx.clustering(G).values()) == [ + 5 / 6, + 1, + 1, + 5 / 6, + 5 / 6, + ] + assert nx.clustering(G, [1, 4]) == {1: 1, 4: 0.83333333333333337} + + def test_k5_signed(self): + G = nx.complete_graph(5) + assert list(nx.clustering(G).values()) == [1, 1, 1, 1, 1] + assert nx.average_clustering(G) == 1 + G.remove_edge(1, 2) + G.add_edge(0, 1, weight=-1) + assert list(nx.clustering(G, weight="weight").values()) == [ + 1 / 6, + -1 / 3, + 1, + 3 / 6, + 3 / 6, + ] + + +class TestTransitivity: + def test_transitivity(self): + G = nx.Graph() + assert nx.transitivity(G) == 0 + + def test_path(self): + G = nx.path_graph(10) + assert nx.transitivity(G) == 0 + + def test_cubical(self): + G = nx.cubical_graph() + assert nx.transitivity(G) == 0 + + def test_k5(self): + G = nx.complete_graph(5) + assert nx.transitivity(G) == 1 + G.remove_edge(1, 2) + assert nx.transitivity(G) == 0.875 + + +class TestSquareClustering: + def test_clustering(self): + G = nx.Graph() + assert list(nx.square_clustering(G).values()) == [] + assert nx.square_clustering(G) == {} + + def test_path(self): + G = nx.path_graph(10) + assert list(nx.square_clustering(G).values()) == [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + ] + assert 
nx.square_clustering(G) == { + 0: 0, + 1: 0, + 2: 0, + 3: 0, + 4: 0, + 5: 0, + 6: 0, + 7: 0, + 8: 0, + 9: 0, + } + + def test_cubical(self): + G = nx.cubical_graph() + assert list(nx.square_clustering(G).values()) == [ + 1 / 3, + 1 / 3, + 1 / 3, + 1 / 3, + 1 / 3, + 1 / 3, + 1 / 3, + 1 / 3, + ] + assert list(nx.square_clustering(G, [1, 2]).values()) == [1 / 3, 1 / 3] + assert nx.square_clustering(G, [1])[1] == 1 / 3 + assert nx.square_clustering(G, 1) == 1 / 3 + assert nx.square_clustering(G, [1, 2]) == {1: 1 / 3, 2: 1 / 3} + + def test_k5(self): + G = nx.complete_graph(5) + assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1] + + def test_bipartite_k5(self): + G = nx.complete_bipartite_graph(5, 5) + assert list(nx.square_clustering(G).values()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + + def test_lind_square_clustering(self): + """Test C4 for figure 1 Lind et al (2005)""" + G = nx.Graph( + [ + (1, 2), + (1, 3), + (1, 6), + (1, 7), + (2, 4), + (2, 5), + (3, 4), + (3, 5), + (6, 7), + (7, 8), + (6, 8), + (7, 9), + (7, 10), + (6, 11), + (6, 12), + (2, 13), + (2, 14), + (3, 15), + (3, 16), + ] + ) + G1 = G.subgraph([1, 2, 3, 4, 5, 13, 14, 15, 16]) + G2 = G.subgraph([1, 6, 7, 8, 9, 10, 11, 12]) + assert nx.square_clustering(G, [1])[1] == 3 / 43 + assert nx.square_clustering(G1, [1])[1] == 2 / 6 + assert nx.square_clustering(G2, [1])[1] == 1 / 5 + + def test_peng_square_clustering(self): + """Test eq2 for figure 1 Peng et al (2008)""" + # Example graph from figure 1b + G = nx.Graph([(1, 2), (1, 3), (2, 4), (3, 4), (3, 5), (3, 6)]) + # From table 1, row 2 + expected = {1: 1 / 3, 2: 1, 3: 0.2, 4: 1 / 3, 5: 0, 6: 0} + assert nx.square_clustering(G) == expected + + def test_self_loops_square_clustering(self): + G = nx.path_graph(5) + assert nx.square_clustering(G) == {0: 0, 1: 0, 2: 0, 3: 0, 4: 0} + G.add_edges_from([(0, 0), (1, 1), (2, 2)]) + assert nx.square_clustering(G) == {0: 0, 1: 0, 2: 0, 3: 0, 4: 0} + + +class TestAverageClustering: + @classmethod + def setup_class(cls): + pytest.importorskip("numpy") + + def test_empty(self): + G = nx.Graph() + with pytest.raises(ZeroDivisionError): + nx.average_clustering(G) + + def test_average_clustering(self): + G = nx.cycle_graph(3) + G.add_edge(2, 3) + assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 4 + assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 4 + assert nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 3 + assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 3 + assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 3 + assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 2 + + def test_average_clustering_signed(self): + G = nx.cycle_graph(3) + G.add_edge(2, 3) + G.add_edge(0, 1, weight=-1) + assert nx.average_clustering(G, weight="weight") == (-1 - 1 - 1 / 3) / 4 + assert ( + nx.average_clustering(G, weight="weight", count_zeros=True) + == (-1 - 1 - 1 / 3) / 4 + ) + assert ( + nx.average_clustering(G, weight="weight", count_zeros=False) + == (-1 - 1 - 1 / 3) / 3 + ) + + +class TestDirectedAverageClustering: + @classmethod + def setup_class(cls): + pytest.importorskip("numpy") + + def test_empty(self): + G = nx.DiGraph() + with pytest.raises(ZeroDivisionError): + nx.average_clustering(G) + + def test_average_clustering(self): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(2, 3) + assert nx.average_clustering(G) == (1 + 1 + 1 / 3) / 8 + assert nx.average_clustering(G, count_zeros=True) == (1 + 1 + 1 / 3) / 8 + assert 
nx.average_clustering(G, count_zeros=False) == (1 + 1 + 1 / 3) / 6 + assert nx.average_clustering(G, [1, 2, 3]) == (1 + 1 / 3) / 6 + assert nx.average_clustering(G, [1, 2, 3], count_zeros=True) == (1 + 1 / 3) / 6 + assert nx.average_clustering(G, [1, 2, 3], count_zeros=False) == (1 + 1 / 3) / 4 + + +class TestGeneralizedDegree: + def test_generalized_degree(self): + G = nx.Graph() + assert nx.generalized_degree(G) == {} + + def test_path(self): + G = nx.path_graph(5) + assert nx.generalized_degree(G, 0) == {0: 1} + assert nx.generalized_degree(G, 1) == {0: 2} + + def test_cubical(self): + G = nx.cubical_graph() + assert nx.generalized_degree(G, 0) == {0: 3} + + def test_k5(self): + G = nx.complete_graph(5) + assert nx.generalized_degree(G, 0) == {3: 4} + G.remove_edge(0, 1) + assert nx.generalized_degree(G, 0) == {2: 3} + assert nx.generalized_degree(G, [1, 2]) == {1: {2: 3}, 2: {2: 2, 3: 2}} + assert nx.generalized_degree(G) == { + 0: {2: 3}, + 1: {2: 3}, + 2: {2: 2, 3: 2}, + 3: {2: 2, 3: 2}, + 4: {2: 2, 3: 2}, + } diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py new file mode 100644 index 0000000..0f44709 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_communicability.py @@ -0,0 +1,80 @@ +from collections import defaultdict + +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx +from networkx.algorithms.communicability_alg import communicability, communicability_exp + + +class TestCommunicability: + def test_communicability(self): + answer = { + 0: {0: 1.5430806348152435, 1: 1.1752011936438012}, + 1: {0: 1.1752011936438012, 1: 1.5430806348152435}, + } + # answer={(0, 0): 1.5430806348152435, + # (0, 1): 1.1752011936438012, + # (1, 0): 1.1752011936438012, + # (1, 1): 1.5430806348152435} + + result = communicability(nx.path_graph(2)) + for k1, val in result.items(): + for k2 in val: + assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7) + + def test_communicability2(self): + answer_orig = { + ("1", "1"): 1.6445956054135658, + ("1", "Albert"): 0.7430186221096251, + ("1", "Aric"): 0.7430186221096251, + ("1", "Dan"): 1.6208126320442937, + ("1", "Franck"): 0.42639707170035257, + ("Albert", "1"): 0.7430186221096251, + ("Albert", "Albert"): 2.4368257358712189, + ("Albert", "Aric"): 1.4368257358712191, + ("Albert", "Dan"): 2.0472097037446453, + ("Albert", "Franck"): 1.8340111678944691, + ("Aric", "1"): 0.7430186221096251, + ("Aric", "Albert"): 1.4368257358712191, + ("Aric", "Aric"): 2.4368257358712193, + ("Aric", "Dan"): 2.0472097037446457, + ("Aric", "Franck"): 1.8340111678944691, + ("Dan", "1"): 1.6208126320442937, + ("Dan", "Albert"): 2.0472097037446453, + ("Dan", "Aric"): 2.0472097037446457, + ("Dan", "Dan"): 3.1306328496328168, + ("Dan", "Franck"): 1.4860372442192515, + ("Franck", "1"): 0.42639707170035257, + ("Franck", "Albert"): 1.8340111678944691, + ("Franck", "Aric"): 1.8340111678944691, + ("Franck", "Dan"): 1.4860372442192515, + ("Franck", "Franck"): 2.3876142275231915, + } + + answer = defaultdict(dict) + for (k1, k2), v in answer_orig.items(): + answer[k1][k2] = v + + G1 = nx.Graph( + [ + ("Franck", "Aric"), + ("Aric", "Dan"), + ("Dan", "Albert"), + ("Albert", "Franck"), + ("Dan", "1"), + ("Franck", "Albert"), + ] + ) + + result = communicability(G1) + for k1, val in result.items(): + for k2 in val: + assert answer[k1][k2] == pytest.approx(result[k1][k2], 
abs=1e-7) + + result = communicability_exp(G1) + for k1, val in result.items(): + for k2 in val: + assert answer[k1][k2] == pytest.approx(result[k1][k2], abs=1e-7) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py new file mode 100644 index 0000000..7cbaf75 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_core.py @@ -0,0 +1,266 @@ +import pytest + +import networkx as nx +from networkx.utils import nodes_equal + + +class TestCore: + @classmethod + def setup_class(cls): + # G is the example graph in Figure 1 from Batagelj and + # Zaversnik's paper titled An O(m) Algorithm for Cores + # Decomposition of Networks, 2003, + # http://arXiv.org/abs/cs/0310049. With nodes labeled as + # shown, the 3-core is given by nodes 1-8, the 2-core by nodes + # 9-16, the 1-core by nodes 17-20 and node 21 is in the + # 0-core. + t1 = nx.convert_node_labels_to_integers(nx.tetrahedral_graph(), 1) + t2 = nx.convert_node_labels_to_integers(t1, 5) + G = nx.union(t1, t2) + G.add_edges_from( + [ + (3, 7), + (2, 11), + (11, 5), + (11, 12), + (5, 12), + (12, 19), + (12, 18), + (3, 9), + (7, 9), + (7, 10), + (9, 10), + (9, 20), + (17, 13), + (13, 14), + (14, 15), + (15, 16), + (16, 13), + ] + ) + G.add_node(21) + cls.G = G + + # Create the graph H resulting from the degree sequence + # [0, 1, 2, 2, 2, 2, 3] when using the Havel-Hakimi algorithm. + + degseq = [0, 1, 2, 2, 2, 2, 3] + H = nx.havel_hakimi_graph(degseq) + mapping = {6: 0, 0: 1, 4: 3, 5: 6, 3: 4, 1: 2, 2: 5} + cls.H = nx.relabel_nodes(H, mapping) + + def test_trivial(self): + """Empty graph""" + G = nx.Graph() + assert nx.core_number(G) == {} + + def test_core_number(self): + core = nx.core_number(self.G) + nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(4)] + assert nodes_equal(nodes_by_core[0], [21]) + assert nodes_equal(nodes_by_core[1], [17, 18, 19, 20]) + assert nodes_equal(nodes_by_core[2], [9, 10, 11, 12, 13, 14, 15, 16]) + assert nodes_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8]) + + def test_core_number2(self): + core = nx.core_number(self.H) + nodes_by_core = [sorted(n for n in core if core[n] == val) for val in range(3)] + assert nodes_equal(nodes_by_core[0], [0]) + assert nodes_equal(nodes_by_core[1], [1, 3]) + assert nodes_equal(nodes_by_core[2], [2, 4, 5, 6]) + + def test_core_number_multigraph(self): + G = nx.complete_graph(3) + G = nx.MultiGraph(G) + G.add_edge(1, 2) + with pytest.raises( + nx.NetworkXNotImplemented, match="not implemented for multigraph type" + ): + nx.core_number(G) + + def test_core_number_self_loop(self): + G = nx.cycle_graph(3) + G.add_edge(0, 0) + with pytest.raises( + nx.NetworkXNotImplemented, match="Input graph has self loops" + ): + nx.core_number(G) + + def test_directed_core_number(self): + """core number had a bug for directed graphs found in issue #1959""" + # small example where too timid edge removal can make cn[2] = 3 + G = nx.DiGraph() + edges = [(1, 2), (2, 1), (2, 3), (2, 4), (3, 4), (4, 3)] + G.add_edges_from(edges) + assert nx.core_number(G) == {1: 2, 2: 2, 3: 2, 4: 2} + # small example where too aggressive edge removal can make cn[2] = 2 + more_edges = [(1, 5), (3, 5), (4, 5), (3, 6), (4, 6), (5, 6)] + G.add_edges_from(more_edges) + assert nx.core_number(G) == {1: 3, 2: 3, 3: 3, 4: 3, 5: 3, 6: 3} + + def test_main_core(self): + main_core_subgraph = nx.k_core(self.H) + assert sorted(main_core_subgraph.nodes()) == [2, 4, 5, 6] 
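+ + # With the relabeling above, node 0 has core number 0, nodes 1 and 3 have + # core number 1, and nodes 2, 4, 5 and 6 have core number 2 (cf. + # test_core_number2); the k_core, k_crust, k_shell and k_corona tests + # below each slice this decomposition in a different way.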
+ + def test_k_core(self): + # k=0 + k_core_subgraph = nx.k_core(self.H, k=0) + assert sorted(k_core_subgraph.nodes()) == sorted(self.H.nodes()) + # k=1 + k_core_subgraph = nx.k_core(self.H, k=1) + assert sorted(k_core_subgraph.nodes()) == [1, 2, 3, 4, 5, 6] + # k=2 + k_core_subgraph = nx.k_core(self.H, k=2) + assert sorted(k_core_subgraph.nodes()) == [2, 4, 5, 6] + + def test_k_core_multigraph(self): + core_number = nx.core_number(self.H) + H = nx.MultiGraph(self.H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.k_core(H, k=0, core_number=core_number) + + def test_main_crust(self): + main_crust_subgraph = nx.k_crust(self.H) + assert sorted(main_crust_subgraph.nodes()) == [0, 1, 3] + + def test_k_crust(self): + # k=2 + k_crust_subgraph = nx.k_crust(self.H, k=2) + assert sorted(k_crust_subgraph.nodes()) == sorted(self.H.nodes()) + # k=1 + k_crust_subgraph = nx.k_crust(self.H, k=1) + assert sorted(k_crust_subgraph.nodes()) == [0, 1, 3] + # k=0 + k_crust_subgraph = nx.k_crust(self.H, k=0) + assert sorted(k_crust_subgraph.nodes()) == [0] + + def test_k_crust_multigraph(self): + core_number = nx.core_number(self.H) + H = nx.MultiGraph(self.H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.k_crust(H, k=0, core_number=core_number) + + def test_main_shell(self): + main_shell_subgraph = nx.k_shell(self.H) + assert sorted(main_shell_subgraph.nodes()) == [2, 4, 5, 6] + + def test_k_shell(self): + # k=2 + k_shell_subgraph = nx.k_shell(self.H, k=2) + assert sorted(k_shell_subgraph.nodes()) == [2, 4, 5, 6] + # k=1 + k_shell_subgraph = nx.k_shell(self.H, k=1) + assert sorted(k_shell_subgraph.nodes()) == [1, 3] + # k=0 + k_shell_subgraph = nx.k_shell(self.H, k=0) + assert sorted(k_shell_subgraph.nodes()) == [0] + + def test_k_shell_multigraph(self): + core_number = nx.core_number(self.H) + H = nx.MultiGraph(self.H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.k_shell(H, k=0, core_number=core_number) + + def test_k_corona(self): + # k=2 + k_corona_subgraph = nx.k_corona(self.H, k=2) + assert sorted(k_corona_subgraph.nodes()) == [2, 4, 5, 6] + # k=1 + k_corona_subgraph = nx.k_corona(self.H, k=1) + assert sorted(k_corona_subgraph.nodes()) == [1] + # k=0 + k_corona_subgraph = nx.k_corona(self.H, k=0) + assert sorted(k_corona_subgraph.nodes()) == [0] + + def test_k_corona_multigraph(self): + core_number = nx.core_number(self.H) + H = nx.MultiGraph(self.H) + with pytest.raises(nx.NetworkXNotImplemented): + nx.k_corona(H, k=0, core_number=core_number) + + def test_k_truss(self): + # k=-1 + k_truss_subgraph = nx.k_truss(self.G, -1) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=0 + k_truss_subgraph = nx.k_truss(self.G, 0) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=1 + k_truss_subgraph = nx.k_truss(self.G, 1) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=2 + k_truss_subgraph = nx.k_truss(self.G, 2) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 21)) + # k=3 + k_truss_subgraph = nx.k_truss(self.G, 3) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 13)) + + k_truss_subgraph = nx.k_truss(self.G, 4) + assert sorted(k_truss_subgraph.nodes()) == list(range(1, 9)) + + k_truss_subgraph = nx.k_truss(self.G, 5) + assert sorted(k_truss_subgraph.nodes()) == [] + + def test_k_truss_digraph(self): + G = nx.complete_graph(3) + G = nx.DiGraph(G) + G.add_edge(2, 1) + with pytest.raises( + nx.NetworkXNotImplemented, match="not implemented for directed type" + ): + nx.k_truss(G, k=1) + + def 
test_k_truss_multigraph(self): + G = nx.complete_graph(3) + G = nx.MultiGraph(G) + G.add_edge(1, 2) + with pytest.raises( + nx.NetworkXNotImplemented, match="not implemented for multigraph type" + ): + nx.k_truss(G, k=1) + + def test_k_truss_self_loop(self): + G = nx.cycle_graph(3) + G.add_edge(0, 0) + with pytest.raises( + nx.NetworkXNotImplemented, match="Input graph has self loops" + ): + nx.k_truss(G, k=1) + + def test_onion_layers(self): + layers = nx.onion_layers(self.G) + nodes_by_layer = [ + sorted(n for n in layers if layers[n] == val) for val in range(1, 7) + ] + assert nodes_equal(nodes_by_layer[0], [21]) + assert nodes_equal(nodes_by_layer[1], [17, 18, 19, 20]) + assert nodes_equal(nodes_by_layer[2], [10, 12, 13, 14, 15, 16]) + assert nodes_equal(nodes_by_layer[3], [9, 11]) + assert nodes_equal(nodes_by_layer[4], [1, 2, 4, 5, 6, 8]) + assert nodes_equal(nodes_by_layer[5], [3, 7]) + + def test_onion_digraph(self): + G = nx.complete_graph(3) + G = nx.DiGraph(G) + G.add_edge(2, 1) + with pytest.raises( + nx.NetworkXNotImplemented, match="not implemented for directed type" + ): + nx.onion_layers(G) + + def test_onion_multigraph(self): + G = nx.complete_graph(3) + G = nx.MultiGraph(G) + G.add_edge(1, 2) + with pytest.raises( + nx.NetworkXNotImplemented, match="not implemented for multigraph type" + ): + nx.onion_layers(G) + + def test_onion_self_loop(self): + G = nx.cycle_graph(3) + G.add_edge(0, 0) + with pytest.raises( + nx.NetworkXNotImplemented, match="Input graph contains self loops" + ): + nx.onion_layers(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py new file mode 100644 index 0000000..b2f97a8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_covering.py @@ -0,0 +1,85 @@ +import pytest + +import networkx as nx + + +class TestMinEdgeCover: + """Tests for :func:`networkx.algorithms.min_edge_cover`""" + + def test_empty_graph(self): + G = nx.Graph() + assert nx.min_edge_cover(G) == set() + + def test_graph_with_loop(self): + G = nx.Graph() + G.add_edge(0, 0) + assert nx.min_edge_cover(G) == {(0, 0)} + + def test_graph_with_isolated_v(self): + G = nx.Graph() + G.add_node(1) + with pytest.raises( + nx.NetworkXException, + match="Graph has a node with no edge incident on it, so no edge cover exists.", + ): + nx.min_edge_cover(G) + + def test_graph_single_edge(self): + G = nx.Graph([(0, 1)]) + assert nx.min_edge_cover(G) in ({(0, 1)}, {(1, 0)}) + + def test_graph_two_edge_path(self): + G = nx.path_graph(3) + min_cover = nx.min_edge_cover(G) + assert len(min_cover) == 2 + for u, v in G.edges: + assert (u, v) in min_cover or (v, u) in min_cover + + def test_bipartite_explicit(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3, 4], bipartite=0) + G.add_nodes_from(["a", "b", "c"], bipartite=1) + G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")]) + # Use bipartite method by prescribing the algorithm + min_cover = nx.min_edge_cover( + G, nx.algorithms.bipartite.matching.eppstein_matching + ) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 8 + # Use the default method which is not specialized for bipartite + min_cover2 = nx.min_edge_cover(G) + assert nx.is_edge_cover(G, min_cover2) + assert len(min_cover2) == 4 + + def test_complete_graph_even(self): + G = nx.complete_graph(10) + min_cover = nx.min_edge_cover(G) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 5 + 
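+ # For a complete graph, a minimum edge cover is built from a maximum + # matching: K_10 above needs 10 / 2 = 5 edges, while the odd case K_11 + # below needs one extra edge to cover the last node, ceil(11 / 2) = 6.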
+ def test_complete_graph_odd(self): + G = nx.complete_graph(11) + min_cover = nx.min_edge_cover(G) + assert nx.is_edge_cover(G, min_cover) + assert len(min_cover) == 6 + + +class TestIsEdgeCover: + """Tests for :func:`networkx.algorithms.is_edge_cover`""" + + def test_empty_graph(self): + G = nx.Graph() + assert nx.is_edge_cover(G, set()) + + def test_graph_with_loop(self): + G = nx.Graph() + G.add_edge(1, 1) + assert nx.is_edge_cover(G, {(1, 1)}) + + def test_graph_single_edge(self): + G = nx.Graph() + G.add_edge(0, 1) + assert nx.is_edge_cover(G, {(0, 0), (1, 1)}) + assert nx.is_edge_cover(G, {(0, 1), (1, 0)}) + assert nx.is_edge_cover(G, {(0, 1)}) + assert not nx.is_edge_cover(G, {(0, 0)}) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py new file mode 100644 index 0000000..923efa5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cuts.py @@ -0,0 +1,171 @@ +"""Unit tests for the :mod:`networkx.algorithms.cuts` module.""" + +import networkx as nx + + +class TestCutSize: + """Unit tests for the :func:`~networkx.cut_size` function.""" + + def test_symmetric(self): + """Tests that the cut size is symmetric.""" + G = nx.barbell_graph(3, 0) + S = {0, 1, 4} + T = {2, 3, 5} + assert nx.cut_size(G, S, T) == 4 + assert nx.cut_size(G, T, S) == 4 + + def test_single_edge(self): + """Tests for a cut of a single edge.""" + G = nx.barbell_graph(3, 0) + S = {0, 1, 2} + T = {3, 4, 5} + assert nx.cut_size(G, S, T) == 1 + assert nx.cut_size(G, T, S) == 1 + + def test_directed(self): + """Tests that each directed edge is counted once in the cut.""" + G = nx.barbell_graph(3, 0).to_directed() + S = {0, 1, 2} + T = {3, 4, 5} + assert nx.cut_size(G, S, T) == 2 + assert nx.cut_size(G, T, S) == 2 + + def test_directed_symmetric(self): + """Tests that a cut in a directed graph is symmetric.""" + G = nx.barbell_graph(3, 0).to_directed() + S = {0, 1, 4} + T = {2, 3, 5} + assert nx.cut_size(G, S, T) == 8 + assert nx.cut_size(G, T, S) == 8 + + def test_multigraph(self): + """Tests that parallel edges are each counted for a cut.""" + G = nx.MultiGraph(["ab", "ab"]) + assert nx.cut_size(G, {"a"}, {"b"}) == 2 + + +class TestVolume: + """Unit tests for the :func:`~networkx.volume` function.""" + + def test_graph(self): + G = nx.cycle_graph(4) + assert nx.volume(G, {0, 1}) == 4 + + def test_digraph(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 0)]) + assert nx.volume(G, {0, 1}) == 2 + + def test_multigraph(self): + edges = list(nx.cycle_graph(4).edges()) + G = nx.MultiGraph(edges * 2) + assert nx.volume(G, {0, 1}) == 8 + + def test_multidigraph(self): + edges = [(0, 1), (1, 2), (2, 3), (3, 0)] + G = nx.MultiDiGraph(edges * 2) + assert nx.volume(G, {0, 1}) == 4 + + def test_barbell(self): + G = nx.barbell_graph(3, 0) + assert nx.volume(G, {0, 1, 2}) == 7 + assert nx.volume(G, {3, 4, 5}) == 7 + + +class TestNormalizedCutSize: + """Unit tests for the :func:`~networkx.normalized_cut_size` function.""" + + def test_graph(self): + G = nx.path_graph(4) + S = {1, 2} + T = set(G) - S + size = nx.normalized_cut_size(G, S, T) + # The cut looks like this: o-{-o--o-}-o + expected = 2 * ((1 / 4) + (1 / 2)) + assert expected == size + # Test with no input T + assert expected == nx.normalized_cut_size(G, S) + + def test_directed(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + S = {1, 2} + T = set(G) - S + size = nx.normalized_cut_size(G, S, T) + # The cut looks like this: 
o-{->o-->o-}->o + expected = 2 * ((1 / 2) + (1 / 1)) + assert expected == size + # Test with no input T + assert expected == nx.normalized_cut_size(G, S) + + +class TestConductance: + """Unit tests for the :func:`~networkx.conductance` function.""" + + def test_graph(self): + G = nx.barbell_graph(5, 0) + # Consider the singleton sets containing the "bridge" nodes. + # There is only one cut edge, and each set has volume five. + S = {4} + T = {5} + conductance = nx.conductance(G, S, T) + expected = 1 / 5 + assert expected == conductance + # Test with no input T + G2 = nx.barbell_graph(3, 0) + # There is only one cut edge, and each set has volume seven. + S2 = {0, 1, 2} + assert nx.conductance(G2, S2) == 1 / 7 + + +class TestEdgeExpansion: + """Unit tests for the :func:`~networkx.edge_expansion` function.""" + + def test_graph(self): + G = nx.barbell_graph(5, 0) + S = set(range(5)) + T = set(G) - S + expansion = nx.edge_expansion(G, S, T) + expected = 1 / 5 + assert expected == expansion + # Test with no input T + assert expected == nx.edge_expansion(G, S) + + +class TestNodeExpansion: + """Unit tests for the :func:`~networkx.node_expansion` function.""" + + def test_graph(self): + G = nx.path_graph(8) + S = {3, 4, 5} + expansion = nx.node_expansion(G, S) + # The neighborhood of S has cardinality five, and S has + # cardinality three. + expected = 5 / 3 + assert expected == expansion + + +class TestBoundaryExpansion: + """Unit tests for the :func:`~networkx.boundary_expansion` function.""" + + def test_graph(self): + G = nx.complete_graph(10) + S = set(range(4)) + expansion = nx.boundary_expansion(G, S) + # The node boundary of S has cardinality six, and S has + # cardinality four. + expected = 6 / 4 + assert expected == expansion + + +class TestMixingExpansion: + """Unit tests for the :func:`~networkx.mixing_expansion` function.""" + + def test_graph(self): + G = nx.barbell_graph(5, 0) + S = set(range(5)) + T = set(G) - S + expansion = nx.mixing_expansion(G, S, T) + # There is one cut edge, and the total number of edges in the + # graph is twice the total number of edges in a clique of size + # five, plus one more for the bridge. 
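+ # (That is, m = 2 * 10 + 1 = 21 edges in total, and the mixing expansion + # divides the single cut edge by 2 * m, giving 1 / 42.)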
+ expected = 1 / (2 * (5 * 4 + 1)) + assert expected == expansion diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py new file mode 100644 index 0000000..1b43929 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_cycles.py @@ -0,0 +1,984 @@ +import random +from itertools import chain, islice, tee +from math import inf + +import pytest + +import networkx as nx +from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE + + +def check_independent(basis): + if len(basis) == 0: + return + + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") # Required by incidence_matrix + + H = nx.Graph() + for b in basis: + nx.add_cycle(H, b) + inc = nx.incidence_matrix(H, oriented=True) + rank = np.linalg.matrix_rank(inc.toarray(), tol=None, hermitian=False) + assert inc.shape[1] - rank == len(basis) + + +class TestCycles: + @classmethod + def setup_class(cls): + G = nx.Graph() + nx.add_cycle(G, [0, 1, 2, 3]) + nx.add_cycle(G, [0, 3, 4, 5]) + nx.add_cycle(G, [0, 1, 6, 7, 8]) + G.add_edge(8, 9) + cls.G = G + + def is_cyclic_permutation(self, a, b): + n = len(a) + if len(b) != n: + return False + l = a + a + return any(l[i : i + n] == b for i in range(n)) + + def test_cycle_basis(self): + G = self.G + cy = nx.cycle_basis(G, 0) + sort_cy = sorted(sorted(c) for c in cy) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] + cy = nx.cycle_basis(G, 1) + sort_cy = sorted(sorted(c) for c in cy) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] + cy = nx.cycle_basis(G, 9) + sort_cy = sorted(sorted(c) for c in cy) + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5]] + # test disconnected graphs + nx.add_cycle(G, "ABC") + cy = nx.cycle_basis(G, 9) + sort_cy = sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])] + assert sort_cy == [[0, 1, 2, 3], [0, 1, 6, 7, 8], [0, 3, 4, 5], ["A", "B", "C"]] + + def test_cycle_basis2(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + cy = nx.cycle_basis(G, 0) + + def test_cycle_basis3(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.MultiGraph() + cy = nx.cycle_basis(G, 0) + + def test_cycle_basis_ordered(self): + # see gh-6654: replace sets with (ordered) dicts + G = nx.cycle_graph(5) + G.update(nx.cycle_graph(range(3, 8))) + cbG = nx.cycle_basis(G) + + perm = {1: 0, 0: 1} # switch 0 and 1 + H = nx.relabel_nodes(G, perm) + cbH = [[perm.get(n, n) for n in cyc] for cyc in nx.cycle_basis(H)] + assert cbG == cbH + + def test_cycle_basis_self_loop(self): + """Tests the function for graphs with self loops""" + G = nx.Graph() + nx.add_cycle(G, [0, 1, 2, 3]) + nx.add_cycle(G, [0, 0, 6, 2]) + cy = nx.cycle_basis(G) + sort_cy = sorted(sorted(c) for c in cy) + assert sort_cy == [[0], [0, 1, 2], [0, 2, 3], [0, 2, 6]] + + def test_simple_cycles(self): + edges = [(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)] + G = nx.DiGraph(edges) + cc = sorted(nx.simple_cycles(G)) + ca = [[0], [0, 1, 2], [0, 2], [1, 2], [2]] + assert len(cc) == len(ca) + for c in cc: + assert any(self.is_cyclic_permutation(c, rc) for rc in ca) + + def test_simple_cycles_singleton(self): + G = nx.Graph([(0, 0)]) # self-loop + assert list(nx.simple_cycles(G)) == [[0]] + + def test_unsortable(self): + # this test ensures that graphs whose nodes lack an intrinsic + # ordering do not cause issues + G = nx.DiGraph() + nx.add_cycle(G, ["a", 1]) + c = 
list(nx.simple_cycles(G)) + assert len(c) == 1 + + def test_simple_cycles_small(self): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3]) + c = sorted(nx.simple_cycles(G)) + assert len(c) == 1 + assert self.is_cyclic_permutation(c[0], [1, 2, 3]) + nx.add_cycle(G, [10, 20, 30]) + cc = sorted(nx.simple_cycles(G)) + assert len(cc) == 2 + ca = [[1, 2, 3], [10, 20, 30]] + for c in cc: + assert any(self.is_cyclic_permutation(c, rc) for rc in ca) + + def test_simple_cycles_empty(self): + G = nx.DiGraph() + assert list(nx.simple_cycles(G)) == [] + + def worst_case_graph(self, k): + # see figure 1 in Johnson's paper + # this graph has exactly 3k simple cycles + G = nx.DiGraph() + for n in range(2, k + 2): + G.add_edge(1, n) + G.add_edge(n, k + 2) + G.add_edge(2 * k + 1, 1) + for n in range(k + 2, 2 * k + 2): + G.add_edge(n, 2 * k + 2) + G.add_edge(n, n + 1) + G.add_edge(2 * k + 3, k + 2) + for n in range(2 * k + 3, 3 * k + 3): + G.add_edge(2 * k + 2, n) + G.add_edge(n, 3 * k + 3) + G.add_edge(3 * k + 3, 2 * k + 2) + return G + + def test_worst_case_graph(self): + # see figure 1 in Johnson's paper + for k in range(3, 10): + G = self.worst_case_graph(k) + l = len(list(nx.simple_cycles(G))) + assert l == 3 * k + + def test_recursive_simple_and_not(self): + for k in range(2, 10): + G = self.worst_case_graph(k) + cc = sorted(nx.simple_cycles(G)) + rcc = sorted(nx.recursive_simple_cycles(G)) + assert len(cc) == len(rcc) + for c in cc: + assert any(self.is_cyclic_permutation(c, r) for r in rcc) + for rc in rcc: + assert any(self.is_cyclic_permutation(rc, c) for c in cc) + + def test_simple_graph_with_reported_bug(self): + G = nx.DiGraph() + edges = [ + (0, 2), + (0, 3), + (1, 0), + (1, 3), + (2, 1), + (2, 4), + (3, 2), + (3, 4), + (4, 0), + (4, 1), + (4, 5), + (5, 0), + (5, 1), + (5, 2), + (5, 3), + ] + G.add_edges_from(edges) + cc = sorted(nx.simple_cycles(G)) + assert len(cc) == 26 + rcc = sorted(nx.recursive_simple_cycles(G)) + assert len(cc) == len(rcc) + for c in cc: + assert any(self.is_cyclic_permutation(c, rc) for rc in rcc) + for rc in rcc: + assert any(self.is_cyclic_permutation(rc, c) for c in cc) + + +def pairwise(iterable): + a, b = tee(iterable) + next(b, None) + return zip(a, b) + + +def cycle_edges(c): + return pairwise(chain(c, islice(c, 1))) + + +def directed_cycle_edgeset(c): + return frozenset(cycle_edges(c)) + + +def undirected_cycle_edgeset(c): + if len(c) == 1: + return frozenset(cycle_edges(c)) + return frozenset(map(frozenset, cycle_edges(c))) + + +def multigraph_cycle_edgeset(c): + if len(c) <= 2: + return frozenset(cycle_edges(c)) + else: + return frozenset(map(frozenset, cycle_edges(c))) + + +class TestCycleEnumeration: + @staticmethod + def K(n): + return nx.complete_graph(n) + + @staticmethod + def D(n): + return nx.complete_graph(n).to_directed() + + @staticmethod + def edgeset_function(g): + if g.is_directed(): + return directed_cycle_edgeset + elif g.is_multigraph(): + return multigraph_cycle_edgeset + else: + return undirected_cycle_edgeset + + def check_cycle(self, g, c, es, cache, source, original_c, length_bound, chordless): + if length_bound is not None and len(c) > length_bound: + raise RuntimeError( + f"computed cycle {original_c} exceeds length bound {length_bound}" + ) + if source == "computed": + if es in cache: + raise RuntimeError( + f"computed cycle {original_c} has already been found!" 
+ ) + else: + cache[es] = tuple(original_c) + else: + if es in cache: + cache.pop(es) + else: + raise RuntimeError(f"expected cycle {original_c} was not computed") + + if not all(g.has_edge(*e) for e in es): + raise RuntimeError( + f"{source} claimed cycle {original_c} is not a cycle of g" + ) + if chordless and len(g.subgraph(c).edges) > len(c): + raise RuntimeError(f"{source} cycle {original_c} is not chordless") + + def check_cycle_algorithm( + self, + g, + expected_cycles, + length_bound=None, + chordless=False, + algorithm=None, + ): + if algorithm is None: + algorithm = nx.chordless_cycles if chordless else nx.simple_cycles + + # note: we shuffle the labels of g to rule out accidentally-correct + # behavior which occurred during the development of chordless cycle + # enumeration algorithms + + relabel = list(range(len(g))) + rng = random.Random(42) + rng.shuffle(relabel) + label = dict(zip(g, relabel)) + unlabel = dict(zip(relabel, g)) + h = nx.relabel_nodes(g, label, copy=True) + + edgeset = self.edgeset_function(h) + + params = {} + if length_bound is not None: + params["length_bound"] = length_bound + + cycle_cache = {} + for c in algorithm(h, **params): + original_c = [unlabel[x] for x in c] + es = edgeset(c) + self.check_cycle( + h, c, es, cycle_cache, "computed", original_c, length_bound, chordless + ) + + if isinstance(expected_cycles, int): + if len(cycle_cache) != expected_cycles: + raise RuntimeError( + f"expected {expected_cycles} cycles, got {len(cycle_cache)}" + ) + return + for original_c in expected_cycles: + c = [label[x] for x in original_c] + es = edgeset(c) + self.check_cycle( + h, c, es, cycle_cache, "expected", original_c, length_bound, chordless + ) + + if len(cycle_cache): + for c in cycle_cache.values(): + raise RuntimeError( + f"computed cycle {c} is valid but not in the expected cycle set!" 
+ ) + + def check_cycle_enumeration_integer_sequence( + self, + g_family, + cycle_counts, + length_bound=None, + chordless=False, + algorithm=None, + ): + for g, num_cycles in zip(g_family, cycle_counts): + self.check_cycle_algorithm( + g, + num_cycles, + length_bound=length_bound, + chordless=chordless, + algorithm=algorithm, + ) + + def test_directed_chordless_cycle_digons(self): + g = nx.DiGraph() + nx.add_cycle(g, range(5)) + nx.add_cycle(g, range(5)[::-1]) + g.add_edge(0, 0) + expected_cycles = [(0,), (1, 2), (2, 3), (3, 4)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=2) + + expected_cycles = [c for c in expected_cycles if len(c) < 2] + self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=1) + + def test_chordless_cycles_multigraph_self_loops(self): + G = nx.MultiGraph([(1, 1), (2, 2), (1, 2), (1, 2)]) + expected_cycles = [[1], [2]] + self.check_cycle_algorithm(G, expected_cycles, chordless=True) + + G.add_edges_from([(2, 3), (3, 4), (3, 4), (1, 3)]) + expected_cycles = [[1], [2], [3, 4]] + self.check_cycle_algorithm(G, expected_cycles, chordless=True) + + def test_directed_chordless_cycle_undirected(self): + g = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5), (5, 0), (5, 1), (0, 2)]) + expected_cycles = [(0, 2, 3, 4, 5), (1, 2, 3, 4, 5)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + g = nx.DiGraph() + nx.add_cycle(g, range(5)) + nx.add_cycle(g, range(4, 9)) + g.add_edge(7, 3) + expected_cycles = [(0, 1, 2, 3, 4), (3, 4, 5, 6, 7), (4, 5, 6, 7, 8)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + g.add_edge(3, 7) + expected_cycles = [(0, 1, 2, 3, 4), (3, 7), (4, 5, 6, 7, 8)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + expected_cycles = [(3, 7)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True, length_bound=4) + + g.remove_edge(7, 3) + expected_cycles = [(0, 1, 2, 3, 4), (4, 5, 6, 7, 8)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + g = nx.DiGraph((i, j) for i in range(10) for j in range(i)) + expected_cycles = [] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + def test_chordless_cycles_directed(self): + G = nx.DiGraph() + nx.add_cycle(G, range(5)) + nx.add_cycle(G, range(4, 12)) + expected = [[*range(5)], [*range(4, 12)]] + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + G.add_edge(7, 3) + expected.append([*range(3, 8)]) + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + G.add_edge(3, 7) + expected[-1] = [7, 3] + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + expected.pop() + G.remove_edge(7, 3) + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + def test_directed_chordless_cycle_diclique(self): + g_family = [self.D(n) for n in range(10)] + expected_cycles = [(n * n - n) // 2 for n in range(10)] + self.check_cycle_enumeration_integer_sequence( + g_family, expected_cycles, chordless=True + ) + + expected_cycles = [(n * n - n) // 2 for n in range(10)] + 
self.check_cycle_enumeration_integer_sequence( + g_family, expected_cycles, length_bound=2 + ) + + def test_directed_chordless_loop_blockade(self): + g = nx.DiGraph((i, i) for i in range(10)) + nx.add_cycle(g, range(10)) + expected_cycles = [(i,) for i in range(10)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + self.check_cycle_algorithm(g, expected_cycles, length_bound=1) + + g = nx.MultiDiGraph(g) + g.add_edges_from((i, i) for i in range(0, 10, 2)) + expected_cycles = [(i,) for i in range(1, 10, 2)] + self.check_cycle_algorithm(g, expected_cycles, chordless=True) + + def test_simple_cycles_notable_clique_sequences(self): + # A000292: Number of labeled graphs on n+3 nodes that are triangles. + g_family = [self.K(n) for n in range(2, 12)] + expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, length_bound=3 + ) + + def triangles(g, **kwargs): + yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 3) + + # directed complete graphs have twice as many triangles thanks to reversal + g_family = [self.D(n) for n in range(2, 12)] + expected = [2 * e for e in expected] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, length_bound=3, algorithm=triangles + ) + + def four_cycles(g, **kwargs): + yield from (c for c in nx.simple_cycles(g, **kwargs) if len(c) == 4) + + # A050534: the number of 4-cycles in the complete graph K_{n+1} + expected = [0, 0, 0, 3, 15, 45, 105, 210, 378, 630, 990] + g_family = [self.K(n) for n in range(1, 12)] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, length_bound=4, algorithm=four_cycles + ) + + # directed complete graphs have twice as many 4-cycles thanks to reversal + expected = [2 * e for e in expected] + g_family = [self.D(n) for n in range(1, 15)] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, length_bound=4, algorithm=four_cycles + ) + + # A006231: the number of elementary circuits in a complete directed graph with n nodes + expected = [0, 1, 5, 20, 84, 409, 2365] + g_family = [self.D(n) for n in range(1, 8)] + self.check_cycle_enumeration_integer_sequence(g_family, expected) + + # A002807: Number of cycles in the complete graph on n nodes K_{n}. 
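+ # (e.g. expected[4] == 7 below, since K_4 contains 4 triangles and 3 + # four-cycles.)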
+ expected = [0, 0, 0, 1, 7, 37, 197, 1172] + g_family = [self.K(n) for n in range(8)] + self.check_cycle_enumeration_integer_sequence(g_family, expected) + + def test_directed_chordless_cycle_parallel_multiedges(self): + g = nx.MultiGraph() + + nx.add_cycle(g, range(5)) + expected = [[*range(5)]] + self.check_cycle_algorithm(g, expected, chordless=True) + + nx.add_cycle(g, range(5)) + expected = [*cycle_edges(range(5))] + self.check_cycle_algorithm(g, expected, chordless=True) + + nx.add_cycle(g, range(5)) + expected = [] + self.check_cycle_algorithm(g, expected, chordless=True) + + g = nx.MultiDiGraph() + + nx.add_cycle(g, range(5)) + expected = [[*range(5)]] + self.check_cycle_algorithm(g, expected, chordless=True) + + nx.add_cycle(g, range(5)) + self.check_cycle_algorithm(g, [], chordless=True) + + nx.add_cycle(g, range(5)) + self.check_cycle_algorithm(g, [], chordless=True) + + g = nx.MultiDiGraph() + + nx.add_cycle(g, range(5)) + nx.add_cycle(g, range(5)[::-1]) + expected = [*cycle_edges(range(5))] + self.check_cycle_algorithm(g, expected, chordless=True) + + nx.add_cycle(g, range(5)) + self.check_cycle_algorithm(g, [], chordless=True) + + def test_chordless_cycles_graph(self): + G = nx.Graph() + nx.add_cycle(G, range(5)) + nx.add_cycle(G, range(4, 12)) + expected = [[*range(5)], [*range(4, 12)]] + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + G.add_edge(7, 3) + expected.append([*range(3, 8)]) + expected.append([4, 3, 7, 8, 9, 10, 11]) + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 5], length_bound=5, chordless=True + ) + + def test_chordless_cycles_giant_hamiltonian(self): + # ... o - e - o - e - o ... # o = odd, e = even + # ... ---/ \-----/ \--- ... # <-- "long" edges + # + # each long edge belongs to exactly one triangle, and one giant cycle + # of length n/2. The remaining edges each belong to a triangle + + n = 1000 + assert n % 2 == 0 + G = nx.Graph() + for v in range(n): + if not v % 2: + G.add_edge(v, (v + 2) % n) + G.add_edge(v, (v + 1) % n) + + expected = [[*range(0, n, 2)]] + [ + [x % n for x in range(i, i + 3)] for i in range(0, n, 2) + ] + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True + ) + + # ... o -> e -> o -> e -> o ... # o = odd, e = even + # ... <---/ \---<---/ \---< ... # <-- "long" edges + # + # this time, we orient the short and long edges in opposition + # the cycle structure of this graph is the same, but we need to reverse + # the long one in our representation. 
Also, we need to drop the size + # because our partitioning algorithm uses strongly connected components + # instead of separating graphs by their strong articulation points + + n = 100 + assert n % 2 == 0 + G = nx.DiGraph() + for v in range(n): + G.add_edge(v, (v + 1) % n) + if not v % 2: + G.add_edge((v + 2) % n, v) + + expected = [[*range(n - 2, -2, -2)]] + [ + [x % n for x in range(i, i + 3)] for i in range(0, n, 2) + ] + self.check_cycle_algorithm(G, expected, chordless=True) + self.check_cycle_algorithm( + G, [c for c in expected if len(c) <= 3], length_bound=3, chordless=True + ) + + def test_simple_cycles_acyclic_tournament(self): + n = 10 + G = nx.DiGraph((x, y) for x in range(n) for y in range(x)) + self.check_cycle_algorithm(G, []) + self.check_cycle_algorithm(G, [], chordless=True) + + for k in range(n + 1): + self.check_cycle_algorithm(G, [], length_bound=k) + self.check_cycle_algorithm(G, [], length_bound=k, chordless=True) + + def test_simple_cycles_graph(self): + testG = nx.cycle_graph(8) + cyc1 = tuple(range(8)) + self.check_cycle_algorithm(testG, [cyc1]) + + testG.add_edge(4, -1) + nx.add_path(testG, [3, -2, -3, -4]) + self.check_cycle_algorithm(testG, [cyc1]) + + testG.update(nx.cycle_graph(range(8, 16))) + cyc2 = tuple(range(8, 16)) + self.check_cycle_algorithm(testG, [cyc1, cyc2]) + + testG.update(nx.cycle_graph(range(4, 12))) + cyc3 = tuple(range(4, 12)) + expected = { + (0, 1, 2, 3, 4, 5, 6, 7), # cyc1 + (8, 9, 10, 11, 12, 13, 14, 15), # cyc2 + (4, 5, 6, 7, 8, 9, 10, 11), # cyc3 + (4, 5, 6, 7, 8, 15, 14, 13, 12, 11), # cyc2 + cyc3 + (0, 1, 2, 3, 4, 11, 10, 9, 8, 7), # cyc1 + cyc3 + (0, 1, 2, 3, 4, 11, 12, 13, 14, 15, 8, 7), # cyc1 + cyc2 + cyc3 + } + self.check_cycle_algorithm(testG, expected) + assert len(expected) == (2**3 - 1) - 1 # 1 disjoint comb: cyc1 + cyc2 + + # Basis size = 5 (2 loops overlapping gives 5 small loops + # E + # / \ Note: A-F = 10-15 + # 1-2-3-4-5 + # / | | \ cyc1=012DAB -- left + # 0 D F 6 cyc2=234E -- top + # \ | | / cyc3=45678F -- right + # B-A-9-8-7 cyc4=89AC -- bottom + # \ / cyc5=234F89AD -- middle + # C + # + # combinations of 5 basis elements: 2^5 - 1 (one includes no cycles) + # + # disjoint combs: (11 total) not simple cycles + # Any pair not including cyc5 => choose(4, 2) = 6 + # Any triple not including cyc5 => choose(4, 3) = 4 + # Any quad not including cyc5 => choose(4, 4) = 1 + # + # we expect 31 - 11 = 20 simple cycles + # + testG = nx.cycle_graph(12) + testG.update(nx.cycle_graph([12, 10, 13, 2, 14, 4, 15, 8]).edges) + expected = (2**5 - 1) - 11 # 11 disjoint combinations + self.check_cycle_algorithm(testG, expected) + + def test_simple_cycles_bounded(self): + # iteratively construct a cluster of nested cycles running in the same direction + # there should be one cycle of every length + d = nx.DiGraph() + expected = [] + for n in range(10): + nx.add_cycle(d, range(n)) + expected.append(n) + for k, e in enumerate(expected): + self.check_cycle_algorithm(d, e, length_bound=k) + + # iteratively construct a path of undirected cycles, connected at articulation + # points. 
there should be one cycle of every length except 2: no digons + g = nx.Graph() + top = 0 + expected = [] + for n in range(10): + expected.append(n if n < 2 else n - 1) + if n == 2: + # no digons in undirected graphs + continue + nx.add_cycle(g, range(top, top + n)) + top += n + for k, e in enumerate(expected): + self.check_cycle_algorithm(g, e, length_bound=k) + + def test_simple_cycles_bound_corner_cases(self): + G = nx.cycle_graph(4) + DG = nx.cycle_graph(4, create_using=nx.DiGraph) + assert list(nx.simple_cycles(G, length_bound=0)) == [] + assert list(nx.simple_cycles(DG, length_bound=0)) == [] + assert list(nx.chordless_cycles(G, length_bound=0)) == [] + assert list(nx.chordless_cycles(DG, length_bound=0)) == [] + + def test_simple_cycles_bound_error(self): + with pytest.raises(ValueError): + G = nx.DiGraph() + for c in nx.simple_cycles(G, -1): + assert False + + with pytest.raises(ValueError): + G = nx.Graph() + for c in nx.simple_cycles(G, -1): + assert False + + with pytest.raises(ValueError): + G = nx.Graph() + for c in nx.chordless_cycles(G, -1): + assert False + + with pytest.raises(ValueError): + G = nx.DiGraph() + for c in nx.chordless_cycles(G, -1): + assert False + + def test_chordless_cycles_clique(self): + g_family = [self.K(n) for n in range(2, 15)] + expected = [0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220, 286, 364] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, chordless=True + ) + + # directed cliques have as many digons as undirected graphs have edges + expected = [(n * n - n) // 2 for n in range(15)] + g_family = [self.D(n) for n in range(15)] + self.check_cycle_enumeration_integer_sequence( + g_family, expected, chordless=True + ) + + +# These tests might fail with hash randomization since they depend on +# edge_dfs. For more information, see the comments in: +# networkx/algorithms/traversal/tests/test_edgedfs.py + + +class TestFindCycle: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(-1, 0), (0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] + + def test_graph_nocycle(self): + G = nx.Graph(self.edges) + pytest.raises(nx.exception.NetworkXNoCycle, nx.find_cycle, G, self.nodes) + + def test_graph_cycle(self): + G = nx.Graph(self.edges) + G.add_edge(2, 0) + x = list(nx.find_cycle(G, self.nodes)) + x_ = [(0, 1), (1, 2), (2, 0)] + assert x == x_ + + def test_graph_orientation_none(self): + G = nx.Graph(self.edges) + G.add_edge(2, 0) + x = list(nx.find_cycle(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 2), (2, 0)] + assert x == x_ + + def test_graph_orientation_original(self): + G = nx.Graph(self.edges) + G.add_edge(2, 0) + x = list(nx.find_cycle(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 0, FORWARD)] + assert x == x_ + + def test_digraph(self): + G = nx.DiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes)) + x_ = [(0, 1), (1, 0)] + assert x == x_ + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] + assert x == x_ + + def test_multigraph(self): + G = nx.MultiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 1)] # or (1, 0, 2) + # Hash randomization...could be any edge. 
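+ # (so only the endpoints of the second edge are compared below; its + # parallel-edge key may be 1 or 2.)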
+ assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] + + def test_multidigraph(self): + G = nx.MultiDiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 0)] # or (1, 0, 1) + assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] + + def test_digraph_ignore(self): + G = nx.DiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes, orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD)] + assert x == x_ + + def test_digraph_reverse(self): + G = nx.DiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes, orientation="reverse")) + x_ = [(1, 0, REVERSE), (0, 1, REVERSE)] + assert x == x_ + + def test_multidigraph_ignore(self): + G = nx.MultiDiGraph(self.edges) + x = list(nx.find_cycle(G, self.nodes, orientation="ignore")) + x_ = [(0, 1, 0, FORWARD), (1, 0, 0, FORWARD)] # or (1, 0, 1, 1) + assert x[0] == x_[0] + assert x[1][:2] == x_[1][:2] + assert x[1][3] == x_[1][3] + + def test_multidigraph_ignore2(self): + # Loop traversed an edge while ignoring its orientation. + G = nx.MultiDiGraph([(0, 1), (1, 2), (1, 2)]) + x = list(nx.find_cycle(G, [0, 1, 2], orientation="ignore")) + x_ = [(1, 2, 0, FORWARD), (1, 2, 1, REVERSE)] + assert x == x_ + + def test_multidigraph_original(self): + # Node 2 doesn't need to be searched again when visited from 4. + # The goal here is to cover the case where 2 would be re-searched from 4 + # when 4 is visited for the first time (so we must make sure that 4 + # is not visited from 2, and hence that we respect the edge orientation). + G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 3), (4, 2)]) + pytest.raises( + nx.exception.NetworkXNoCycle, + nx.find_cycle, + G, + [0, 1, 2, 3, 4], + orientation="original", + ) + + def test_dag(self): + G = nx.DiGraph([(0, 1), (0, 2), (1, 2)]) + pytest.raises( + nx.exception.NetworkXNoCycle, nx.find_cycle, G, orientation="original" + ) + x = list(nx.find_cycle(G, orientation="ignore")) + assert x == [(0, 1, FORWARD), (1, 2, FORWARD), (0, 2, REVERSE)] + + def test_prev_explored(self): + # https://github.com/networkx/networkx/issues/2323 + + G = nx.DiGraph() + G.add_edges_from([(1, 0), (2, 0), (1, 2), (2, 1)]) + pytest.raises(nx.NetworkXNoCycle, nx.find_cycle, G, source=0) + x = list(nx.find_cycle(G, 1)) + x_ = [(1, 2), (2, 1)] + assert x == x_ + + x = list(nx.find_cycle(G, 2)) + x_ = [(2, 1), (1, 2)] + assert x == x_ + + x = list(nx.find_cycle(G)) + x_ = [(1, 2), (2, 1)] + assert x == x_ + + def test_no_cycle(self): + # https://github.com/networkx/networkx/issues/2439 + + G = nx.DiGraph() + G.add_edges_from([(1, 2), (2, 0), (3, 1), (3, 2)]) + pytest.raises(nx.NetworkXNoCycle, nx.find_cycle, G, source=0) + pytest.raises(nx.NetworkXNoCycle, nx.find_cycle, G) + + +def assert_basis_equal(a, b): + assert sorted(a) == sorted(b) + + +class TestMinimumCycleBasis: + @classmethod + def setup_class(cls): + T = nx.Graph() + nx.add_cycle(T, [1, 2, 3, 4], weight=1) + T.add_edge(2, 4, weight=5) + cls.diamond_graph = T + + def test_unweighted_diamond(self): + mcb = nx.minimum_cycle_basis(self.diamond_graph) + assert_basis_equal(mcb, [[2, 4, 1], [3, 4, 2]]) + + def test_weighted_diamond(self): + mcb = nx.minimum_cycle_basis(self.diamond_graph, weight="weight") + assert_basis_equal(mcb, [[2, 4, 1], [4, 3, 2, 1]]) + + def test_dimensionality(self): + # checks |MCB| = |E| - |V| + |NC| + ntrial = 10 + for seed in range(1234, 1234 + ntrial): + rg = nx.erdos_renyi_graph(10, 0.3, seed=seed) + nnodes = rg.number_of_nodes() + nedges = rg.number_of_edges() + ncomp = nx.number_connected_components(rg) + + mcb = nx.minimum_cycle_basis(rg) + 
assert len(mcb) == nedges - nnodes + ncomp + check_independent(mcb) + + def test_complete_graph(self): + cg = nx.complete_graph(5) + mcb = nx.minimum_cycle_basis(cg) + assert all(len(cycle) == 3 for cycle in mcb) + check_independent(mcb) + + def test_tree_graph(self): + tg = nx.balanced_tree(3, 3) + assert not nx.minimum_cycle_basis(tg) + + def test_petersen_graph(self): + G = nx.petersen_graph() + mcb = list(nx.minimum_cycle_basis(G)) + expected = [ + [4, 9, 7, 5, 0], + [1, 2, 3, 4, 0], + [1, 6, 8, 5, 0], + [4, 3, 8, 5, 0], + [1, 6, 9, 4, 0], + [1, 2, 7, 5, 0], + ] + assert len(mcb) == len(expected) + assert all(c in expected for c in mcb) + + # check that order of the nodes is a path + for c in mcb: + assert all(G.has_edge(u, v) for u, v in nx.utils.pairwise(c, cyclic=True)) + # check independence of the basis + check_independent(mcb) + + def test_gh6787_variable_weighted_complete_graph(self): + N = 8 + cg = nx.complete_graph(N) + cg.add_weighted_edges_from([(u, v, 9) for u, v in cg.edges]) + cg.add_weighted_edges_from([(u, v, 1) for u, v in nx.cycle_graph(N).edges]) + mcb = nx.minimum_cycle_basis(cg, weight="weight") + check_independent(mcb) + + def test_gh6787_and_edge_attribute_names(self): + G = nx.cycle_graph(4) + G.add_weighted_edges_from([(0, 2, 10), (1, 3, 10)], weight="dist") + expected = [[1, 3, 0], [3, 2, 1, 0], [1, 2, 0]] + mcb = list(nx.minimum_cycle_basis(G, weight="dist")) + assert len(mcb) == len(expected) + assert all(c in expected for c in mcb) + + # test not using a weight with weight attributes + expected = [[1, 3, 0], [1, 2, 0], [3, 2, 0]] + mcb = list(nx.minimum_cycle_basis(G)) + assert len(mcb) == len(expected) + assert all(c in expected for c in mcb) + + +class TestGirth: + @pytest.mark.parametrize( + ("G", "expected"), + ( + (nx.chvatal_graph(), 4), + (nx.tutte_graph(), 4), + (nx.petersen_graph(), 5), + (nx.heawood_graph(), 6), + (nx.pappus_graph(), 6), + (nx.random_labeled_tree(10, seed=42), inf), + (nx.empty_graph(10), inf), + (nx.Graph(chain(cycle_edges(range(5)), cycle_edges(range(6, 10)))), 4), + ( + nx.Graph( + [ + (0, 6), + (0, 8), + (0, 9), + (1, 8), + (2, 8), + (2, 9), + (4, 9), + (5, 9), + (6, 8), + (6, 9), + (7, 8), + ] + ), + 3, + ), + ), + ) + def test_girth(self, G, expected): + assert nx.girth(G) == expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py new file mode 100644 index 0000000..f760829 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_d_separation.py @@ -0,0 +1,340 @@ +from itertools import combinations + +import pytest + +import networkx as nx + + +def path_graph(): + """Return a path graph of length three.""" + G = nx.path_graph(3, create_using=nx.DiGraph) + G.graph["name"] = "path" + nx.freeze(G) + return G + + +def fork_graph(): + """Return a three node fork graph.""" + G = nx.DiGraph(name="fork") + G.add_edges_from([(0, 1), (0, 2)]) + nx.freeze(G) + return G + + +def collider_graph(): + """Return a collider/v-structure graph with three nodes.""" + G = nx.DiGraph(name="collider") + G.add_edges_from([(0, 2), (1, 2)]) + nx.freeze(G) + return G + + +def naive_bayes_graph(): + """Return a simply Naive Bayes PGM graph.""" + G = nx.DiGraph(name="naive_bayes") + G.add_edges_from([(0, 1), (0, 2), (0, 3), (0, 4)]) + nx.freeze(G) + return G + + +def asia_graph(): + """Return the 'Asia' PGM graph.""" + G = nx.DiGraph(name="asia") + G.add_edges_from( + [ + ("asia", 
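+            # This is the classic "chest clinic" network of Lauritzen &
+            # Spiegelhalter (1988), a standard benchmark DAG for
+            # d-separation queries: smoking drives cancer and bronchitis,
+            # tuberculosis and cancer feed the synthetic "either" node,
+            # and "either" in turn explains the xray and dyspnea findings.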
"tuberculosis"), + ("smoking", "cancer"), + ("smoking", "bronchitis"), + ("tuberculosis", "either"), + ("cancer", "either"), + ("either", "xray"), + ("either", "dyspnea"), + ("bronchitis", "dyspnea"), + ] + ) + nx.freeze(G) + return G + + +@pytest.fixture(name="path_graph") +def path_graph_fixture(): + return path_graph() + + +@pytest.fixture(name="fork_graph") +def fork_graph_fixture(): + return fork_graph() + + +@pytest.fixture(name="collider_graph") +def collider_graph_fixture(): + return collider_graph() + + +@pytest.fixture(name="naive_bayes_graph") +def naive_bayes_graph_fixture(): + return naive_bayes_graph() + + +@pytest.fixture(name="asia_graph") +def asia_graph_fixture(): + return asia_graph() + + +@pytest.fixture() +def large_collider_graph(): + edge_list = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("B", "F"), ("G", "E")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def chain_and_fork_graph(): + edge_list = [("A", "B"), ("B", "C"), ("B", "D"), ("D", "C")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def no_separating_set_graph(): + edge_list = [("A", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def large_no_separating_set_graph(): + edge_list = [("A", "B"), ("C", "A"), ("C", "B")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.fixture() +def collider_trek_graph(): + edge_list = [("A", "B"), ("C", "B"), ("C", "D")] + G = nx.DiGraph(edge_list) + return G + + +@pytest.mark.parametrize( + "graph", + [path_graph(), fork_graph(), collider_graph(), naive_bayes_graph(), asia_graph()], +) +def test_markov_condition(graph): + """Test that the Markov condition holds for each PGM graph.""" + for node in graph.nodes: + parents = set(graph.predecessors(node)) + non_descendants = graph.nodes - nx.descendants(graph, node) - {node} - parents + assert nx.is_d_separator(graph, {node}, non_descendants, parents) + + +def test_path_graph_dsep(path_graph): + """Example-based test of d-separation for path_graph.""" + assert nx.is_d_separator(path_graph, {0}, {2}, {1}) + assert not nx.is_d_separator(path_graph, {0}, {2}, set()) + + +def test_fork_graph_dsep(fork_graph): + """Example-based test of d-separation for fork_graph.""" + assert nx.is_d_separator(fork_graph, {1}, {2}, {0}) + assert not nx.is_d_separator(fork_graph, {1}, {2}, set()) + + +def test_collider_graph_dsep(collider_graph): + """Example-based test of d-separation for collider_graph.""" + assert nx.is_d_separator(collider_graph, {0}, {1}, set()) + assert not nx.is_d_separator(collider_graph, {0}, {1}, {2}) + + +def test_naive_bayes_dsep(naive_bayes_graph): + """Example-based test of d-separation for naive_bayes_graph.""" + for u, v in combinations(range(1, 5), 2): + assert nx.is_d_separator(naive_bayes_graph, {u}, {v}, {0}) + assert not nx.is_d_separator(naive_bayes_graph, {u}, {v}, set()) + + +def test_asia_graph_dsep(asia_graph): + """Example-based test of d-separation for asia_graph.""" + assert nx.is_d_separator( + asia_graph, {"asia", "smoking"}, {"dyspnea", "xray"}, {"bronchitis", "either"} + ) + assert nx.is_d_separator( + asia_graph, {"tuberculosis", "cancer"}, {"bronchitis"}, {"smoking", "xray"} + ) + + +def test_undirected_graphs_are_not_supported(): + """ + Test that undirected graphs are not supported. + + d-separation and its related algorithms do not apply in + the case of undirected graphs. 
+ """ + g = nx.path_graph(3, nx.Graph) + with pytest.raises(nx.NetworkXNotImplemented): + nx.is_d_separator(g, {0}, {1}, {2}) + with pytest.raises(nx.NetworkXNotImplemented): + nx.is_minimal_d_separator(g, {0}, {1}, {2}) + with pytest.raises(nx.NetworkXNotImplemented): + nx.find_minimal_d_separator(g, {0}, {1}) + + +def test_cyclic_graphs_raise_error(): + """ + Test that cycle graphs should cause erroring. + + This is because PGMs assume a directed acyclic graph. + """ + g = nx.cycle_graph(3, nx.DiGraph) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(g, {0}, {1}, {2}) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(g, {0}, {1}) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(g, {0}, {1}, {2}) + + +def test_invalid_nodes_raise_error(asia_graph): + """ + Test that graphs that have invalid nodes passed in raise errors. + """ + # Check both set and node arguments + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, {0}, {1}, {2}) + with pytest.raises(nx.NodeNotFound): + nx.is_d_separator(asia_graph, 0, 1, 2) + with pytest.raises(nx.NodeNotFound): + nx.is_minimal_d_separator(asia_graph, {0}, {1}, {2}) + with pytest.raises(nx.NodeNotFound): + nx.is_minimal_d_separator(asia_graph, 0, 1, 2) + with pytest.raises(nx.NodeNotFound): + nx.find_minimal_d_separator(asia_graph, {0}, {1}) + with pytest.raises(nx.NodeNotFound): + nx.find_minimal_d_separator(asia_graph, 0, 1) + + +def test_nondisjoint_node_sets_raise_error(collider_graph): + """ + Test that error is raised when node sets aren't disjoint. + """ + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 1, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 2, 0) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 0, 0, 1) + with pytest.raises(nx.NetworkXError): + nx.is_d_separator(collider_graph, 1, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 0, 1, included=0) + with pytest.raises(nx.NetworkXError): + nx.find_minimal_d_separator(collider_graph, 1, 0, included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 0, set()) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 0, 1, set(), included=0) + with pytest.raises(nx.NetworkXError): + nx.is_minimal_d_separator(collider_graph, 1, 0, set(), included=0) + + +def test_is_minimal_d_separator( + large_collider_graph, + chain_and_fork_graph, + no_separating_set_graph, + large_no_separating_set_graph, + collider_trek_graph, +): + # Case 1: + # create a graph A -> B <- C + # B -> D -> E; + # B -> F; + # G -> E; + assert not nx.is_d_separator(large_collider_graph, {"B"}, {"E"}, set()) + + # minimal set of the corresponding graph + # for B and E should be (D,) + Zmin = nx.find_minimal_d_separator(large_collider_graph, "B", "E") + # check that the minimal d-separator is a d-separating set + assert nx.is_d_separator(large_collider_graph, "B", "E", Zmin) + # the minimal separating set should also pass the test for minimality + assert nx.is_minimal_d_separator(large_collider_graph, "B", "E", Zmin) + # function should also work with set arguments + assert nx.is_minimal_d_separator(large_collider_graph, {"A", "B"}, {"G", "E"}, Zmin) + assert Zmin == {"D"} + + # Case 2: + # create a graph A -> B -> C + # B -> D -> C; + assert not 
nx.is_d_separator(chain_and_fork_graph, {"A"}, {"C"}, set()) + Zmin = nx.find_minimal_d_separator(chain_and_fork_graph, "A", "C") + + # the minimal separating set should pass the test for minimality + assert nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Zmin) + assert Zmin == {"B"} + Znotmin = Zmin.union({"D"}) + assert not nx.is_minimal_d_separator(chain_and_fork_graph, "A", "C", Znotmin) + + # Case 3: + # create a graph A -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + assert not nx.is_d_separator(no_separating_set_graph, {"A"}, {"B"}, set()) + assert nx.find_minimal_d_separator(no_separating_set_graph, "A", "B") is None + + # Case 4: + # create a graph A -> B with A <- C -> B + + # there is no m-separating set between A and B at all, so + # no minimal m-separating set can exist + # however, the algorithm will initially propose C as a + # minimal (but invalid) separating set + assert not nx.is_d_separator(large_no_separating_set_graph, {"A"}, {"B"}, {"C"}) + assert nx.find_minimal_d_separator(large_no_separating_set_graph, "A", "B") is None + + # Test `included` and `excluded` args + # create graph A -> B <- C -> D + assert nx.find_minimal_d_separator(collider_trek_graph, "A", "D", included="B") == { + "B", + "C", + } + assert ( + nx.find_minimal_d_separator( + collider_trek_graph, "A", "D", included="B", restricted="B" + ) + is None + ) + + +def test_is_minimal_d_separator_checks_dsep(): + """Test that is_minimal_d_separator checks for d-separation as well.""" + g = nx.DiGraph() + g.add_edges_from( + [ + ("A", "B"), + ("A", "E"), + ("B", "C"), + ("B", "D"), + ("D", "C"), + ("D", "F"), + ("E", "D"), + ("E", "F"), + ] + ) + + assert not nx.is_d_separator(g, {"C"}, {"F"}, {"D"}) + + # since {'D'} and {} are not d-separators, we return false + assert not nx.is_minimal_d_separator(g, "C", "F", {"D"}) + assert not nx.is_minimal_d_separator(g, "C", "F", set()) + + +def test__reachable(large_collider_graph): + reachable = nx.algorithms.d_separation._reachable + g = large_collider_graph + x = {"F", "D"} + ancestors = {"A", "B", "C", "D", "F"} + assert reachable(g, x, ancestors, {"B"}) == {"B", "F", "D"} + assert reachable(g, x, ancestors, set()) == ancestors diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py new file mode 100644 index 0000000..4312ce3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dag.py @@ -0,0 +1,837 @@ +from collections import deque +from itertools import combinations, permutations + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, pairwise + + +# Recipe from the itertools documentation. +def _consume(iterator): + "Consume the iterator entirely." + # Feed the entire iterator into a zero-length deque. 
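+    # A deque with maxlen=0 discards every item as it arrives, so this
+    # exhausts the iterator at C speed without storing anything.
+    # Roughly equivalent, if slower (editor's sketch):
+    #     for _ in iterator:
+    #         pass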
+ deque(iterator, maxlen=0) + + +class TestDagLongestPath: + """Unit tests computing the longest path in a directed acyclic graph.""" + + def test_empty(self): + G = nx.DiGraph() + assert nx.dag_longest_path(G) == [] + + def test_unweighted1(self): + edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (3, 7)] + G = nx.DiGraph(edges) + assert nx.dag_longest_path(G) == [1, 2, 3, 5, 6] + + def test_unweighted2(self): + edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] + G = nx.DiGraph(edges) + assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5] + + def test_weighted(self): + G = nx.DiGraph() + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] + G.add_weighted_edges_from(edges) + assert nx.dag_longest_path(G) == [2, 3, 5] + + def test_undirected_not_implemented(self): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.dag_longest_path, G) + + def test_unorderable_nodes(self): + """Tests that computing the longest path does not depend on + nodes being orderable. + + For more information, see issue #1989. + + """ + # Create the directed path graph on four nodes in a diamond shape, + # with nodes represented as (unorderable) Python objects. + nodes = [object() for n in range(4)] + G = nx.DiGraph() + G.add_edge(nodes[0], nodes[1]) + G.add_edge(nodes[0], nodes[2]) + G.add_edge(nodes[2], nodes[3]) + G.add_edge(nodes[1], nodes[3]) + + # this will raise NotImplementedError when nodes need to be ordered + nx.dag_longest_path(G) + + def test_multigraph_unweighted(self): + edges = [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] + G = nx.MultiDiGraph(edges) + assert nx.dag_longest_path(G) == [1, 2, 3, 4, 5] + + def test_multigraph_weighted(self): + G = nx.MultiDiGraph() + edges = [ + (1, 2, 2), + (2, 3, 2), + (1, 3, 1), + (1, 3, 5), + (1, 3, 2), + ] + G.add_weighted_edges_from(edges) + assert nx.dag_longest_path(G) == [1, 3] + + def test_multigraph_weighted_default_weight(self): + G = nx.MultiDiGraph([(1, 2), (2, 3)]) # Unweighted edges + G.add_weighted_edges_from([(1, 3, 1), (1, 3, 5), (1, 3, 2)]) + + # Default value for default weight is 1 + assert nx.dag_longest_path(G) == [1, 3] + assert nx.dag_longest_path(G, default_weight=3) == [1, 2, 3] + + +class TestDagLongestPathLength: + """Unit tests for computing the length of a longest path in a + directed acyclic graph. 
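+    Although longest path is NP-hard on general graphs, a DAG admits a
+    linear-time dynamic program: scan nodes in topological order and
+    extend the best distance of each predecessor by the connecting
+    edge's weight.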
+ + """ + + def test_unweighted(self): + edges = [(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)] + G = nx.DiGraph(edges) + assert nx.dag_longest_path_length(G) == 4 + + edges = [(1, 2), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] + G = nx.DiGraph(edges) + assert nx.dag_longest_path_length(G) == 4 + + # test degenerate graphs + G = nx.DiGraph() + G.add_node(1) + assert nx.dag_longest_path_length(G) == 0 + + def test_undirected_not_implemented(self): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.dag_longest_path_length, G) + + def test_weighted(self): + edges = [(1, 2, -5), (2, 3, 1), (3, 4, 1), (4, 5, 0), (3, 5, 4), (1, 6, 2)] + G = nx.DiGraph() + G.add_weighted_edges_from(edges) + assert nx.dag_longest_path_length(G) == 5 + + def test_multigraph_unweighted(self): + edges = [(1, 2), (2, 3), (2, 3), (3, 4), (4, 5), (1, 3), (1, 5), (3, 5)] + G = nx.MultiDiGraph(edges) + assert nx.dag_longest_path_length(G) == 4 + + def test_multigraph_weighted(self): + G = nx.MultiDiGraph() + edges = [ + (1, 2, 2), + (2, 3, 2), + (1, 3, 1), + (1, 3, 5), + (1, 3, 2), + ] + G.add_weighted_edges_from(edges) + assert nx.dag_longest_path_length(G) == 5 + + +class TestDAG: + @classmethod + def setup_class(cls): + pass + + def test_topological_sort1(self): + DG = nx.DiGraph([(1, 2), (1, 3), (2, 3)]) + + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + assert tuple(algorithm(DG)) == (1, 2, 3) + + DG.add_edge(3, 2) + + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + pytest.raises(nx.NetworkXUnfeasible, _consume, algorithm(DG)) + + DG.remove_edge(2, 3) + + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + assert tuple(algorithm(DG)) == (1, 3, 2) + + DG.remove_edge(3, 2) + + assert tuple(nx.topological_sort(DG)) in {(1, 2, 3), (1, 3, 2)} + assert tuple(nx.lexicographical_topological_sort(DG)) == (1, 2, 3) + + def test_is_directed_acyclic_graph(self): + G = nx.generators.complete_graph(2) + assert not nx.is_directed_acyclic_graph(G) + assert not nx.is_directed_acyclic_graph(G.to_directed()) + assert not nx.is_directed_acyclic_graph(nx.Graph([(3, 4), (4, 5)])) + assert nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)])) + + def test_topological_sort2(self): + DG = nx.DiGraph( + { + 1: [2], + 2: [3], + 3: [4], + 4: [5], + 5: [1], + 11: [12], + 12: [13], + 13: [14], + 14: [15], + } + ) + pytest.raises(nx.NetworkXUnfeasible, _consume, nx.topological_sort(DG)) + + assert not nx.is_directed_acyclic_graph(DG) + + DG.remove_edge(1, 2) + _consume(nx.topological_sort(DG)) + assert nx.is_directed_acyclic_graph(DG) + + def test_topological_sort3(self): + DG = nx.DiGraph() + DG.add_edges_from([(1, i) for i in range(2, 5)]) + DG.add_edges_from([(2, i) for i in range(5, 9)]) + DG.add_edges_from([(6, i) for i in range(9, 12)]) + DG.add_edges_from([(4, i) for i in range(12, 15)]) + + def validate(order): + assert isinstance(order, list) + assert set(order) == set(DG) + for u, v in combinations(order, 2): + assert not nx.has_path(DG, v, u) + + validate(list(nx.topological_sort(DG))) + + DG.add_edge(14, 1) + pytest.raises(nx.NetworkXUnfeasible, _consume, nx.topological_sort(DG)) + + def test_topological_sort4(self): + G = nx.Graph() + G.add_edge(1, 2) + # Only directed graphs can be topologically sorted. 
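+        # The contract for directed graphs: every edge (u, v) has u
+        # before v in the output, e.g. (editor's illustration, not part
+        # of the upstream test):
+        #     order = list(nx.topological_sort(nx.DiGraph([(1, 2), (2, 3)])))
+        #     assert order.index(1) < order.index(2) < order.index(3)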
+ pytest.raises(nx.NetworkXError, _consume, nx.topological_sort(G)) + + def test_topological_sort5(self): + G = nx.DiGraph() + G.add_edge(0, 1) + assert list(nx.topological_sort(G)) == [0, 1] + + def test_topological_sort6(self): + for algorithm in [nx.topological_sort, nx.lexicographical_topological_sort]: + + def runtime_error(): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + first = True + for x in algorithm(DG): + if first: + first = False + DG.add_edge(5 - x, 5) + + def unfeasible_error(): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + first = True + for x in algorithm(DG): + if first: + first = False + DG.remove_node(4) + + def runtime_error2(): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + first = True + for x in algorithm(DG): + if first: + first = False + DG.remove_node(2) + + pytest.raises(RuntimeError, runtime_error) + pytest.raises(RuntimeError, runtime_error2) + pytest.raises(nx.NetworkXUnfeasible, unfeasible_error) + + def test_all_topological_sorts_1(self): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 5)]) + assert list(nx.all_topological_sorts(DG)) == [[1, 2, 3, 4, 5]] + + def test_all_topological_sorts_2(self): + DG = nx.DiGraph([(1, 3), (2, 1), (2, 4), (4, 3), (4, 5)]) + assert sorted(nx.all_topological_sorts(DG)) == [ + [2, 1, 4, 3, 5], + [2, 1, 4, 5, 3], + [2, 4, 1, 3, 5], + [2, 4, 1, 5, 3], + [2, 4, 5, 1, 3], + ] + + def test_all_topological_sorts_3(self): + def unfeasible(): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 4), (4, 2), (4, 5)]) + # convert to list to execute generator + list(nx.all_topological_sorts(DG)) + + def not_implemented(): + G = nx.Graph([(1, 2), (2, 3)]) + # convert to list to execute generator + list(nx.all_topological_sorts(G)) + + def not_implemented_2(): + G = nx.MultiGraph([(1, 2), (1, 2), (2, 3)]) + list(nx.all_topological_sorts(G)) + + pytest.raises(nx.NetworkXUnfeasible, unfeasible) + pytest.raises(nx.NetworkXNotImplemented, not_implemented) + pytest.raises(nx.NetworkXNotImplemented, not_implemented_2) + + def test_all_topological_sorts_4(self): + DG = nx.DiGraph() + for i in range(7): + DG.add_node(i) + assert sorted(map(list, permutations(DG.nodes))) == sorted( + nx.all_topological_sorts(DG) + ) + + def test_all_topological_sorts_multigraph_1(self): + DG = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3), (3, 4), (3, 5), (3, 5), (3, 5)]) + assert sorted(nx.all_topological_sorts(DG)) == sorted( + [[1, 2, 3, 4, 5], [1, 2, 3, 5, 4]] + ) + + def test_all_topological_sorts_multigraph_2(self): + N = 9 + edges = [] + for i in range(1, N): + edges.extend([(i, i + 1)] * i) + DG = nx.MultiDiGraph(edges) + assert list(nx.all_topological_sorts(DG)) == [list(range(1, N + 1))] + + def test_ancestors(self): + G = nx.DiGraph() + ancestors = nx.algorithms.dag.ancestors + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + assert ancestors(G, 6) == {1, 2, 4, 5} + assert ancestors(G, 3) == {1, 4} + assert ancestors(G, 1) == set() + pytest.raises(nx.NetworkXError, ancestors, G, 8) + + def test_descendants(self): + G = nx.DiGraph() + descendants = nx.algorithms.dag.descendants + G.add_edges_from([(1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) + assert descendants(G, 1) == {2, 3, 6} + assert descendants(G, 4) == {2, 3, 5, 6} + assert descendants(G, 3) == set() + pytest.raises(nx.NetworkXError, descendants, G, 8) + + def test_transitive_closure(self): + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert edges_equal(nx.transitive_closure(G).edges(), solution) + G = nx.DiGraph([(1, 
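+        # The transitive closure contains an edge (u, w) for every pair
+        # with a directed path from u to w, so node 1 also gains direct
+        # edges to 3 and 4 here.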
2), (2, 3), (2, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] + assert edges_equal(nx.transitive_closure(G).edges(), solution) + G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + solution = [(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(sorted(nx.transitive_closure(G).edges()), soln) + + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution) + + G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution) + + G = nx.MultiDiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert edges_equal(sorted(nx.transitive_closure(G).edges()), solution) + + # test if edge data is copied + G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)]) + H = nx.transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + k = 10 + G = nx.DiGraph((i, i + 1, {"f": "b", "weight": i}) for i in range(k)) + H = nx.transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.transitive_closure(G, reflexive="wrong input") + + def test_reflexive_transitive_closure(self): + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(nx.transitive_closure(G).edges(), solution) + assert edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.DiGraph([(1, 2), (2, 3), (2, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(nx.transitive_closure(G).edges(), solution) + assert edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + solution = sorted([(1, 2), (2, 1), (2, 3), (3, 2), (1, 3), (3, 1)]) + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(sorted(nx.transitive_closure(G).edges()), soln) + assert edges_equal(sorted(nx.transitive_closure(G, False).edges()), soln) + assert edges_equal(sorted(nx.transitive_closure(G, None).edges()), solution) + assert edges_equal(sorted(nx.transitive_closure(G, True).edges()), soln) + + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(nx.transitive_closure(G).edges(), solution) + assert edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(nx.transitive_closure(G).edges(), solution) + assert edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert edges_equal(nx.transitive_closure(G, 
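+        # As these assertions spell out, `reflexive` controls self-loops
+        # in the closure: True adds (n, n) for every node, None strips
+        # all self-loops, and False (the default) keeps only those that
+        # a nontrivial cycle through the node already implies.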
True).edges(), soln) + assert edges_equal(nx.transitive_closure(G, None).edges(), solution) + + G = nx.MultiDiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + soln = sorted(solution + [(n, n) for n in G]) + assert edges_equal(nx.transitive_closure(G).edges(), solution) + assert edges_equal(nx.transitive_closure(G, False).edges(), solution) + assert edges_equal(nx.transitive_closure(G, True).edges(), soln) + assert edges_equal(nx.transitive_closure(G, None).edges(), solution) + + def test_transitive_closure_dag(self): + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + transitive_closure = nx.algorithms.dag.transitive_closure_dag + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + assert edges_equal(transitive_closure(G).edges(), solution) + G = nx.DiGraph([(1, 2), (2, 3), (2, 4)]) + solution = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)] + assert edges_equal(transitive_closure(G).edges(), solution) + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + pytest.raises(nx.NetworkXNotImplemented, transitive_closure, G) + + # test if edge data is copied + G = nx.DiGraph([(1, 2, {"a": 3}), (2, 3, {"b": 0}), (3, 4)]) + H = transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + k = 10 + G = nx.DiGraph((i, i + 1, {"foo": "bar", "weight": i}) for i in range(k)) + H = transitive_closure(G) + for u, v in G.edges(): + assert G.get_edge_data(u, v) == H.get_edge_data(u, v) + + def test_transitive_reduction(self): + G = nx.DiGraph([(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]) + transitive_reduction = nx.algorithms.dag.transitive_reduction + solution = [(1, 2), (2, 3), (3, 4)] + assert edges_equal(transitive_reduction(G).edges(), solution) + G = nx.DiGraph([(1, 2), (1, 3), (1, 4), (2, 3), (2, 4)]) + transitive_reduction = nx.algorithms.dag.transitive_reduction + solution = [(1, 2), (2, 3), (2, 4)] + assert edges_equal(transitive_reduction(G).edges(), solution) + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + pytest.raises(nx.NetworkXNotImplemented, transitive_reduction, G) + + def _check_antichains(self, solution, result): + sol = [frozenset(a) for a in solution] + res = [frozenset(a) for a in result] + assert set(sol) == set(res) + + def test_antichains(self): + antichains = nx.algorithms.dag.antichains + G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + solution = [[], [4], [3], [2], [1]] + self._check_antichains(list(antichains(G)), solution) + G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (3, 5), (5, 6), (5, 7)]) + solution = [ + [], + [4], + [7], + [7, 4], + [6], + [6, 4], + [6, 7], + [6, 7, 4], + [5], + [5, 4], + [3], + [3, 4], + [2], + [1], + ] + self._check_antichains(list(antichains(G)), solution) + G = nx.DiGraph([(1, 2), (1, 3), (3, 4), (3, 5), (5, 6)]) + solution = [ + [], + [6], + [5], + [4], + [4, 6], + [4, 5], + [3], + [2], + [2, 6], + [2, 5], + [2, 4], + [2, 4, 6], + [2, 4, 5], + [2, 3], + [1], + ] + self._check_antichains(list(antichains(G)), solution) + G = nx.DiGraph({0: [1, 2], 1: [4], 2: [3], 3: [4]}) + solution = [[], [4], [3], [2], [1], [1, 3], [1, 2], [0]] + self._check_antichains(list(antichains(G)), solution) + G = nx.DiGraph() + self._check_antichains(list(antichains(G)), [[]]) + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2]) + solution = [[], [0], [1], [1, 0], [2], [2, 0], [2, 1], [2, 1, 0]] + self._check_antichains(list(antichains(G)), solution) + + def f(x): + return list(antichains(x)) + + G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + pytest.raises(nx.NetworkXNotImplemented, f, G) + G = nx.DiGraph([(1, 2), 
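+        # Antichains (sets of mutually unreachable nodes) are only well
+        # defined on a DAG; the directed triangle built here makes every
+        # node reachable from every other, hence NetworkXUnfeasible.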
(2, 3), (3, 1)]) + pytest.raises(nx.NetworkXUnfeasible, f, G) + + def test_lexicographical_topological_sort(self): + G = nx.DiGraph([(1, 2), (2, 3), (1, 4), (1, 5), (2, 6)]) + assert list(nx.lexicographical_topological_sort(G)) == [1, 2, 3, 4, 5, 6] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: x)) == [ + 1, + 2, + 3, + 4, + 5, + 6, + ] + assert list(nx.lexicographical_topological_sort(G, key=lambda x: -x)) == [ + 1, + 5, + 4, + 2, + 6, + 3, + ] + + def test_lexicographical_topological_sort2(self): + """ + Check the case of two or more nodes with same key value. + Want to avoid exception raised due to comparing nodes directly. + See Issue #3493 + """ + + class Test_Node: + def __init__(self, n): + self.label = n + self.priority = 1 + + def __repr__(self): + return f"Node({self.label})" + + def sorting_key(node): + return node.priority + + test_nodes = [Test_Node(n) for n in range(4)] + G = nx.DiGraph() + edges = [(0, 1), (0, 2), (0, 3), (2, 3)] + G.add_edges_from((test_nodes[a], test_nodes[b]) for a, b in edges) + + sorting = list(nx.lexicographical_topological_sort(G, key=sorting_key)) + assert sorting == test_nodes + + +def test_topological_generations(): + G = nx.DiGraph( + {1: [2, 3], 2: [4, 5], 3: [7], 4: [], 5: [6, 7], 6: [], 7: []} + ).reverse() + # order within each generation is inconsequential + generations = [sorted(gen) for gen in nx.topological_generations(G)] + expected = [[4, 6, 7], [3, 5], [2], [1]] + assert generations == expected + + MG = nx.MultiDiGraph(G.edges) + MG.add_edge(2, 1) + generations = [sorted(gen) for gen in nx.topological_generations(MG)] + assert generations == expected + + +def test_topological_generations_empty(): + G = nx.DiGraph() + assert list(nx.topological_generations(G)) == [] + + +def test_topological_generations_cycle(): + G = nx.DiGraph([[2, 1], [3, 1], [1, 2]]) + with pytest.raises(nx.NetworkXUnfeasible): + list(nx.topological_generations(G)) + + +def test_is_aperiodic_cycle(): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3, 4]) + assert not nx.is_aperiodic(G) + + +def test_is_aperiodic_cycle2(): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3, 4]) + nx.add_cycle(G, [3, 4, 5, 6, 7]) + assert nx.is_aperiodic(G) + + +def test_is_aperiodic_cycle3(): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3, 4]) + nx.add_cycle(G, [3, 4, 5, 6]) + assert not nx.is_aperiodic(G) + + +def test_is_aperiodic_cycle4(): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3, 4]) + G.add_edge(1, 3) + assert nx.is_aperiodic(G) + + +def test_is_aperiodic_selfloop(): + G = nx.DiGraph() + nx.add_cycle(G, [1, 2, 3, 4]) + G.add_edge(1, 1) + assert nx.is_aperiodic(G) + + +def test_is_aperiodic_null_graph_raises(): + G = nx.DiGraph() + pytest.raises(nx.NetworkXPointlessConcept, nx.is_aperiodic, G) + + +def test_is_aperiodic_undirected_raises(): + G = nx.Graph([(1, 2), (2, 3), (3, 1)]) + pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) + + +def test_is_aperiodic_disconnected_raises(): + G = nx.DiGraph() + nx.add_cycle(G, [0, 1, 2]) + G.add_edge(3, 3) + pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) + + +def test_is_aperiodic_weakly_connected_raises(): + G = nx.DiGraph([(1, 2), (2, 3)]) + pytest.raises(nx.NetworkXError, nx.is_aperiodic, G) + + +def test_is_aperiodic_empty_graph(): + G = nx.empty_graph(create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes."): + nx.is_aperiodic(G) + + +def test_is_aperiodic_bipartite(): + # Bipartite graph + G = nx.DiGraph(nx.davis_southern_women_graph()) + assert not 
nx.is_aperiodic(G) + + +def test_is_aperiodic_single_node(): + G = nx.DiGraph() + G.add_node(0) + assert not nx.is_aperiodic(G) + G.add_edge(0, 0) + assert nx.is_aperiodic(G) + + +class TestDagToBranching: + """Unit tests for the :func:`networkx.dag_to_branching` function.""" + + def test_single_root(self): + """Tests that a directed acyclic graph with a single degree + zero node produces an arborescence. + + """ + G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)]) + B = nx.dag_to_branching(G) + expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4)]) + assert nx.is_arborescence(B) + assert nx.is_isomorphic(B, expected) + + def test_multiple_roots(self): + """Tests that a directed acyclic graph with multiple degree zero + nodes creates an arborescence with multiple (weakly) connected + components. + + """ + G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3), (5, 2)]) + B = nx.dag_to_branching(G) + expected = nx.DiGraph([(0, 1), (1, 3), (0, 2), (2, 4), (5, 6), (6, 7)]) + assert nx.is_branching(B) + assert not nx.is_arborescence(B) + assert nx.is_isomorphic(B, expected) + + # # Attributes are not copied by this function. If they were, this would + # # be a good test to uncomment. + # def test_copy_attributes(self): + # """Tests that node attributes are copied in the branching.""" + # G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)]) + # for v in G: + # G.node[v]['label'] = str(v) + # B = nx.dag_to_branching(G) + # # Determine the root node of the branching. + # root = next(v for v, d in B.in_degree() if d == 0) + # assert_equal(B.node[root]['label'], '0') + # children = B[root] + # # Get the left and right children, nodes 1 and 2, respectively. + # left, right = sorted(children, key=lambda v: B.node[v]['label']) + # assert_equal(B.node[left]['label'], '1') + # assert_equal(B.node[right]['label'], '2') + # # Get the left grandchild. + # children = B[left] + # assert_equal(len(children), 1) + # left_grandchild = arbitrary_element(children) + # assert_equal(B.node[left_grandchild]['label'], '3') + # # Get the right grandchild. + # children = B[right] + # assert_equal(len(children), 1) + # right_grandchild = arbitrary_element(children) + # assert_equal(B.node[right_grandchild]['label'], '3') + + def test_already_arborescence(self): + """Tests that a directed acyclic graph that is already an + arborescence produces an isomorphic arborescence as output. + + """ + A = nx.balanced_tree(2, 2, create_using=nx.DiGraph()) + B = nx.dag_to_branching(A) + assert nx.is_isomorphic(A, B) + + def test_already_branching(self): + """Tests that a directed acyclic graph that is already a + branching produces an isomorphic branching as output. 
+ + """ + T1 = nx.balanced_tree(2, 2, create_using=nx.DiGraph()) + T2 = nx.balanced_tree(2, 2, create_using=nx.DiGraph()) + G = nx.disjoint_union(T1, T2) + B = nx.dag_to_branching(G) + assert nx.is_isomorphic(G, B) + + def test_not_acyclic(self): + """Tests that a non-acyclic graph causes an exception.""" + with pytest.raises(nx.HasACycle): + G = nx.DiGraph(pairwise("abc", cyclic=True)) + nx.dag_to_branching(G) + + def test_undirected(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.Graph()) + + def test_multigraph(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.MultiGraph()) + + def test_multidigraph(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.dag_to_branching(nx.MultiDiGraph()) + + +def test_ancestors_descendants_undirected(): + """Regression test to ensure ancestors and descendants work as expected on + undirected graphs.""" + G = nx.path_graph(5) + nx.ancestors(G, 2) == nx.descendants(G, 2) == {0, 1, 3, 4} + + +def test_compute_v_structures_raise(): + G = nx.Graph() + with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"): + nx.compute_v_structures(G) + + +def test_compute_v_structures(): + edges = [(0, 1), (0, 2), (3, 2)] + G = nx.DiGraph(edges) + + v_structs = set(nx.compute_v_structures(G)) + assert len(v_structs) == 1 + assert (0, 2, 3) in v_structs + + edges = [("A", "B"), ("C", "B"), ("B", "D"), ("D", "E"), ("G", "E")] + G = nx.DiGraph(edges) + v_structs = set(nx.compute_v_structures(G)) + assert len(v_structs) == 2 + + +def test_compute_v_structures_deprecated(): + G = nx.DiGraph() + with pytest.deprecated_call(): + nx.compute_v_structures(G) + + +def test_v_structures_raise(): + G = nx.Graph() + with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"): + nx.dag.v_structures(G) + + +@pytest.mark.parametrize( + ("edgelist", "expected"), + ( + ( + [(0, 1), (0, 2), (3, 2)], + {(0, 2, 3)}, + ), + ( + [("A", "B"), ("C", "B"), ("D", "G"), ("D", "E"), ("G", "E")], + {("A", "B", "C")}, + ), + ([(0, 1), (2, 1), (0, 2)], set()), # adjacent parents case: see gh-7385 + ), +) +def test_v_structures(edgelist, expected): + G = nx.DiGraph(edgelist) + v_structs = set(nx.dag.v_structures(G)) + assert v_structs == expected + + +def test_colliders_raise(): + G = nx.Graph() + with pytest.raises(nx.NetworkXNotImplemented, match="for undirected type"): + nx.dag.colliders(G) + + +@pytest.mark.parametrize( + ("edgelist", "expected"), + ( + ( + [(0, 1), (0, 2), (3, 2)], + {(0, 2, 3)}, + ), + ( + [("A", "B"), ("C", "B"), ("D", "G"), ("D", "E"), ("G", "E")], + {("A", "B", "C"), ("D", "E", "G")}, + ), + ), +) +def test_colliders(edgelist, expected): + G = nx.DiGraph(edgelist) + colliders = set(nx.dag.colliders(G)) + assert colliders == expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py new file mode 100644 index 0000000..1668fef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_measures.py @@ -0,0 +1,831 @@ +import itertools +import math +from random import Random + +import pytest + +import networkx as nx +from networkx import convert_node_labels_to_integers as cnlti +from networkx.algorithms.distance_measures import _extrema_bounding + + +def test__extrema_bounding_invalid_compute_kwarg(): + G = nx.path_graph(3) + with pytest.raises(ValueError, match="compute must be one of"): + _extrema_bounding(G, 
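+        # `compute` picks the extremal quantity the bounding procedure
+        # targets; judging from the error message matched above, values
+        # outside the supported set (believed to be "diameter", "radius",
+        # "periphery", "center" and "eccentricities") raise ValueError.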
compute="spam") + + +class TestDistance: + def setup_method(self): + self.G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + + @pytest.mark.parametrize("seed", list(range(10))) + @pytest.mark.parametrize("n", list(range(10, 20))) + @pytest.mark.parametrize("prob", [x / 10 for x in range(0, 10, 2)]) + def test_use_bounds_on_off_consistency(self, seed, n, prob): + """Test for consistency of distance metrics when using usebounds=True. + + We validate consistency for `networkx.diameter`, `networkx.radius`, `networkx.periphery` + and `networkx.center` when passing `usebounds=True`. Expectation is that method + returns the same result whether we pass usebounds=True or not. + + For this we generate random connected graphs and validate method returns the same. + """ + metrics = [nx.diameter, nx.radius, nx.periphery, nx.center] + max_weight = [5, 10, 1000] + rng = Random(seed) + # we compose it with a random tree to ensure graph is connected + G = nx.compose( + nx.random_labeled_tree(n, seed=rng), + nx.erdos_renyi_graph(n, prob, seed=rng), + ) + for metric in metrics: + # checking unweighted case + assert metric(G) == metric(G, usebounds=True) + for w in max_weight: + for u, v in G.edges(): + G[u][v]["w"] = rng.randint(0, w) + # checking weighted case + assert metric(G, weight="w") == metric(G, weight="w", usebounds=True) + + def test_eccentricity(self): + assert nx.eccentricity(self.G, 1) == 6 + e = nx.eccentricity(self.G) + assert e[1] == 6 + + sp = dict(nx.shortest_path_length(self.G)) + e = nx.eccentricity(self.G, sp=sp) + assert e[1] == 6 + + e = nx.eccentricity(self.G, v=1) + assert e == 6 + + # This behavior changed in version 1.8 (ticket #739) + e = nx.eccentricity(self.G, v=[1, 1]) + assert e[1] == 6 + e = nx.eccentricity(self.G, v=[1, 2]) + assert e[1] == 6 + + # test against graph with one node + G = nx.path_graph(1) + e = nx.eccentricity(G) + assert e[0] == 0 + e = nx.eccentricity(G, v=0) + assert e == 0 + pytest.raises(nx.NetworkXError, nx.eccentricity, G, 1) + + # test against empty graph + G = nx.empty_graph() + e = nx.eccentricity(G) + assert e == {} + + def test_diameter(self): + assert nx.diameter(self.G) == 6 + + def test_harmonic_diameter(self): + assert nx.harmonic_diameter(self.G) == pytest.approx(2.0477815699658715) + assert nx.harmonic_diameter(nx.star_graph(3)) == pytest.approx(1.333333) + + def test_harmonic_diameter_empty(self): + assert math.isnan(nx.harmonic_diameter(nx.empty_graph())) + + def test_harmonic_diameter_single_node(self): + assert math.isnan(nx.harmonic_diameter(nx.empty_graph(1))) + + def test_harmonic_diameter_discrete(self): + assert math.isinf(nx.harmonic_diameter(nx.empty_graph(3))) + + def test_harmonic_diameter_not_strongly_connected(self): + DG = nx.DiGraph() + DG.add_edge(0, 1) + assert nx.harmonic_diameter(DG) == 2 + + def test_harmonic_diameter_weighted_paths(self): + G = nx.star_graph(3) + # check defaults + G.add_weighted_edges_from([(*e, 1) for i, e in enumerate(G.edges)], "weight") + assert nx.harmonic_diameter(G) == pytest.approx(1.333333) + assert nx.harmonic_diameter(G, weight="weight") == pytest.approx(1.333333) + + # check impact of weights and alternate weight name + G.add_weighted_edges_from([(*e, i) for i, e in enumerate(G.edges)], "dist") + assert nx.harmonic_diameter(G, weight="dist") == pytest.approx(1.8) + + def test_radius(self): + assert nx.radius(self.G) == 4 + + def test_periphery(self): + assert set(nx.periphery(self.G)) == {1, 4, 13, 16} + + def test_center_simple_tree(self): + G = nx.Graph([(1, 2), (1, 
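+        # Jordan's theorem: the center of a tree is a single vertex or a
+        # pair of adjacent vertices; for this 5-node tree it is the
+        # adjacent pair {1, 2}.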
3), (2, 4), (2, 5)]) + assert nx.center(G) == [1, 2] + + @pytest.mark.parametrize("r", range(2, 5)) + @pytest.mark.parametrize("h", range(1, 5)) + def test_center_balanced_tree(self, r, h): + G = nx.balanced_tree(r, h) + assert nx.center(G) == [0] + + def test_center(self): + assert set(nx.center(self.G)) == {6, 7, 10, 11} + + @pytest.mark.parametrize("n", [1, 2, 99, 100]) + def test_center_path_graphs(self, n): + G = nx.path_graph(n) + expected = {(n - 1) // 2, math.ceil((n - 1) / 2)} + assert set(nx.center(G)) == expected + + def test_bound_diameter(self): + assert nx.diameter(self.G, usebounds=True) == 6 + + def test_bound_radius(self): + assert nx.radius(self.G, usebounds=True) == 4 + + def test_bound_periphery(self): + result = {1, 4, 13, 16} + assert set(nx.periphery(self.G, usebounds=True)) == result + + def test_bound_center(self): + result = {6, 7, 10, 11} + assert set(nx.center(self.G, usebounds=True)) == result + + def test_radius_exception(self): + G = nx.Graph() + G.add_edge(1, 2) + G.add_edge(3, 4) + pytest.raises(nx.NetworkXError, nx.diameter, G) + + def test_eccentricity_infinite(self): + with pytest.raises(nx.NetworkXError): + G = nx.Graph([(1, 2), (3, 4)]) + e = nx.eccentricity(G) + + def test_eccentricity_undirected_not_connected(self): + with pytest.raises(nx.NetworkXError): + G = nx.Graph([(1, 2), (3, 4)]) + e = nx.eccentricity(G, sp=1) + + def test_eccentricity_directed_weakly_connected(self): + with pytest.raises(nx.NetworkXError): + DG = nx.DiGraph([(1, 2), (1, 3)]) + nx.eccentricity(DG) + + +class TestWeightedDistance: + def setup_method(self): + G = nx.Graph() + G.add_edge(0, 1, weight=0.6, cost=0.6, high_cost=6) + G.add_edge(0, 2, weight=0.2, cost=0.2, high_cost=2) + G.add_edge(2, 3, weight=0.1, cost=0.1, high_cost=1) + G.add_edge(2, 4, weight=0.7, cost=0.7, high_cost=7) + G.add_edge(2, 5, weight=0.9, cost=0.9, high_cost=9) + G.add_edge(1, 5, weight=0.3, cost=0.3, high_cost=3) + self.G = G + self.weight_fn = lambda v, u, e: 2 + + def test_eccentricity_weight_None(self): + assert nx.eccentricity(self.G, 1, weight=None) == 3 + e = nx.eccentricity(self.G, weight=None) + assert e[1] == 3 + + e = nx.eccentricity(self.G, v=1, weight=None) + assert e == 3 + + # This behavior changed in version 1.8 (ticket #739) + e = nx.eccentricity(self.G, v=[1, 1], weight=None) + assert e[1] == 3 + e = nx.eccentricity(self.G, v=[1, 2], weight=None) + assert e[1] == 3 + + def test_eccentricity_weight_attr(self): + assert nx.eccentricity(self.G, 1, weight="weight") == 1.5 + e = nx.eccentricity(self.G, weight="weight") + assert ( + e + == nx.eccentricity(self.G, weight="cost") + != nx.eccentricity(self.G, weight="high_cost") + ) + assert e[1] == 1.5 + + e = nx.eccentricity(self.G, v=1, weight="weight") + assert e == 1.5 + + # This behavior changed in version 1.8 (ticket #739) + e = nx.eccentricity(self.G, v=[1, 1], weight="weight") + assert e[1] == 1.5 + e = nx.eccentricity(self.G, v=[1, 2], weight="weight") + assert e[1] == 1.5 + + def test_eccentricity_weight_fn(self): + assert nx.eccentricity(self.G, 1, weight=self.weight_fn) == 6 + e = nx.eccentricity(self.G, weight=self.weight_fn) + assert e[1] == 6 + + e = nx.eccentricity(self.G, v=1, weight=self.weight_fn) + assert e == 6 + + # This behavior changed in version 1.8 (ticket #739) + e = nx.eccentricity(self.G, v=[1, 1], weight=self.weight_fn) + assert e[1] == 6 + e = nx.eccentricity(self.G, v=[1, 2], weight=self.weight_fn) + assert e[1] == 6 + + def test_diameter_weight_None(self): + assert nx.diameter(self.G, weight=None) == 3 + + 
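+    # weight=None counts hops, a string reads the named edge attribute,
+    # and a callable is evaluated per edge. self.weight_fn returns the
+    # constant 2, so every weight_fn result below is exactly twice the
+    # unweighted one (diameter 3 -> 6, radius 2 -> 4, eccentricity of
+    # node 1: 3 -> 6).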
def test_diameter_weight_attr(self): + assert ( + nx.diameter(self.G, weight="weight") + == nx.diameter(self.G, weight="cost") + == 1.6 + != nx.diameter(self.G, weight="high_cost") + ) + + def test_diameter_weight_fn(self): + assert nx.diameter(self.G, weight=self.weight_fn) == 6 + + def test_radius_weight_None(self): + assert pytest.approx(nx.radius(self.G, weight=None)) == 2 + + def test_radius_weight_attr(self): + assert ( + pytest.approx(nx.radius(self.G, weight="weight")) + == pytest.approx(nx.radius(self.G, weight="cost")) + == 0.9 + != nx.radius(self.G, weight="high_cost") + ) + + def test_radius_weight_fn(self): + assert nx.radius(self.G, weight=self.weight_fn) == 4 + + def test_periphery_weight_None(self): + for v in set(nx.periphery(self.G, weight=None)): + assert nx.eccentricity(self.G, v, weight=None) == nx.diameter( + self.G, weight=None + ) + + def test_periphery_weight_attr(self): + periphery = set(nx.periphery(self.G, weight="weight")) + assert ( + periphery + == set(nx.periphery(self.G, weight="cost")) + == set(nx.periphery(self.G, weight="high_cost")) + ) + for v in periphery: + assert ( + nx.eccentricity(self.G, v, weight="high_cost") + != nx.eccentricity(self.G, v, weight="weight") + == nx.eccentricity(self.G, v, weight="cost") + == nx.diameter(self.G, weight="weight") + == nx.diameter(self.G, weight="cost") + != nx.diameter(self.G, weight="high_cost") + ) + assert nx.eccentricity(self.G, v, weight="high_cost") == nx.diameter( + self.G, weight="high_cost" + ) + + def test_periphery_weight_fn(self): + for v in set(nx.periphery(self.G, weight=self.weight_fn)): + assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.diameter( + self.G, weight=self.weight_fn + ) + + def test_center_weight_None(self): + for v in set(nx.center(self.G, weight=None)): + assert pytest.approx(nx.eccentricity(self.G, v, weight=None)) == nx.radius( + self.G, weight=None + ) + + def test_center_weight_attr(self): + center = set(nx.center(self.G, weight="weight")) + assert ( + center + == set(nx.center(self.G, weight="cost")) + != set(nx.center(self.G, weight="high_cost")) + ) + for v in center: + assert ( + nx.eccentricity(self.G, v, weight="high_cost") + != pytest.approx(nx.eccentricity(self.G, v, weight="weight")) + == pytest.approx(nx.eccentricity(self.G, v, weight="cost")) + == nx.radius(self.G, weight="weight") + == nx.radius(self.G, weight="cost") + != nx.radius(self.G, weight="high_cost") + ) + assert nx.eccentricity(self.G, v, weight="high_cost") == nx.radius( + self.G, weight="high_cost" + ) + + def test_center_weight_fn(self): + for v in set(nx.center(self.G, weight=self.weight_fn)): + assert nx.eccentricity(self.G, v, weight=self.weight_fn) == nx.radius( + self.G, weight=self.weight_fn + ) + + def test_bound_diameter_weight_None(self): + assert nx.diameter(self.G, usebounds=True, weight=None) == 3 + + def test_bound_diameter_weight_attr(self): + assert ( + nx.diameter(self.G, usebounds=True, weight="high_cost") + != nx.diameter(self.G, usebounds=True, weight="weight") + == nx.diameter(self.G, usebounds=True, weight="cost") + == 1.6 + != nx.diameter(self.G, usebounds=True, weight="high_cost") + ) + assert nx.diameter(self.G, usebounds=True, weight="high_cost") == nx.diameter( + self.G, usebounds=True, weight="high_cost" + ) + + def test_bound_diameter_weight_fn(self): + assert nx.diameter(self.G, usebounds=True, weight=self.weight_fn) == 6 + + def test_bound_radius_weight_None(self): + assert pytest.approx(nx.radius(self.G, usebounds=True, weight=None)) == 2 + + def 
test_bound_radius_weight_attr(self): + assert ( + nx.radius(self.G, usebounds=True, weight="high_cost") + != pytest.approx(nx.radius(self.G, usebounds=True, weight="weight")) + == pytest.approx(nx.radius(self.G, usebounds=True, weight="cost")) + == 0.9 + != nx.radius(self.G, usebounds=True, weight="high_cost") + ) + assert nx.radius(self.G, usebounds=True, weight="high_cost") == nx.radius( + self.G, usebounds=True, weight="high_cost" + ) + + def test_bound_radius_weight_fn(self): + assert nx.radius(self.G, usebounds=True, weight=self.weight_fn) == 4 + + def test_bound_periphery_weight_None(self): + result = {1, 3, 4} + assert set(nx.periphery(self.G, usebounds=True, weight=None)) == result + + def test_bound_periphery_weight_attr(self): + result = {4, 5} + assert ( + set(nx.periphery(self.G, usebounds=True, weight="weight")) + == set(nx.periphery(self.G, usebounds=True, weight="cost")) + == result + ) + + def test_bound_periphery_weight_fn(self): + result = {1, 3, 4} + assert ( + set(nx.periphery(self.G, usebounds=True, weight=self.weight_fn)) == result + ) + + def test_bound_center_weight_None(self): + result = {0, 2, 5} + assert set(nx.center(self.G, usebounds=True, weight=None)) == result + + def test_bound_center_weight_attr(self): + result = {0} + assert ( + set(nx.center(self.G, usebounds=True, weight="weight")) + == set(nx.center(self.G, usebounds=True, weight="cost")) + == result + ) + + def test_bound_center_weight_fn(self): + result = {0, 2, 5} + assert set(nx.center(self.G, usebounds=True, weight=self.weight_fn)) == result + + +class TestResistanceDistance: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + + def setup_method(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(2, 3, weight=4) + G.add_edge(3, 4, weight=1) + G.add_edge(1, 4, weight=3) + self.G = G + + def test_resistance_distance_directed_graph(self): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXNotImplemented): + nx.resistance_distance(G) + + def test_resistance_distance_empty(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(G) + + def test_resistance_distance_not_connected(self): + with pytest.raises(nx.NetworkXError): + self.G.add_node(5) + nx.resistance_distance(self.G, 1, 5) + + def test_resistance_distance_nodeA_not_in_graph(self): + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(self.G, 9, 1) + + def test_resistance_distance_nodeB_not_in_graph(self): + with pytest.raises(nx.NetworkXError): + nx.resistance_distance(self.G, 1, 9) + + def test_resistance_distance(self): + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + 4) + 1 / (1 + 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_resistance_distance_noinv(self): + rd = nx.resistance_distance(self.G, 1, 3, "weight", False) + test_data = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1 + 1 / 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_resistance_distance_no_weight(self): + rd = nx.resistance_distance(self.G, 1, 3) + assert round(rd, 5) == 1 + + def test_resistance_distance_neg_weight(self): + self.G[2][3]["weight"] = -4 + rd = nx.resistance_distance(self.G, 1, 3, "weight", True) + test_data = 1 / (1 / (2 + -4) + 1 / (1 + 3)) + assert round(rd, 5) == round(test_data, 5) + + def test_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(2, 3, weight=4) + G.add_edge(3, 4, weight=1) + G.add_edge(1, 4, weight=3) + rd = 
nx.resistance_distance(G, 1, 3, "weight", True) + assert np.isclose(rd, 1 / (1 / (2 + 4) + 1 / (1 + 3))) + + def test_resistance_distance_div0(self): + with pytest.raises(ZeroDivisionError): + self.G[1][2]["weight"] = 0 + nx.resistance_distance(self.G, 1, 3, "weight") + + def test_resistance_distance_same_node(self): + assert nx.resistance_distance(self.G, 1, 1) == 0 + + def test_resistance_distance_only_nodeA(self): + rd = nx.resistance_distance(self.G, nodeA=1) + test_data = {} + test_data[1] = 0 + test_data[2] = 0.75 + test_data[3] = 1 + test_data[4] = 0.75 + assert isinstance(rd, dict) + assert sorted(rd.keys()) == sorted(test_data.keys()) + for key in rd: + assert np.isclose(rd[key], test_data[key]) + + def test_resistance_distance_only_nodeB(self): + rd = nx.resistance_distance(self.G, nodeB=1) + test_data = {} + test_data[1] = 0 + test_data[2] = 0.75 + test_data[3] = 1 + test_data[4] = 0.75 + assert isinstance(rd, dict) + assert sorted(rd.keys()) == sorted(test_data.keys()) + for key in rd: + assert np.isclose(rd[key], test_data[key]) + + def test_resistance_distance_all(self): + rd = nx.resistance_distance(self.G) + assert isinstance(rd, dict) + assert round(rd[1][3], 5) == 1 + + +class TestEffectiveGraphResistance: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + + def setup_method(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=4) + self.G = G + + def test_effective_graph_resistance_directed_graph(self): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXNotImplemented): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_empty(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.effective_graph_resistance(G) + + def test_effective_graph_resistance_not_connected(self): + G = nx.Graph([(1, 2), (3, 4)]) + RG = nx.effective_graph_resistance(G) + assert np.isinf(RG) + + def test_effective_graph_resistance(self): + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + 4) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / 4) + rd23 = 1 / (1 / (2 + 4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_noinv(self): + RG = nx.effective_graph_resistance(self.G, "weight", False) + rd12 = 1 / (1 / (1 / 1 + 1 / 4) + 1 / (1 / 2)) + rd13 = 1 / (1 / (1 / 1 + 1 / 2) + 1 / (1 / 4)) + rd23 = 1 / (1 / (1 / 2 + 1 / 4) + 1 / (1 / 1)) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_no_weight(self): + RG = nx.effective_graph_resistance(self.G) + assert np.isclose(RG, 2) + + def test_effective_graph_resistance_neg_weight(self): + self.G[2][3]["weight"] = -4 + RG = nx.effective_graph_resistance(self.G, "weight", True) + rd12 = 1 / (1 / (1 + -4) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / (-4)) + rd23 = 1 / (1 / (2 + -4) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=1) + G.add_edge(2, 3, weight=3) + RG = nx.effective_graph_resistance(G, "weight", True) + edge23 = 1 / (1 / 1 + 1 / 3) + rd12 = 1 / (1 / (1 + edge23) + 1 / 2) + rd13 = 1 / (1 / (1 + 2) + 1 / edge23) + rd23 = 1 / (1 / (2 + edge23) + 1 / 1) + assert np.isclose(RG, rd12 + rd13 + rd23) + + def test_effective_graph_resistance_div0(self): + with pytest.raises(ZeroDivisionError): + self.G[1][2]["weight"] = 0 + 
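+            # As in the resistance_distance tests above, edge weights are
+            # read as resistances and inverted internally by default, so
+            # the zero weight set here divides by zero as expected.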
nx.effective_graph_resistance(self.G, "weight") + + def test_effective_graph_resistance_complete_graph(self): + N = 10 + G = nx.complete_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, N - 1) + + def test_effective_graph_resistance_path_graph(self): + N = 10 + G = nx.path_graph(N) + RG = nx.effective_graph_resistance(G) + assert np.isclose(RG, (N - 1) * N * (N + 1) // 6) + + +class TestBarycenter: + """Test :func:`networkx.algorithms.distance_measures.barycenter`.""" + + def barycenter_as_subgraph(self, g, **kwargs): + """Return the subgraph induced on the barycenter of g""" + b = nx.barycenter(g, **kwargs) + assert isinstance(b, list) + assert set(b) <= set(g) + return g.subgraph(b) + + def test_must_be_connected(self): + pytest.raises(nx.NetworkXNoPath, nx.barycenter, nx.empty_graph(5)) + + def test_sp_kwarg(self): + # Complete graph K_5. Normally it works... + K_5 = nx.complete_graph(5) + sp = dict(nx.shortest_path_length(K_5)) + assert nx.barycenter(K_5, sp=sp) == list(K_5) + + # ...but not with the weight argument + for u, v, data in K_5.edges.data(): + data["weight"] = 1 + pytest.raises(ValueError, nx.barycenter, K_5, sp=sp, weight="weight") + + # ...and a corrupted sp can make it seem like K_5 is disconnected + del sp[0][1] + pytest.raises(nx.NetworkXNoPath, nx.barycenter, K_5, sp=sp) + + def test_trees(self): + """The barycenter of a tree is a single vertex or an edge. + + See [West01]_, p. 78. + """ + prng = Random(0xDEADBEEF) + for i in range(50): + RT = nx.random_labeled_tree(prng.randint(1, 75), seed=prng) + b = self.barycenter_as_subgraph(RT) + if len(b) == 2: + assert b.size() == 1 + else: + assert len(b) == 1 + assert b.size() == 0 + + def test_this_one_specific_tree(self): + """Test the tree pictured at the bottom of [West01]_, p. 
78.""" + g = nx.Graph( + { + "a": ["b"], + "b": ["a", "x"], + "x": ["b", "y"], + "y": ["x", "z"], + "z": ["y", 0, 1, 2, 3, 4], + 0: ["z"], + 1: ["z"], + 2: ["z"], + 3: ["z"], + 4: ["z"], + } + ) + b = self.barycenter_as_subgraph(g, attr="barycentricity") + assert list(b) == ["z"] + assert not b.edges + expected_barycentricity = { + 0: 23, + 1: 23, + 2: 23, + 3: 23, + 4: 23, + "a": 35, + "b": 27, + "x": 21, + "y": 17, + "z": 15, + } + for node, barycentricity in expected_barycentricity.items(): + assert g.nodes[node]["barycentricity"] == barycentricity + + # Doubling weights should do nothing but double the barycentricities + for edge in g.edges: + g.edges[edge]["weight"] = 2 + b = self.barycenter_as_subgraph(g, weight="weight", attr="barycentricity2") + assert list(b) == ["z"] + assert not b.edges + for node, barycentricity in expected_barycentricity.items(): + assert g.nodes[node]["barycentricity2"] == barycentricity * 2 + + +class TestKemenyConstant: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + + def setup_method(self): + G = nx.Graph() + w12 = 2 + w13 = 3 + w23 = 4 + G.add_edge(1, 2, weight=w12) + G.add_edge(1, 3, weight=w13) + G.add_edge(2, 3, weight=w23) + self.G = G + + def test_kemeny_constant_directed(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(2, 3) + with pytest.raises(nx.NetworkXNotImplemented): + nx.kemeny_constant(G) + + def test_kemeny_constant_not_connected(self): + self.G.add_node(5) + with pytest.raises(nx.NetworkXError): + nx.kemeny_constant(self.G) + + def test_kemeny_constant_no_nodes(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXError): + nx.kemeny_constant(G) + + def test_kemeny_constant_negative_weight(self): + G = nx.Graph() + w12 = 2 + w13 = 3 + w23 = -10 + G.add_edge(1, 2, weight=w12) + G.add_edge(1, 3, weight=w13) + G.add_edge(2, 3, weight=w23) + with pytest.raises(nx.NetworkXError): + nx.kemeny_constant(G, weight="weight") + + def test_kemeny_constant(self): + K = nx.kemeny_constant(self.G, weight="weight") + w12 = 2 + w13 = 3 + w23 = 4 + test_data = ( + 3 + / 2 + * (w12 + w13) + * (w12 + w23) + * (w13 + w23) + / ( + w12**2 * (w13 + w23) + + w13**2 * (w12 + w23) + + w23**2 * (w12 + w13) + + 3 * w12 * w13 * w23 + ) + ) + assert np.isclose(K, test_data) + + def test_kemeny_constant_no_weight(self): + K = nx.kemeny_constant(self.G) + assert np.isclose(K, 4 / 3) + + def test_kemeny_constant_multigraph(self): + G = nx.MultiGraph() + w12_1 = 2 + w12_2 = 1 + w13 = 3 + w23 = 4 + G.add_edge(1, 2, weight=w12_1) + G.add_edge(1, 2, weight=w12_2) + G.add_edge(1, 3, weight=w13) + G.add_edge(2, 3, weight=w23) + K = nx.kemeny_constant(G, weight="weight") + w12 = w12_1 + w12_2 + test_data = ( + 3 + / 2 + * (w12 + w13) + * (w12 + w23) + * (w13 + w23) + / ( + w12**2 * (w13 + w23) + + w13**2 * (w12 + w23) + + w23**2 * (w12 + w13) + + 3 * w12 * w13 * w23 + ) + ) + assert np.isclose(K, test_data) + + def test_kemeny_constant_weight0(self): + G = nx.Graph() + w12 = 0 + w13 = 3 + w23 = 4 + G.add_edge(1, 2, weight=w12) + G.add_edge(1, 3, weight=w13) + G.add_edge(2, 3, weight=w23) + K = nx.kemeny_constant(G, weight="weight") + test_data = ( + 3 + / 2 + * (w12 + w13) + * (w12 + w23) + * (w13 + w23) + / ( + w12**2 * (w13 + w23) + + w13**2 * (w12 + w23) + + w23**2 * (w12 + w13) + + 3 * w12 * w13 * w23 + ) + ) + assert np.isclose(K, test_data) + + def test_kemeny_constant_selfloop(self): + G = nx.Graph() + w11 = 1 + w12 = 2 + w13 = 3 + w23 = 4 + G.add_edge(1, 1, 
weight=w11) + G.add_edge(1, 2, weight=w12) + G.add_edge(1, 3, weight=w13) + G.add_edge(2, 3, weight=w23) + K = nx.kemeny_constant(G, weight="weight") + test_data = ( + (2 * w11 + 3 * w12 + 3 * w13) + * (w12 + w23) + * (w13 + w23) + / ( + (w12 * w13 + w12 * w23 + w13 * w23) + * (w11 + 2 * w12 + 2 * w13 + 2 * w23) + ) + ) + assert np.isclose(K, test_data) + + def test_kemeny_constant_complete_bipartite_graph(self): + # Theorem 1 in https://www.sciencedirect.com/science/article/pii/S0166218X20302912 + n1 = 5 + n2 = 4 + G = nx.complete_bipartite_graph(n1, n2) + K = nx.kemeny_constant(G) + assert np.isclose(K, n1 + n2 - 3 / 2) + + def test_kemeny_constant_path_graph(self): + # Theorem 2 in https://www.sciencedirect.com/science/article/pii/S0166218X20302912 + n = 10 + G = nx.path_graph(n) + K = nx.kemeny_constant(G) + assert np.isclose(K, n**2 / 3 - 2 * n / 3 + 1 / 2) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py new file mode 100644 index 0000000..545fb6d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_distance_regular.py @@ -0,0 +1,85 @@ +import pytest + +import networkx as nx +from networkx import is_strongly_regular + + +@pytest.mark.parametrize( + "f", (nx.is_distance_regular, nx.intersection_array, nx.is_strongly_regular) +) +@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph)) +def test_raises_on_directed_and_multigraphs(f, graph_constructor): + G = graph_constructor([(0, 1), (1, 2)]) + with pytest.raises(nx.NetworkXNotImplemented): + f(G) + + +class TestDistanceRegular: + def test_is_distance_regular(self): + assert nx.is_distance_regular(nx.icosahedral_graph()) + assert nx.is_distance_regular(nx.petersen_graph()) + assert nx.is_distance_regular(nx.cubical_graph()) + assert nx.is_distance_regular(nx.complete_bipartite_graph(3, 3)) + assert nx.is_distance_regular(nx.tetrahedral_graph()) + assert nx.is_distance_regular(nx.dodecahedral_graph()) + assert nx.is_distance_regular(nx.pappus_graph()) + assert nx.is_distance_regular(nx.heawood_graph()) + assert nx.is_distance_regular(nx.cycle_graph(3)) + # no distance regular + assert not nx.is_distance_regular(nx.path_graph(4)) + + def test_not_connected(self): + G = nx.cycle_graph(4) + nx.add_cycle(G, [5, 6, 7]) + assert not nx.is_distance_regular(G) + + def test_global_parameters(self): + b, c = nx.intersection_array(nx.cycle_graph(5)) + g = nx.global_parameters(b, c) + assert list(g) == [(0, 0, 2), (1, 0, 1), (1, 1, 0)] + b, c = nx.intersection_array(nx.cycle_graph(3)) + g = nx.global_parameters(b, c) + assert list(g) == [(0, 0, 2), (1, 1, 0)] + + def test_intersection_array(self): + b, c = nx.intersection_array(nx.cycle_graph(5)) + assert b == [2, 1] + assert c == [1, 1] + b, c = nx.intersection_array(nx.dodecahedral_graph()) + assert b == [3, 2, 1, 1, 1] + assert c == [1, 1, 1, 2, 3] + b, c = nx.intersection_array(nx.icosahedral_graph()) + assert b == [5, 2, 1] + assert c == [1, 2, 5] + + +@pytest.mark.parametrize("f", (nx.is_distance_regular, nx.is_strongly_regular)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"): + f(G) + + +class TestStronglyRegular: + """Unit tests for the :func:`~networkx.is_strongly_regular` + function. + + """ + + def test_cycle_graph(self): + """Tests that the cycle graph on five vertices is strongly + regular. 
+ + """ + G = nx.cycle_graph(5) + assert is_strongly_regular(G) + + def test_petersen_graph(self): + """Tests that the Petersen graph is strongly regular.""" + G = nx.petersen_graph() + assert is_strongly_regular(G) + + def test_path_graph(self): + """Tests that the path graph is not strongly regular.""" + G = nx.path_graph(4) + assert not is_strongly_regular(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py new file mode 100644 index 0000000..e79807f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominance.py @@ -0,0 +1,286 @@ +import pytest + +import networkx as nx + + +class TestImmediateDominators: + def test_exceptions(self): + G = nx.Graph() + G.add_node(0) + pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) + G = nx.MultiGraph(G) + pytest.raises(nx.NetworkXNotImplemented, nx.immediate_dominators, G, 0) + G = nx.DiGraph([[0, 0]]) + pytest.raises(nx.NetworkXError, nx.immediate_dominators, G, 1) + + def test_singleton(self): + G = nx.DiGraph() + G.add_node(0) + assert nx.immediate_dominators(G, 0) == {0: 0} + G.add_edge(0, 0) + assert nx.immediate_dominators(G, 0) == {0: 0} + + def test_path(self): + n = 5 + G = nx.path_graph(n, create_using=nx.DiGraph()) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)} + + def test_cycle(self): + n = 5 + G = nx.cycle_graph(n, create_using=nx.DiGraph()) + assert nx.immediate_dominators(G, 0) == {i: max(i - 1, 0) for i in range(n)} + + def test_unreachable(self): + n = 5 + assert n > 1 + G = nx.path_graph(n, create_using=nx.DiGraph()) + assert nx.immediate_dominators(G, n // 2) == { + i: max(i - 1, n // 2) for i in range(n // 2, n) + } + + def test_irreducible1(self): + """ + Graph taken from figure 2 of "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] + G = nx.DiGraph(edges) + assert nx.immediate_dominators(G, 5) == dict.fromkeys(range(1, 6), 5) + + def test_irreducible2(self): + """ + Graph taken from figure 4 of "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] + G = nx.DiGraph(edges) + result = nx.immediate_dominators(G, 6) + assert result == dict.fromkeys(range(1, 7), 6) + + def test_domrel_png(self): + # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png + edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] + G = nx.DiGraph(edges) + result = nx.immediate_dominators(G, 1) + assert result == {1: 1, 2: 1, 3: 2, 4: 2, 5: 2, 6: 2} + # Test postdominance. + result = nx.immediate_dominators(G.reverse(copy=False), 6) + assert result == {1: 2, 2: 6, 3: 5, 4: 5, 5: 2, 6: 6} + + def test_boost_example(self): + # Graph taken from Figure 1 of + # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] + G = nx.DiGraph(edges) + result = nx.immediate_dominators(G, 0) + assert result == {0: 0, 1: 0, 2: 1, 3: 1, 4: 3, 5: 4, 6: 4, 7: 1} + # Test postdominance. 
+ result = nx.immediate_dominators(G.reverse(copy=False), 7) + assert result == {0: 1, 1: 7, 2: 7, 3: 4, 4: 5, 5: 7, 6: 4, 7: 7} + + +class TestDominanceFrontiers: + def test_exceptions(self): + G = nx.Graph() + G.add_node(0) + pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) + G = nx.MultiGraph(G) + pytest.raises(nx.NetworkXNotImplemented, nx.dominance_frontiers, G, 0) + G = nx.DiGraph([[0, 0]]) + pytest.raises(nx.NetworkXError, nx.dominance_frontiers, G, 1) + + def test_singleton(self): + G = nx.DiGraph() + G.add_node(0) + assert nx.dominance_frontiers(G, 0) == {0: set()} + G.add_edge(0, 0) + assert nx.dominance_frontiers(G, 0) == {0: set()} + + def test_path(self): + n = 5 + G = nx.path_graph(n, create_using=nx.DiGraph()) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} + + def test_cycle(self): + n = 5 + G = nx.cycle_graph(n, create_using=nx.DiGraph()) + assert nx.dominance_frontiers(G, 0) == {i: set() for i in range(n)} + + def test_unreachable(self): + n = 5 + assert n > 1 + G = nx.path_graph(n, create_using=nx.DiGraph()) + assert nx.dominance_frontiers(G, n // 2) == {i: set() for i in range(n // 2, n)} + + def test_irreducible1(self): + """ + Graph taken from figure 2 of "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + edges = [(1, 2), (2, 1), (3, 2), (4, 1), (5, 3), (5, 4)] + G = nx.DiGraph(edges) + assert nx.dominance_frontiers(G, 5) == { + 1: {2}, + 2: {1}, + 3: {2}, + 4: {1}, + 5: set(), + } + + def test_irreducible2(self): + """ + Graph taken from figure 4 of "A simple, fast dominance algorithm." (2006). + https://hdl.handle.net/1911/96345 + """ + edges = [(1, 2), (2, 1), (2, 3), (3, 2), (4, 2), (4, 3), (5, 1), (6, 4), (6, 5)] + G = nx.DiGraph(edges) + assert nx.dominance_frontiers(G, 6) == { + 1: {2}, + 2: {1, 3}, + 3: {2}, + 4: {2, 3}, + 5: {1}, + 6: set(), + } + + def test_domrel_png(self): + # Graph taken from https://commons.wikipedia.org/wiki/File:Domrel.png + edges = [(1, 2), (2, 3), (2, 4), (2, 6), (3, 5), (4, 5), (5, 2)] + G = nx.DiGraph(edges) + assert nx.dominance_frontiers(G, 1) == { + 1: set(), + 2: {2}, + 3: {5}, + 4: {5}, + 5: {2}, + 6: set(), + } + # Test postdominance. + result = nx.dominance_frontiers(G.reverse(copy=False), 6) + assert result == {1: set(), 2: {2}, 3: {2}, 4: {2}, 5: {2}, 6: set()} + + def test_boost_example(self): + # Graph taken from Figure 1 of + # http://www.boost.org/doc/libs/1_56_0/libs/graph/doc/lengauer_tarjan_dominator.htm + edges = [(0, 1), (1, 2), (1, 3), (2, 7), (3, 4), (4, 5), (4, 6), (5, 7), (6, 4)] + G = nx.DiGraph(edges) + assert nx.dominance_frontiers(G, 0) == { + 0: set(), + 1: set(), + 2: {7}, + 3: {7}, + 4: {4, 7}, + 5: {7}, + 6: {4}, + 7: set(), + } + # Test postdominance. 
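+ # As above, postdominance frontiers are the dominance frontiers of the
+ # reversed view of G, rooted at the exit node 7.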
+ result = nx.dominance_frontiers(G.reverse(copy=False), 7) + expected = { + 0: set(), + 1: set(), + 2: {1}, + 3: {1}, + 4: {1, 4}, + 5: {1}, + 6: {4}, + 7: set(), + } + assert result == expected + + def test_discard_issue(self): + # https://github.com/networkx/networkx/issues/2071 + g = nx.DiGraph() + g.add_edges_from( + [ + ("b0", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "b1"), + ("b1", "b5"), + ("b5", "b6"), + ("b5", "b8"), + ("b6", "b7"), + ("b8", "b7"), + ("b7", "b3"), + ("b3", "b4"), + ] + ) + df = nx.dominance_frontiers(g, "b0") + assert df == { + "b4": set(), + "b5": {"b3"}, + "b6": {"b7"}, + "b7": {"b3"}, + "b0": set(), + "b1": {"b1"}, + "b2": {"b3"}, + "b3": {"b1"}, + "b8": {"b7"}, + } + + def test_loop(self): + g = nx.DiGraph() + g.add_edges_from([("a", "b"), ("b", "c"), ("b", "a")]) + df = nx.dominance_frontiers(g, "a") + assert df == {"a": set(), "b": set(), "c": set()} + + def test_missing_immediate_doms(self): + # see https://github.com/networkx/networkx/issues/2070 + g = nx.DiGraph() + edges = [ + ("entry_1", "b1"), + ("b1", "b2"), + ("b2", "b3"), + ("b3", "exit"), + ("entry_2", "b3"), + ] + + # entry_1 + # | + # b1 + # | + # b2 entry_2 + # | / + # b3 + # | + # exit + + g.add_edges_from(edges) + # formerly raised KeyError on entry_2 when parsing b3 + # because entry_2 does not have immediate doms (no path) + nx.dominance_frontiers(g, "entry_1") + + def test_loops_larger(self): + # from + # http://ecee.colorado.edu/~waite/Darmstadt/motion.html + g = nx.DiGraph() + edges = [ + ("entry", "exit"), + ("entry", "1"), + ("1", "2"), + ("2", "3"), + ("3", "4"), + ("4", "5"), + ("5", "6"), + ("6", "exit"), + ("6", "2"), + ("5", "3"), + ("4", "4"), + ] + + g.add_edges_from(edges) + df = nx.dominance_frontiers(g, "entry") + answer = { + "entry": set(), + "1": {"exit"}, + "2": {"exit", "2"}, + "3": {"exit", "3", "2"}, + "4": {"exit", "4", "3", "2"}, + "5": {"exit", "3", "2"}, + "6": {"exit", "2"}, + "exit": set(), + } + for n in df: + assert set(df[n]) == set(answer[n]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py new file mode 100644 index 0000000..5f51777 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_dominating.py @@ -0,0 +1,115 @@ +import pytest + +import networkx as nx + + +def test_dominating_set(): + G = nx.gnp_random_graph(100, 0.1) + D = nx.dominating_set(G) + assert nx.is_dominating_set(G, D) + D = nx.dominating_set(G, start_with=0) + assert nx.is_dominating_set(G, D) + + +def test_complete(): + """In complete graphs each node is a dominating set. + Thus the dominating set has to be of cardinality 1. 
+ """ + K4 = nx.complete_graph(4) + assert len(nx.dominating_set(K4)) == 1 + K5 = nx.complete_graph(5) + assert len(nx.dominating_set(K5)) == 1 + + +def test_raise_dominating_set(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + D = nx.dominating_set(G, start_with=10) + + +def test_is_dominating_set(): + G = nx.path_graph(4) + d = {1, 3} + assert nx.is_dominating_set(G, d) + d = {0, 2} + assert nx.is_dominating_set(G, d) + d = {1} + assert not nx.is_dominating_set(G, d) + + +def test_wikipedia_is_dominating_set(): + """Example from https://en.wikipedia.org/wiki/Dominating_set""" + G = nx.cycle_graph(4) + G.add_edges_from([(0, 4), (1, 4), (2, 5)]) + assert nx.is_dominating_set(G, {4, 3, 5}) + assert nx.is_dominating_set(G, {0, 2}) + assert nx.is_dominating_set(G, {1, 2}) + + +def test_is_connected_dominating_set(): + G = nx.path_graph(4) + D = {1, 2} + assert nx.is_connected_dominating_set(G, D) + D = {1, 3} + assert not nx.is_connected_dominating_set(G, D) + D = {2, 3} + assert nx.is_connected(nx.subgraph(G, D)) + assert not nx.is_connected_dominating_set(G, D) + + +def test_null_graph_connected_dominating_set(): + G = nx.Graph() + assert 0 == len(nx.connected_dominating_set(G)) + + +def test_single_node_graph_connected_dominating_set(): + G = nx.Graph() + G.add_node(1) + CD = nx.connected_dominating_set(G) + assert nx.is_connected_dominating_set(G, CD) + + +def test_raise_disconnected_graph_connected_dominating_set(): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + nx.connected_dominating_set(G) + + +def test_complete_graph_connected_dominating_set(): + K5 = nx.complete_graph(5) + assert 1 == len(nx.connected_dominating_set(K5)) + K7 = nx.complete_graph(7) + assert 1 == len(nx.connected_dominating_set(K7)) + + +def test_docstring_example_connected_dominating_set(): + G = nx.Graph( + [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 6), + (2, 7), + (3, 8), + (4, 9), + (5, 10), + (6, 11), + (7, 12), + (8, 12), + (9, 12), + (10, 12), + (11, 12), + ] + ) + assert {1, 2, 3, 4, 5, 6, 7} == nx.connected_dominating_set(G) + + +@pytest.mark.parametrize("seed", [1, 13, 29]) +@pytest.mark.parametrize("n,k,p", [(10, 3, 0.2), (100, 10, 0.7), (1000, 50, 0.5)]) +def test_connected_watts_strogatz_graph_connected_dominating_set(n, k, p, seed): + G = nx.connected_watts_strogatz_graph(n, k, p, seed=seed) + D = nx.connected_dominating_set(G) + assert nx.is_connected_dominating_set(G, D) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py new file mode 100644 index 0000000..9a2e7d0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_efficiency.py @@ -0,0 +1,58 @@ +"""Unit tests for the :mod:`networkx.algorithms.efficiency` module.""" + +import networkx as nx + + +class TestEfficiency: + def setup_method(self): + # G1 is a disconnected graph + self.G1 = nx.Graph() + self.G1.add_nodes_from([1, 2, 3]) + # G2 is a cycle graph + self.G2 = nx.cycle_graph(4) + # G3 is the triangle graph with one additional edge + self.G3 = nx.lollipop_graph(3, 1) + + def test_efficiency_disconnected_nodes(self): + """ + When nodes are disconnected, efficiency is 0 + """ + assert nx.efficiency(self.G1, 1, 2) == 0 + + def test_local_efficiency_disconnected_graph(self): + """ + In a disconnected graph the efficiency is 0 + """ + assert nx.local_efficiency(self.G1) == 0 + + def test_efficiency(self): + assert 
nx.efficiency(self.G2, 0, 1) == 1 + assert nx.efficiency(self.G2, 0, 2) == 1 / 2 + + def test_global_efficiency(self): + assert nx.global_efficiency(self.G2) == 5 / 6 + + def test_global_efficiency_complete_graph(self): + """ + Tests that the average global efficiency of the complete graph is one. + """ + for n in range(2, 10): + G = nx.complete_graph(n) + assert nx.global_efficiency(G) == 1 + + def test_local_efficiency_complete_graph(self): + """ + Test that the local efficiency for a complete graph with at least 3 + nodes should be one. For a graph with only 2 nodes, the induced + subgraph has no edges. + """ + for n in range(3, 10): + G = nx.complete_graph(n) + assert nx.local_efficiency(G) == 1 + + def test_using_ego_graph(self): + """ + Test that the ego graph is used when computing local efficiency. + For more information, see GitHub issue #2710. + """ + assert nx.local_efficiency(self.G3) == 7 / 12 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py new file mode 100644 index 0000000..b5871f0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_euler.py @@ -0,0 +1,314 @@ +import collections + +import pytest + +import networkx as nx + + +@pytest.mark.parametrize("f", (nx.is_eulerian, nx.is_semieulerian)) +def test_empty_graph_raises(f): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept, match="Connectivity is undefined"): + f(G) + + +class TestIsEulerian: + def test_is_eulerian(self): + assert nx.is_eulerian(nx.complete_graph(5)) + assert nx.is_eulerian(nx.complete_graph(7)) + assert nx.is_eulerian(nx.hypercube_graph(4)) + assert nx.is_eulerian(nx.hypercube_graph(6)) + + assert not nx.is_eulerian(nx.complete_graph(4)) + assert not nx.is_eulerian(nx.complete_graph(6)) + assert not nx.is_eulerian(nx.hypercube_graph(3)) + assert not nx.is_eulerian(nx.hypercube_graph(5)) + + assert not nx.is_eulerian(nx.petersen_graph()) + assert not nx.is_eulerian(nx.path_graph(4)) + + def test_is_eulerian2(self): + # not connected + G = nx.Graph() + G.add_nodes_from([1, 2, 3]) + assert not nx.is_eulerian(G) + # not strongly connected + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3]) + assert not nx.is_eulerian(G) + G = nx.MultiDiGraph() + G.add_edge(1, 2) + G.add_edge(2, 3) + G.add_edge(2, 3) + G.add_edge(3, 1) + assert not nx.is_eulerian(G) + + +class TestEulerianCircuit: + def test_eulerian_circuit_cycle(self): + G = nx.cycle_graph(4) + + edges = list(nx.eulerian_circuit(G, source=0)) + nodes = [u for u, v in edges] + assert nodes == [0, 3, 2, 1] + assert edges == [(0, 3), (3, 2), (2, 1), (1, 0)] + + edges = list(nx.eulerian_circuit(G, source=1)) + nodes = [u for u, v in edges] + assert nodes == [1, 2, 3, 0] + assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)] + + G = nx.complete_graph(3) + + edges = list(nx.eulerian_circuit(G, source=0)) + nodes = [u for u, v in edges] + assert nodes == [0, 2, 1] + assert edges == [(0, 2), (2, 1), (1, 0)] + + edges = list(nx.eulerian_circuit(G, source=1)) + nodes = [u for u, v in edges] + assert nodes == [1, 2, 0] + assert edges == [(1, 2), (2, 0), (0, 1)] + + def test_eulerian_circuit_digraph(self): + G = nx.DiGraph() + nx.add_cycle(G, [0, 1, 2, 3]) + + edges = list(nx.eulerian_circuit(G, source=0)) + nodes = [u for u, v in edges] + assert nodes == [0, 1, 2, 3] + assert edges == [(0, 1), (1, 2), (2, 3), (3, 0)] + + edges = list(nx.eulerian_circuit(G, source=1)) + nodes = [u for u, v in edges] + assert nodes 
== [1, 2, 3, 0] + assert edges == [(1, 2), (2, 3), (3, 0), (0, 1)] + + def test_multigraph(self): + G = nx.MultiGraph() + nx.add_cycle(G, [0, 1, 2, 3]) + G.add_edge(1, 2) + G.add_edge(1, 2) + edges = list(nx.eulerian_circuit(G, source=0)) + nodes = [u for u, v in edges] + assert nodes == [0, 3, 2, 1, 2, 1] + assert edges == [(0, 3), (3, 2), (2, 1), (1, 2), (2, 1), (1, 0)] + + def test_multigraph_with_keys(self): + G = nx.MultiGraph() + nx.add_cycle(G, [0, 1, 2, 3]) + G.add_edge(1, 2) + G.add_edge(1, 2) + edges = list(nx.eulerian_circuit(G, source=0, keys=True)) + nodes = [u for u, v, k in edges] + assert nodes == [0, 3, 2, 1, 2, 1] + assert edges[:2] == [(0, 3, 0), (3, 2, 0)] + assert collections.Counter(edges[2:5]) == collections.Counter( + [(2, 1, 0), (1, 2, 1), (2, 1, 2)] + ) + assert edges[5:] == [(1, 0, 0)] + + def test_not_eulerian(self): + with pytest.raises(nx.NetworkXError): + f = list(nx.eulerian_circuit(nx.complete_graph(4))) + + +class TestIsSemiEulerian: + def test_is_semieulerian(self): + # Test graphs with Eulerian paths but no cycles return True. + assert nx.is_semieulerian(nx.path_graph(4)) + G = nx.path_graph(6, create_using=nx.DiGraph) + assert nx.is_semieulerian(G) + + # Test graphs with Eulerian cycles return False. + assert not nx.is_semieulerian(nx.complete_graph(5)) + assert not nx.is_semieulerian(nx.complete_graph(7)) + assert not nx.is_semieulerian(nx.hypercube_graph(4)) + assert not nx.is_semieulerian(nx.hypercube_graph(6)) + + +class TestHasEulerianPath: + def test_has_eulerian_path_cyclic(self): + # Test graphs with Eulerian cycles return True. + assert nx.has_eulerian_path(nx.complete_graph(5)) + assert nx.has_eulerian_path(nx.complete_graph(7)) + assert nx.has_eulerian_path(nx.hypercube_graph(4)) + assert nx.has_eulerian_path(nx.hypercube_graph(6)) + + def test_has_eulerian_path_non_cyclic(self): + # Test graphs with Eulerian paths but no cycles return True. + assert nx.has_eulerian_path(nx.path_graph(4)) + G = nx.path_graph(6, create_using=nx.DiGraph) + assert nx.has_eulerian_path(G) + + def test_has_eulerian_path_directed_graph(self): + # Test directed graphs and returns False + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2), (0, 2)]) + assert not nx.has_eulerian_path(G) + + # Test directed graphs without isolated node returns True + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 0)]) + assert nx.has_eulerian_path(G) + + # Test directed graphs with isolated node returns False + G.add_node(3) + assert not nx.has_eulerian_path(G) + + @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) + def test_has_eulerian_path_not_weakly_connected(self, G): + G.add_edges_from([(0, 1), (2, 3), (3, 2)]) + assert not nx.has_eulerian_path(G) + + @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) + def test_has_eulerian_path_unbalancedins_more_than_one(self, G): + G.add_edges_from([(0, 1), (2, 3)]) + assert not nx.has_eulerian_path(G) + + +class TestFindPathStart: + def testfind_path_start(self): + find_path_start = nx.algorithms.euler._find_path_start + # Test digraphs return correct starting node. + G = nx.path_graph(6, create_using=nx.DiGraph) + assert find_path_start(G) == 0 + edges = [(0, 1), (1, 2), (2, 0), (4, 0)] + assert find_path_start(nx.DiGraph(edges)) == 4 + + # Test graph with no Eulerian path return None. 
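+ # Nodes 0 and 2 both have out_degree - in_degree == 1 (and nodes 3 and 4
+ # both have in_degree - out_degree == 1), so no Eulerian path exists and
+ # there is no valid starting node.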
+ edges = [(0, 1), (1, 2), (2, 3), (2, 4)] + assert find_path_start(nx.DiGraph(edges)) is None + + +class TestEulerianPath: + def test_eulerian_path(self): + x = [(4, 0), (0, 1), (1, 2), (2, 0)] + for e1, e2 in zip(x, nx.eulerian_path(nx.DiGraph(x))): + assert e1 == e2 + + def test_eulerian_path_straight_link(self): + G = nx.DiGraph() + result = [(1, 2), (2, 3), (3, 4), (4, 5)] + G.add_edges_from(result) + assert result == list(nx.eulerian_path(G)) + assert result == list(nx.eulerian_path(G, source=1)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=3)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=4)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=5)) + + def test_eulerian_path_multigraph(self): + G = nx.MultiDiGraph() + result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4), (4, 3)] + G.add_edges_from(result) + assert result == list(nx.eulerian_path(G)) + assert result == list(nx.eulerian_path(G, source=2)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=3)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=4)) + + def test_eulerian_path_eulerian_circuit(self): + G = nx.DiGraph() + result = [(1, 2), (2, 3), (3, 4), (4, 1)] + result2 = [(2, 3), (3, 4), (4, 1), (1, 2)] + result3 = [(3, 4), (4, 1), (1, 2), (2, 3)] + G.add_edges_from(result) + assert result == list(nx.eulerian_path(G)) + assert result == list(nx.eulerian_path(G, source=1)) + assert result2 == list(nx.eulerian_path(G, source=2)) + assert result3 == list(nx.eulerian_path(G, source=3)) + + def test_eulerian_path_undirected(self): + G = nx.Graph() + result = [(1, 2), (2, 3), (3, 4), (4, 5)] + result2 = [(5, 4), (4, 3), (3, 2), (2, 1)] + G.add_edges_from(result) + assert list(nx.eulerian_path(G)) in (result, result2) + assert result == list(nx.eulerian_path(G, source=1)) + assert result2 == list(nx.eulerian_path(G, source=5)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=3)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=2)) + + def test_eulerian_path_multigraph_undirected(self): + G = nx.MultiGraph() + result = [(2, 1), (1, 2), (2, 1), (1, 2), (2, 3), (3, 4)] + G.add_edges_from(result) + assert result == list(nx.eulerian_path(G)) + assert result == list(nx.eulerian_path(G, source=2)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=3)) + with pytest.raises(nx.NetworkXError): + list(nx.eulerian_path(G, source=1)) + + @pytest.mark.parametrize( + ("graph_type", "result"), + ( + (nx.MultiGraph, [(0, 1, 0), (1, 0, 1)]), + (nx.MultiDiGraph, [(0, 1, 0), (1, 0, 0)]), + ), + ) + def test_eulerian_with_keys(self, graph_type, result): + G = graph_type([(0, 1), (1, 0)]) + answer = nx.eulerian_path(G, keys=True) + assert list(answer) == result + + +class TestEulerize: + def test_disconnected(self): + with pytest.raises(nx.NetworkXError): + G = nx.from_edgelist([(0, 1), (2, 3)]) + nx.eulerize(G) + + def test_null_graph(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.eulerize(nx.Graph()) + + def test_null_multigraph(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.eulerize(nx.MultiGraph()) + + def test_on_empty_graph(self): + with pytest.raises(nx.NetworkXError): + nx.eulerize(nx.empty_graph(3)) + + def test_on_eulerian(self): + G = nx.cycle_graph(3) + H = nx.eulerize(G) + assert nx.is_isomorphic(G, H) + + def test_on_eulerian_multigraph(self): + G = nx.MultiGraph(nx.cycle_graph(3)) + G.add_edge(0, 1) + H 
= nx.eulerize(G) + assert nx.is_eulerian(H) + + def test_on_complete_graph(self): + G = nx.complete_graph(4) + assert nx.is_eulerian(nx.eulerize(G)) + assert nx.is_eulerian(nx.eulerize(nx.MultiGraph(G))) + + def test_on_non_eulerian_graph(self): + G = nx.cycle_graph(18) + G.add_edge(0, 18) + G.add_edge(18, 19) + G.add_edge(17, 19) + G.add_edge(4, 20) + G.add_edge(20, 21) + G.add_edge(21, 22) + G.add_edge(22, 23) + G.add_edge(23, 24) + G.add_edge(24, 25) + G.add_edge(25, 26) + G.add_edge(26, 27) + G.add_edge(27, 28) + G.add_edge(28, 13) + assert not nx.is_eulerian(G) + G = nx.eulerize(G) + assert nx.is_eulerian(G) + assert nx.number_of_edges(G) == 39 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py new file mode 100644 index 0000000..6c90c8f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graph_hashing.py @@ -0,0 +1,872 @@ +import copy + +import pytest + +import networkx as nx + +# Unit tests relevant for both functions in this module. + + +def test_positive_iters(): + G1 = nx.empty_graph() + with pytest.raises( + ValueError, + match="The WL algorithm requires that `iterations` be positive", + ): + nx.weisfeiler_lehman_graph_hash(G1, iterations=-3) + with pytest.raises( + ValueError, + match="The WL algorithm requires that `iterations` be positive", + ): + nx.weisfeiler_lehman_subgraph_hashes(G1, iterations=-3) + with pytest.raises( + ValueError, + match="The WL algorithm requires that `iterations` be positive", + ): + nx.weisfeiler_lehman_graph_hash(G1, iterations=0) + with pytest.raises( + ValueError, + match="The WL algorithm requires that `iterations` be positive", + ): + nx.weisfeiler_lehman_subgraph_hashes(G1, iterations=0) + + +# Unit tests for the :func:`~networkx.weisfeiler_lehman_graph_hash` function + + +def test_empty_graph_hash(): + """ + empty graphs should give hashes regardless of other params + """ + G1 = nx.empty_graph() + G2 = nx.empty_graph() + + h1 = nx.weisfeiler_lehman_graph_hash(G1) + h2 = nx.weisfeiler_lehman_graph_hash(G2) + h3 = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="edge_attr1") + h4 = nx.weisfeiler_lehman_graph_hash(G2, node_attr="node_attr1") + h5 = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="edge_attr1", node_attr="node_attr1" + ) + h6 = nx.weisfeiler_lehman_graph_hash(G2, iterations=10) + + assert h1 == h2 + assert h1 == h3 + assert h1 == h4 + assert h1 == h5 + assert h1 == h6 + + +def test_directed(): + """ + A directed graph with no bi-directional edges should yield a different graph hash + to the same graph taken as undirected if there are no hash collisions. + """ + r = 10 + for i in range(r): + G_directed = nx.gn_graph(10 + r, seed=100 + i) + G_undirected = nx.to_undirected(G_directed) + + h_directed = nx.weisfeiler_lehman_graph_hash(G_directed) + h_undirected = nx.weisfeiler_lehman_graph_hash(G_undirected) + + assert h_directed != h_undirected + + +def test_reversed(): + """ + A directed graph with no bi-directional edges should yield a different graph hash + to the same graph taken with edge directions reversed if there are no hash + collisions.
Here we test a cycle graph which is the minimal counterexample + """ + G = nx.cycle_graph(5, create_using=nx.DiGraph) + nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label") + + G_reversed = G.reverse() + + h = nx.weisfeiler_lehman_graph_hash(G, node_attr="label") + h_reversed = nx.weisfeiler_lehman_graph_hash(G_reversed, node_attr="label") + + assert h != h_reversed + + +def test_isomorphic(): + """ + graph hashes should be invariant to node-relabeling (when the output is reindexed + by the same mapping) + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i) + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g1_hash = nx.weisfeiler_lehman_graph_hash(G1) + g2_hash = nx.weisfeiler_lehman_graph_hash(G2) + + assert g1_hash == g2_hash + + +def test_isomorphic_edge_attr(): + """ + Isomorphic graphs with differing edge attributes should yield different graph + hashes if the 'edge_attr' argument is supplied and populated in the graph, + and there are no hash collisions. + The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i) + + for a, b in G1.edges: + G1[a][b]["edge_attr1"] = f"{a}-{b}-1" + G1[a][b]["edge_attr2"] = f"{a}-{b}-2" + + g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="edge_attr1" + ) + g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="edge_attr2" + ) + g1_hash_no_edge_attr = nx.weisfeiler_lehman_graph_hash(G1, edge_attr=None) + + assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr + assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr + assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="edge_attr1" + ) + g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="edge_attr2" + ) + + assert g1_hash_with_edge_attr1 == g2_hash_with_edge_attr1 + assert g1_hash_with_edge_attr2 == g2_hash_with_edge_attr2 + + +def test_missing_edge_attr(): + """ + If the 'edge_attr' argument is supplied but is missing from an edge in the graph, + we should raise a KeyError + """ + G = nx.Graph() + G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})]) + pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, edge_attr="edge_attr1") + + +def test_isomorphic_node_attr(): + """ + Isomorphic graphs with differing node attributes should yield different graph + hashes if the 'node_attr' argument is supplied and populated in the graph, and + there are no hash collisions. 
+ The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i) + + for u in G1.nodes(): + G1.nodes[u]["node_attr1"] = f"{u}-1" + G1.nodes[u]["node_attr2"] = f"{u}-2" + + g1_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash( + G1, node_attr="node_attr1" + ) + g1_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash( + G1, node_attr="node_attr2" + ) + g1_hash_no_node_attr = nx.weisfeiler_lehman_graph_hash(G1, node_attr=None) + + assert g1_hash_with_node_attr1 != g1_hash_no_node_attr + assert g1_hash_with_node_attr2 != g1_hash_no_node_attr + assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_with_node_attr1 = nx.weisfeiler_lehman_graph_hash( + G2, node_attr="node_attr1" + ) + g2_hash_with_node_attr2 = nx.weisfeiler_lehman_graph_hash( + G2, node_attr="node_attr2" + ) + + assert g1_hash_with_node_attr1 == g2_hash_with_node_attr1 + assert g1_hash_with_node_attr2 == g2_hash_with_node_attr2 + + +def test_missing_node_attr(): + """ + If the 'node_attr' argument is supplied but is missing from a node in the graph, + we should raise a KeyError + """ + G = nx.Graph() + G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})]) + G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)]) + pytest.raises(KeyError, nx.weisfeiler_lehman_graph_hash, G, node_attr="node_attr1") + + +def test_isomorphic_edge_attr_and_node_attr(): + """ + Isomorphic graphs with differing node attributes should yield different graph + hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in + the graph, and there are no hash collisions. + The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i) + + for u in G1.nodes(): + G1.nodes[u]["node_attr1"] = f"{u}-1" + G1.nodes[u]["node_attr2"] = f"{u}-2" + + for a, b in G1.edges: + G1[a][b]["edge_attr1"] = f"{a}-{b}-1" + G1[a][b]["edge_attr2"] = f"{a}-{b}-2" + + g1_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="edge_attr1", node_attr="node_attr1" + ) + g1_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="edge_attr2", node_attr="node_attr2" + ) + g1_hash_edge1_node2 = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="edge_attr1", node_attr="node_attr2" + ) + g1_hash_no_attr = nx.weisfeiler_lehman_graph_hash(G1) + + assert g1_hash_edge1_node1 != g1_hash_no_attr + assert g1_hash_edge2_node2 != g1_hash_no_attr + assert g1_hash_edge1_node1 != g1_hash_edge2_node2 + assert g1_hash_edge1_node2 != g1_hash_edge2_node2 + assert g1_hash_edge1_node2 != g1_hash_edge1_node1 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_edge1_node1 = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="edge_attr1", node_attr="node_attr1" + ) + g2_hash_edge2_node2 = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="edge_attr2", node_attr="node_attr2" + ) + + assert g1_hash_edge1_node1 == g2_hash_edge1_node1 + assert g1_hash_edge2_node2 == g2_hash_edge2_node2 + + +def test_digest_size(): + """ + The hash string lengths should be as expected for a variety of graphs and + digest sizes + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i) + + h16 = nx.weisfeiler_lehman_graph_hash(G) + h32 = nx.weisfeiler_lehman_graph_hash(G, digest_size=32) + + assert h16 != h32 + assert 
len(h16) == 16 * 2 + assert len(h32) == 32 * 2 + + +def test_directed_bugs(): + """ + These were bugs for directed graphs as discussed in issue #7806 + """ + Ga = nx.DiGraph() + Gb = nx.DiGraph() + Ga.add_nodes_from([1, 2, 3, 4]) + Gb.add_nodes_from([1, 2, 3, 4]) + Ga.add_edges_from([(1, 2), (3, 2)]) + Gb.add_edges_from([(1, 2), (3, 4)]) + Ga_hash = nx.weisfeiler_lehman_graph_hash(Ga) + Gb_hash = nx.weisfeiler_lehman_graph_hash(Gb) + assert Ga_hash != Gb_hash + + Tree1 = nx.DiGraph() + Tree1.add_edges_from([(0, 4), (1, 5), (2, 6), (3, 7)]) + Tree1.add_edges_from([(4, 8), (5, 8), (6, 9), (7, 9)]) + Tree1.add_edges_from([(8, 10), (9, 10)]) + nx.set_node_attributes( + Tree1, {10: "s", 8: "a", 9: "a", 4: "b", 5: "b", 6: "b", 7: "b"}, "weight" + ) + Tree2 = copy.deepcopy(Tree1) + nx.set_node_attributes(Tree1, {0: "d", 1: "c", 2: "d", 3: "c"}, "weight") + nx.set_node_attributes(Tree2, {0: "d", 1: "d", 2: "c", 3: "c"}, "weight") + Tree1_hash_short = nx.weisfeiler_lehman_graph_hash( + Tree1, iterations=1, node_attr="weight" + ) + Tree2_hash_short = nx.weisfeiler_lehman_graph_hash( + Tree2, iterations=1, node_attr="weight" + ) + assert Tree1_hash_short == Tree2_hash_short + Tree1_hash = nx.weisfeiler_lehman_graph_hash( + Tree1, node_attr="weight" + ) # Default is 3 iterations + Tree2_hash = nx.weisfeiler_lehman_graph_hash(Tree2, node_attr="weight") + assert Tree1_hash != Tree2_hash + + +def test_trivial_labels_isomorphism(): + """ + Trivial labelling of the graph should not change isomorphism verdicts. + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i) + G2 = nx.erdos_renyi_graph(n, p * i, seed=42 + i) + G1_hash = nx.weisfeiler_lehman_graph_hash(G1) + G2_hash = nx.weisfeiler_lehman_graph_hash(G2) + equal = G1_hash == G2_hash + + nx.set_node_attributes(G1, values=1, name="weight") + nx.set_node_attributes(G2, values=1, name="weight") + G1_hash_node = nx.weisfeiler_lehman_graph_hash(G1, node_attr="weight") + G2_hash_node = nx.weisfeiler_lehman_graph_hash(G2, node_attr="weight") + equal_node = G1_hash_node == G2_hash_node + + nx.set_edge_attributes(G1, values="a", name="e_weight") + nx.set_edge_attributes(G2, values="a", name="e_weight") + G1_hash_edge = nx.weisfeiler_lehman_graph_hash(G1, edge_attr="e_weight") + G2_hash_edge = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="e_weight") + equal_edge = G1_hash_edge == G2_hash_edge + + G1_hash_both = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="e_weight", node_attr="weight" + ) + G2_hash_both = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="e_weight", node_attr="weight" + ) + equal_both = G1_hash_both == G2_hash_both + + assert equal == equal_node + assert equal_node == equal_edge + assert equal_edge == equal_both + + +def test_trivial_labels_isomorphism_directed(): + """ + Trivial labelling of the graph should not change isomorphism verdicts on digraphs. 
+ """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, directed=True, seed=500 + i) + G2 = nx.erdos_renyi_graph(n, p * i, directed=True, seed=42 + i) + G1_hash = nx.weisfeiler_lehman_graph_hash(G1) + G2_hash = nx.weisfeiler_lehman_graph_hash(G2) + equal = G1_hash == G2_hash + + nx.set_node_attributes(G1, values=1, name="weight") + nx.set_node_attributes(G2, values=1, name="weight") + G1_hash_node = nx.weisfeiler_lehman_graph_hash(G1, node_attr="weight") + G2_hash_node = nx.weisfeiler_lehman_graph_hash(G2, node_attr="weight") + equal_node = G1_hash_node == G2_hash_node + + nx.set_edge_attributes(G1, values="a", name="e_weight") + nx.set_edge_attributes(G2, values="a", name="e_weight") + G1_hash_edge = nx.weisfeiler_lehman_graph_hash(G1, edge_attr="e_weight") + G2_hash_edge = nx.weisfeiler_lehman_graph_hash(G2, edge_attr="e_weight") + equal_edge = G1_hash_edge == G2_hash_edge + + G1_hash_both = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="e_weight", node_attr="weight" + ) + G2_hash_both = nx.weisfeiler_lehman_graph_hash( + G2, edge_attr="e_weight", node_attr="weight" + ) + equal_both = G1_hash_both == G2_hash_both + + assert equal == equal_node + assert equal_node == equal_edge + assert equal_edge == equal_both + + # Specific case that was found to be a bug in issue #7806 + # Without weights worked + Ga = nx.DiGraph() + Ga.add_nodes_from([1, 2, 3, 4]) + Gb = copy.deepcopy(Ga) + Ga.add_edges_from([(1, 2), (3, 2)]) + Gb.add_edges_from([(1, 2), (3, 4)]) + Ga_hash = nx.weisfeiler_lehman_graph_hash(Ga) + Gb_hash = nx.weisfeiler_lehman_graph_hash(Gb) + assert Ga_hash != Gb_hash + + # Now with trivial weights + nx.set_node_attributes(Ga, values=1, name="weight") + nx.set_node_attributes(Gb, values=1, name="weight") + Ga_hash = nx.weisfeiler_lehman_graph_hash(Ga, node_attr="weight") + Gb_hash = nx.weisfeiler_lehman_graph_hash(Gb, node_attr="weight") + assert Ga_hash != Gb_hash + + +def test_trivial_labels_hashes(): + """ + Test that 'empty' labelling of nodes or edges shouldn't have a different impact + on the calculated hash. Note that we cannot assume that trivial weights have no + impact at all. Without (trivial) weights, a node will start with hashing its + degree. This step is omitted when there are weights. + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i) + nx.set_node_attributes(G1, values="", name="weight") + first = nx.weisfeiler_lehman_graph_hash(G1, node_attr="weight") + nx.set_edge_attributes(G1, values="", name="e_weight") + second = nx.weisfeiler_lehman_graph_hash(G1, edge_attr="e_weight") + assert first == second + third = nx.weisfeiler_lehman_graph_hash( + G1, edge_attr="e_weight", node_attr="weight" + ) + assert second == third + + +# Unit tests for the :func:`~networkx.weisfeiler_lehman_subgraph_hashes` function + + +def is_subiteration(a, b): + """ + returns True if that each hash sequence in 'a' is a prefix for + the corresponding sequence indexed by the same node in 'b'. + """ + return all(b[node][: len(hashes)] == hashes for node, hashes in a.items()) + + +def hexdigest_sizes_correct(a, digest_size): + """ + returns True if all hex digest sizes are the expected length in a + node:subgraph-hashes dictionary. 
Hex digest string length == 2 * bytes digest + length since each pair of hex digits encodes 1 byte + (https://docs.python.org/3/library/hashlib.html) + """ + hexdigest_size = digest_size * 2 + + def list_digest_sizes_correct(l): + return all(len(x) == hexdigest_size for x in l) + + return all(list_digest_sizes_correct(hashes) for hashes in a.values()) + + +def test_empty_graph_subgraph_hash(): + """ " + empty graphs should give empty dict subgraph hashes regardless of other params + """ + G = nx.empty_graph() + + subgraph_hashes1 = nx.weisfeiler_lehman_subgraph_hashes(G) + subgraph_hashes2 = nx.weisfeiler_lehman_subgraph_hashes(G, edge_attr="edge_attr") + subgraph_hashes3 = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="edge_attr") + subgraph_hashes4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=2) + subgraph_hashes5 = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=64) + + assert subgraph_hashes1 == {} + assert subgraph_hashes2 == {} + assert subgraph_hashes3 == {} + assert subgraph_hashes4 == {} + assert subgraph_hashes5 == {} + + +def test_directed_subgraph_hash(): + """ + A directed graph with no bi-directional edges should yield different subgraph + hashes to the same graph taken as undirected, if all hashes don't collide. + """ + r = 10 + for i in range(r): + G_directed = nx.gn_graph(10 + r, seed=100 + i) + G_undirected = nx.to_undirected(G_directed) + + directed_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_directed) + undirected_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G_undirected) + + assert directed_subgraph_hashes != undirected_subgraph_hashes + + +def test_reversed_subgraph_hash(): + """ + A directed graph with no bi-directional edges should yield different subgraph + hashes to the same graph taken with edge directions reversed if there are no + hash collisions. Here we test a cycle graph which is the minimal counterexample + """ + G = nx.cycle_graph(5, create_using=nx.DiGraph) + nx.set_node_attributes(G, {n: str(n) for n in G.nodes()}, name="label") + + G_reversed = G.reverse() + + h = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label") + h_reversed = nx.weisfeiler_lehman_subgraph_hashes(G_reversed, node_attr="label") + + assert h != h_reversed + + +def test_isomorphic_subgraph_hash(): + """ + the subgraph hashes should be invariant to node-relabeling when the output is + reindexed by the same mapping and all hashes don't collide. + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=200 + i) + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g1_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G1) + g2_subgraph_hashes = nx.weisfeiler_lehman_subgraph_hashes(G2) + + assert g1_subgraph_hashes == {-1 * k: v for k, v in g2_subgraph_hashes.items()} + + +def test_isomorphic_edge_attr_subgraph_hash(): + """ + Isomorphic graphs with differing edge attributes should yield different subgraph + hashes if the 'edge_attr' argument is supplied and populated in the graph, and + all hashes don't collide. 
+ The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=300 + i) + + for a, b in G1.edges: + G1[a][b]["edge_attr1"] = f"{a}-{b}-1" + G1[a][b]["edge_attr2"] = f"{a}-{b}-2" + + g1_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes( + G1, edge_attr="edge_attr1" + ) + g1_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes( + G1, edge_attr="edge_attr2" + ) + g1_hash_no_edge_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, edge_attr=None) + + assert g1_hash_with_edge_attr1 != g1_hash_no_edge_attr + assert g1_hash_with_edge_attr2 != g1_hash_no_edge_attr + assert g1_hash_with_edge_attr1 != g1_hash_with_edge_attr2 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_with_edge_attr1 = nx.weisfeiler_lehman_subgraph_hashes( + G2, edge_attr="edge_attr1" + ) + g2_hash_with_edge_attr2 = nx.weisfeiler_lehman_subgraph_hashes( + G2, edge_attr="edge_attr2" + ) + + assert g1_hash_with_edge_attr1 == { + -1 * k: v for k, v in g2_hash_with_edge_attr1.items() + } + assert g1_hash_with_edge_attr2 == { + -1 * k: v for k, v in g2_hash_with_edge_attr2.items() + } + + +def test_missing_edge_attr_subgraph_hash(): + """ + If the 'edge_attr' argument is supplied but is missing from an edge in the graph, + we should raise a KeyError + """ + G = nx.Graph() + G.add_edges_from([(1, 2, {"edge_attr1": "a"}), (1, 3, {})]) + pytest.raises( + KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, edge_attr="edge_attr1" + ) + + +def test_isomorphic_node_attr_subgraph_hash(): + """ + Isomorphic graphs with differing node attributes should yield different subgraph + hashes if the 'node_attr' argument is supplied and populated in the graph, and + all hashes don't collide. 
+ The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=400 + i) + + for u in G1.nodes(): + G1.nodes[u]["node_attr1"] = f"{u}-1" + G1.nodes[u]["node_attr2"] = f"{u}-2" + + g1_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes( + G1, node_attr="node_attr1" + ) + g1_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes( + G1, node_attr="node_attr2" + ) + g1_hash_no_node_attr = nx.weisfeiler_lehman_subgraph_hashes(G1, node_attr=None) + + assert g1_hash_with_node_attr1 != g1_hash_no_node_attr + assert g1_hash_with_node_attr2 != g1_hash_no_node_attr + assert g1_hash_with_node_attr1 != g1_hash_with_node_attr2 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_with_node_attr1 = nx.weisfeiler_lehman_subgraph_hashes( + G2, node_attr="node_attr1" + ) + g2_hash_with_node_attr2 = nx.weisfeiler_lehman_subgraph_hashes( + G2, node_attr="node_attr2" + ) + + assert g1_hash_with_node_attr1 == { + -1 * k: v for k, v in g2_hash_with_node_attr1.items() + } + assert g1_hash_with_node_attr2 == { + -1 * k: v for k, v in g2_hash_with_node_attr2.items() + } + + +def test_missing_node_attr_subgraph_hash(): + """ + If the 'node_attr' argument is supplied but is missing from a node in the graph, + we should raise a KeyError + """ + G = nx.Graph() + G.add_nodes_from([(1, {"node_attr1": "a"}), (2, {})]) + G.add_edges_from([(1, 2), (2, 3), (3, 1), (1, 4)]) + pytest.raises( + KeyError, nx.weisfeiler_lehman_subgraph_hashes, G, node_attr="node_attr1" + ) + + +def test_isomorphic_edge_attr_and_node_attr_subgraph_hash(): + """ + Isomorphic graphs with differing node attributes should yield different subgraph + hashes if the 'node_attr' and 'edge_attr' argument is supplied and populated in + the graph, and all hashes don't collide + The output should still be invariant to node-relabeling + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G1 = nx.erdos_renyi_graph(n, p * i, seed=500 + i) + + for u in G1.nodes(): + G1.nodes[u]["node_attr1"] = f"{u}-1" + G1.nodes[u]["node_attr2"] = f"{u}-2" + + for a, b in G1.edges: + G1[a][b]["edge_attr1"] = f"{a}-{b}-1" + G1[a][b]["edge_attr2"] = f"{a}-{b}-2" + + g1_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes( + G1, edge_attr="edge_attr1", node_attr="node_attr1" + ) + g1_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes( + G1, edge_attr="edge_attr2", node_attr="node_attr2" + ) + g1_hash_edge1_node2 = nx.weisfeiler_lehman_subgraph_hashes( + G1, edge_attr="edge_attr1", node_attr="node_attr2" + ) + g1_hash_no_attr = nx.weisfeiler_lehman_subgraph_hashes(G1) + + assert g1_hash_edge1_node1 != g1_hash_no_attr + assert g1_hash_edge2_node2 != g1_hash_no_attr + assert g1_hash_edge1_node1 != g1_hash_edge2_node2 + assert g1_hash_edge1_node2 != g1_hash_edge2_node2 + assert g1_hash_edge1_node2 != g1_hash_edge1_node1 + + G2 = nx.relabel_nodes(G1, {u: -1 * u for u in G1.nodes()}) + + g2_hash_edge1_node1 = nx.weisfeiler_lehman_subgraph_hashes( + G2, edge_attr="edge_attr1", node_attr="node_attr1" + ) + g2_hash_edge2_node2 = nx.weisfeiler_lehman_subgraph_hashes( + G2, edge_attr="edge_attr2", node_attr="node_attr2" + ) + + assert g1_hash_edge1_node1 == { + -1 * k: v for k, v in g2_hash_edge1_node1.items() + } + assert g1_hash_edge2_node2 == { + -1 * k: v for k, v in g2_hash_edge2_node2.items() + } + + +def test_iteration_depth(): + """ + All nodes should have the correct number of subgraph hashes in the output when + using degree as 
initial node labels. + Subsequent iteration depths for the same graph should be additive for each node + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=600 + i) + + depth3 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=3) + depth4 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=4) + depth5 = nx.weisfeiler_lehman_subgraph_hashes(G, iterations=5) + + assert all(len(hashes) == 3 for hashes in depth3.values()) + assert all(len(hashes) == 4 for hashes in depth4.values()) + assert all(len(hashes) == 5 for hashes in depth5.values()) + + assert is_subiteration(depth3, depth4) + assert is_subiteration(depth4, depth5) + assert is_subiteration(depth3, depth5) + + +def test_iteration_depth_edge_attr(): + """ + All nodes should have the correct number of subgraph hashes in the output when + setting initial node labels empty and using an edge attribute when aggregating + neighborhoods. + Subsequent iteration depths for the same graph should be additive for each node + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=700 + i) + + for a, b in G.edges: + G[a][b]["edge_attr1"] = f"{a}-{b}-1" + + depth3 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", iterations=3 + ) + depth4 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", iterations=4 + ) + depth5 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", iterations=5 + ) + + assert all(len(hashes) == 3 for hashes in depth3.values()) + assert all(len(hashes) == 4 for hashes in depth4.values()) + assert all(len(hashes) == 5 for hashes in depth5.values()) + + assert is_subiteration(depth3, depth4) + assert is_subiteration(depth4, depth5) + assert is_subiteration(depth3, depth5) + + +def test_iteration_depth_node_attr(): + """ + All nodes should have the correct number of subgraph hashes in the output when + setting initial node labels to an attribute. + Subsequent iteration depths for the same graph should be additive for each node + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=800 + i) + + for u in G.nodes(): + G.nodes[u]["node_attr1"] = f"{u}-1" + + depth3 = nx.weisfeiler_lehman_subgraph_hashes( + G, node_attr="node_attr1", iterations=3 + ) + depth4 = nx.weisfeiler_lehman_subgraph_hashes( + G, node_attr="node_attr1", iterations=4 + ) + depth5 = nx.weisfeiler_lehman_subgraph_hashes( + G, node_attr="node_attr1", iterations=5 + ) + + assert all(len(hashes) == 3 for hashes in depth3.values()) + assert all(len(hashes) == 4 for hashes in depth4.values()) + assert all(len(hashes) == 5 for hashes in depth5.values()) + + assert is_subiteration(depth3, depth4) + assert is_subiteration(depth4, depth5) + assert is_subiteration(depth3, depth5) + + +def test_iteration_depth_node_edge_attr(): + """ + All nodes should have the correct number of subgraph hashes in the output when + setting initial node labels to an attribute and also using an edge attribute when + aggregating neighborhoods. 
+ Subsequent iteration depths for the same graph should be additive for each node + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=900 + i) + + for u in G.nodes(): + G.nodes[u]["node_attr1"] = f"{u}-1" + + for a, b in G.edges: + G[a][b]["edge_attr1"] = f"{a}-{b}-1" + + depth3 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=3 + ) + depth4 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=4 + ) + depth5 = nx.weisfeiler_lehman_subgraph_hashes( + G, edge_attr="edge_attr1", node_attr="node_attr1", iterations=5 + ) + + assert all(len(hashes) == 3 for hashes in depth3.values()) + assert all(len(hashes) == 4 for hashes in depth4.values()) + assert all(len(hashes) == 5 for hashes in depth5.values()) + + assert is_subiteration(depth3, depth4) + assert is_subiteration(depth4, depth5) + assert is_subiteration(depth3, depth5) + + +def test_digest_size_subgraph_hash(): + """ + The hash string lengths should be as expected for a variety of graphs and + digest sizes + """ + n, r = 100, 10 + p = 1.0 / r + for i in range(1, r + 1): + G = nx.erdos_renyi_graph(n, p * i, seed=1000 + i) + + digest_size16_hashes = nx.weisfeiler_lehman_subgraph_hashes(G) + digest_size32_hashes = nx.weisfeiler_lehman_subgraph_hashes(G, digest_size=32) + + assert digest_size16_hashes != digest_size32_hashes + + assert hexdigest_sizes_correct(digest_size16_hashes, 16) + assert hexdigest_sizes_correct(digest_size32_hashes, 32) + + +def test_initial_node_labels_subgraph_hash(): + """ + Including the hashed initial label prepends an extra hash to the lists + """ + G = nx.path_graph(5) + nx.set_node_attributes(G, {i: int(0 < i < 4) for i in G}, "label") + # initial node labels: + # 0--1--1--1--0 + + without_initial_label = nx.weisfeiler_lehman_subgraph_hashes(G, node_attr="label") + assert all(len(v) == 3 for v in without_initial_label.values()) + # 3 different 1 hop nhds + assert len({v[0] for v in without_initial_label.values()}) == 3 + + with_initial_label = nx.weisfeiler_lehman_subgraph_hashes( + G, node_attr="label", include_initial_labels=True + ) + assert all(len(v) == 4 for v in with_initial_label.values()) + # 2 different initial labels + assert len({v[0] for v in with_initial_label.values()}) == 2 + + # check hashes match otherwise + for u in G: + for a, b in zip( + with_initial_label[u][1:], without_initial_label[u], strict=True + ): + assert a == b diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py new file mode 100644 index 0000000..99f766f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_graphical.py @@ -0,0 +1,163 @@ +import pytest + +import networkx as nx + + +def test_valid_degree_sequence1(): + n = 100 + p = 0.3 + for i in range(10): + G = nx.erdos_renyi_graph(n, p) + deg = (d for n, d in G.degree()) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") + + +def test_valid_degree_sequence2(): + n = 100 + for i in range(10): + G = nx.barabasi_albert_graph(n, 1) + deg = (d for n, d in G.degree()) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") + + +def test_string_input(): + pytest.raises(nx.NetworkXException, nx.is_graphical, [], "foo") + pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "hh") + 
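+        # ("hh" is the Havel-Hakimi test and "eg" the Erdos-Gallai test for graphical degree sequences; an unknown method string like "foo" raises NetworkXException, and the non-integer entry "red" fails validation under either method.)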
pytest.raises(nx.NetworkXException, nx.is_graphical, ["red"], "eg") + + +def test_non_integer_input(): + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, [72.5], "hh") + + +def test_negative_input(): + assert not nx.is_graphical([-1], "hh") + assert not nx.is_graphical([-1], "eg") + + +class TestAtlas: + @classmethod + def setup_class(cls): + global atlas + from networkx.generators import atlas + + cls.GAG = atlas.graph_atlas_g() + + def test_atlas(self): + for graph in self.GAG: + deg = (d for n, d in graph.degree()) + assert nx.is_graphical(deg, method="eg") + assert nx.is_graphical(deg, method="hh") + + +def test_small_graph_true(): + z = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") + z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2] + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") + z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + assert nx.is_graphical(z, method="hh") + assert nx.is_graphical(z, method="eg") + + +def test_small_graph_false(): + z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") + z = [6, 5, 4, 4, 2, 1, 1, 1] + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") + z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + assert not nx.is_graphical(z, method="hh") + assert not nx.is_graphical(z, method="eg") + + +def test_directed_degree_sequence(): + # Test a range of valid directed degree sequences + n, r = 100, 10 + p = 1.0 / r + for i in range(r): + G = nx.erdos_renyi_graph(n, p * (i + 1), None, True) + din = (d for n, d in G.in_degree()) + dout = (d for n, d in G.out_degree()) + assert nx.is_digraphical(din, dout) + + +def test_small_directed_sequences(): + dout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + din = [3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 1] + assert nx.is_digraphical(din, dout) + # Test nongraphical directed sequence + dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102] + assert not nx.is_digraphical(din, dout) + # Test digraphical small sequence + dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1] + assert nx.is_digraphical(din, dout) + # Test nonmatching sum + din = [2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1] + assert not nx.is_digraphical(din, dout) + # Test for negative integer in sequence + din = [2, 2, 2, -2, 2, 2, 2, 2, 1, 1, 4] + assert not nx.is_digraphical(din, dout) + # Test for noninteger + din = dout = [1, 1, 1.1, 1] + assert not nx.is_digraphical(din, dout) + din = dout = [1, 1, "rer", 1] + assert not nx.is_digraphical(din, dout) + + +def test_multi_sequence(): + # Test nongraphical multi sequence + seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1] + assert not nx.is_multigraphical(seq) + # Test small graphical multi sequence + seq = [6, 5, 4, 4, 2, 1, 1, 1] + assert nx.is_multigraphical(seq) + # Test for negative integer in sequence + seq = [6, 5, 4, -4, 2, 1, 1, 1] + assert not nx.is_multigraphical(seq) + # Test for sequence with odd sum + seq = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + assert not nx.is_multigraphical(seq) + # Test for noninteger + seq = [1, 1, 1.1, 1] + assert not nx.is_multigraphical(seq) + seq = [1, 1, "rer", 1] + assert not nx.is_multigraphical(seq) + + +def test_pseudo_sequence(): + # Test small valid pseudo sequence + seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1] + assert nx.is_pseudographical(seq) + # Test for sequence with odd sum + 
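+    # (each edge contributes 2 to the degree total, self-loops included, so even a pseudographical sequence needs an even sum; the sum below is odd)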
seq = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + assert not nx.is_pseudographical(seq) + # Test for negative integer in sequence + seq = [1000, 3, 3, 3, 3, 2, 2, -2, 1, 1] + assert not nx.is_pseudographical(seq) + # Test for noninteger + seq = [1, 1, 1.1, 1] + assert not nx.is_pseudographical(seq) + seq = [1, 1, "rer", 1] + assert not nx.is_pseudographical(seq) + + +def test_numpy_degree_sequence(): + np = pytest.importorskip("numpy") + ds = np.array([1, 2, 2, 2, 1], dtype=np.int64) + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") + ds = np.array([1, 2, 2, 2, 1], dtype=np.float64) + assert nx.is_graphical(ds, "eg") + assert nx.is_graphical(ds, "hh") + ds = np.array([1.1, 2, 2, 2, 1], dtype=np.float64) + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "eg") + pytest.raises(nx.NetworkXException, nx.is_graphical, ds, "hh") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py new file mode 100644 index 0000000..eaa6a67 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hierarchy.py @@ -0,0 +1,46 @@ +import pytest + +import networkx as nx + + +def test_hierarchy_undirected(): + G = nx.cycle_graph(5) + pytest.raises(nx.NetworkXError, nx.flow_hierarchy, G) + + +def test_hierarchy_cycle(): + G = nx.cycle_graph(5, create_using=nx.DiGraph()) + assert nx.flow_hierarchy(G) == 0.0 + + +def test_hierarchy_tree(): + G = nx.full_rary_tree(2, 16, create_using=nx.DiGraph()) + assert nx.flow_hierarchy(G) == 1.0 + + +def test_hierarchy_1(): + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 1), (3, 4), (0, 4)]) + assert nx.flow_hierarchy(G) == 0.5 + + +def test_hierarchy_weight(): + G = nx.DiGraph() + G.add_edges_from( + [ + (0, 1, {"weight": 0.3}), + (1, 2, {"weight": 0.1}), + (2, 3, {"weight": 0.1}), + (3, 1, {"weight": 0.1}), + (3, 4, {"weight": 0.3}), + (0, 4, {"weight": 0.3}), + ] + ) + assert nx.flow_hierarchy(G, weight="weight") == 0.75 + + +@pytest.mark.parametrize("n", (0, 1, 3)) +def test_hierarchy_empty_graph(n): + G = nx.empty_graph(n, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError, match=".*not applicable to empty graphs"): + nx.flow_hierarchy(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py new file mode 100644 index 0000000..6af0016 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_hybrid.py @@ -0,0 +1,24 @@ +import networkx as nx + + +def test_2d_grid_graph(): + # FC article claims 2d grid graph of size n is (3,3)-connected + # and (5,9)-connected, but I don't think it is (5,9)-connected + G = nx.grid_2d_graph(8, 8, periodic=True) + assert nx.is_kl_connected(G, 3, 3) + assert not nx.is_kl_connected(G, 5, 9) + (H, graphOK) = nx.kl_connected_subgraph(G, 5, 9, same_as_graph=True) + assert not graphOK + + +def test_small_graph(): + G = nx.Graph() + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(2, 3) + assert nx.is_kl_connected(G, 2, 2) + H = nx.kl_connected_subgraph(G, 2, 2) + (H, graphOK) = nx.kl_connected_subgraph( + G, 2, 2, low_memory=True, same_as_graph=True + ) + assert graphOK diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py new file mode 100644 index 0000000..d29b306 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_isolate.py @@ -0,0 +1,26 @@ +"""Unit tests for the :mod:`networkx.algorithms.isolates` module.""" + +import networkx as nx + + +def test_is_isolate(): + G = nx.Graph() + G.add_edge(0, 1) + G.add_node(2) + assert not nx.is_isolate(G, 0) + assert not nx.is_isolate(G, 1) + assert nx.is_isolate(G, 2) + + +def test_isolates(): + G = nx.Graph() + G.add_edge(0, 1) + G.add_nodes_from([2, 3]) + assert sorted(nx.isolates(G)) == [2, 3] + + +def test_number_of_isolates(): + G = nx.Graph() + G.add_edge(0, 1) + G.add_nodes_from([2, 3]) + assert nx.number_of_isolates(G) == 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py new file mode 100644 index 0000000..1b8ccf2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_link_prediction.py @@ -0,0 +1,593 @@ +import math +from functools import partial + +import pytest + +import networkx as nx + + +def _test_func(G, ebunch, expected, predict_func, **kwargs): + result = predict_func(G, ebunch, **kwargs) + exp_dict = {tuple(sorted([u, v])): score for u, v, score in expected} + res_dict = {tuple(sorted([u, v])): score for u, v, score in result} + + assert len(exp_dict) == len(res_dict) + for p in exp_dict: + assert exp_dict[p] == pytest.approx(res_dict[p], abs=1e-7) + + +class TestResourceAllocationIndex: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.resource_allocation_index) + cls.test = staticmethod(partial(_test_func, predict_func=cls.func)) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, [(0, 1)], [(0, 1, 0.75)]) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, [(0, 2)], [(0, 2, 0.5)]) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, [(1, 2)], [(1, 2, 0.25)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + pytest.raises( + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] + ) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(4) + self.test(G, [(0, 0)], [(0, 0, 1)]) + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) + + +class TestJaccardCoefficient: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.jaccard_coefficient) + cls.test = staticmethod(partial(_test_func, predict_func=cls.func)) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, [(0, 1)], [(0, 1, 0.6)]) + + def test_P4(self): + G = nx.path_graph(4) + self.test(G, [(0, 2)], [(0, 2, 0.5)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + pytest.raises( + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] + ) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (2, 3)]) + self.test(G, 
[(0, 2)], [(0, 2, 0)]) + + def test_isolated_nodes(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + self.test(G, None, [(0, 3, 0.5), (1, 2, 0.5), (1, 3, 0)]) + + +class TestAdamicAdarIndex: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.adamic_adar_index) + cls.test = staticmethod(partial(_test_func, predict_func=cls.func)) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, [(0, 1)], [(0, 1, 3 / math.log(4))]) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, [(0, 2)], [(0, 2, 1 / math.log(2))]) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, [(1, 2)], [(1, 2, 1 / math.log(4))]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + pytest.raises( + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] + ) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(4) + self.test(G, [(0, 0)], [(0, 0, 3 / math.log(3))]) + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + self.test( + G, None, [(0, 3, 1 / math.log(2)), (1, 2, 1 / math.log(2)), (1, 3, 0)] + ) + + +class TestCommonNeighborCentrality: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.common_neighbor_centrality) + cls.test = staticmethod(partial(_test_func, predict_func=cls.func)) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, [(0, 1)], [(0, 1, 3.0)], alpha=1) + self.test(G, [(0, 1)], [(0, 1, 5.0)], alpha=0) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, [(0, 2)], [(0, 2, 1.25)], alpha=0.5) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, [(1, 2)], [(1, 2, 1.75)], alpha=0.5) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + pytest.raises( + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] + ) + + def test_node_u_not_found(self): + G = nx.Graph() + G.add_edges_from([(1, 3), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 1)]) + + def test_node_v_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(4) + pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], []) + + def test_equal_nodes_with_alpha_one_raises_error(self): + G = nx.complete_graph(4) + pytest.raises(nx.NetworkXAlgorithmError, self.test, G, [(0, 0)], [], alpha=1.0) + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + self.test(G, None, [(0, 3, 1.5), (1, 2, 1.5), (1, 3, 2 / 3)], alpha=0.5) + + +class TestPreferentialAttachment: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.preferential_attachment) + cls.test = staticmethod(partial(_test_func, predict_func=cls.func)) + + def test_K5(self): + G = nx.complete_graph(5) + 
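+        # Preferential attachment scores a pair as deg(u) * deg(v); every node in K5 has degree 4, so the expected score for (0, 1) is 4 * 4 = 16.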
self.test(G, [(0, 1)], [(0, 1, 16)]) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, [(0, 1)], [(0, 1, 2)]) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, [(0, 2)], [(0, 2, 4)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + pytest.raises( + nx.NetworkXNotImplemented, self.func, graph_type([(0, 1), (1, 2)]), [(0, 2)] + ) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_zero_degrees(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + self.test(G, None, [(0, 3, 2), (1, 2, 2), (1, 3, 1)]) + + +class TestCNSoundarajanHopcroft: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.cn_soundarajan_hopcroft) + cls.test = staticmethod( + partial(_test_func, predict_func=cls.func, community="community") + ) + + def test_K5(self): + G = nx.complete_graph(5) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 + self.test(G, [(0, 1)], [(0, 1, 5)]) + + def test_P3(self): + G = nx.path_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 2)], [(0, 2, 1)]) + + def test_S4(self): + G = nx.star_graph(4) + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 2)], [(1, 2, 2)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 0)], [(0, 0, 4)]) + + def test_different_community(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 + self.test(G, [(0, 3)], [(0, 3, 2)]) + + def test_no_community_information(self): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) + + def test_insufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) + + def test_sufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) + G.nodes[1]["community"] = 0 + 
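+        # (nodes 0 and 5 carry no community label, but scoring (1, 4) only needs labels on 1, 4 and their common neighbors 2 and 3: |CN| plus same-community CN gives 2 + 2 = 4)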
G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 4)], [(1, 4, 4)]) + + def test_custom_community_attribute_name(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 2)], community="cmty") + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + self.test(G, None, [(0, 3, 2), (1, 2, 1), (1, 3, 0)]) + + +class TestRAIndexSoundarajanHopcroft: + @classmethod + def setup_class(cls): + cls.func = staticmethod(nx.ra_index_soundarajan_hopcroft) + cls.test = staticmethod( + partial(_test_func, predict_func=cls.func, community="community") + ) + + def test_K5(self): + G = nx.complete_graph(5) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 + self.test(G, [(0, 1)], [(0, 1, 0.5)]) + + def test_P3(self): + G = nx.path_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 2)], [(0, 2, 0)]) + + def test_S4(self): + G = nx.star_graph(4) + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 2)], [(1, 2, 0.25)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 0)], [(0, 0, 1)]) + + def test_different_community(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 + self.test(G, [(0, 3)], [(0, 3, 0)]) + + def test_no_community_information(self): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) + + def test_insufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) + + def test_sufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 4)], [(1, 4, 1)]) + + def test_custom_community_attribute_name(self): + G = 
nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 1 + self.test(G, [(0, 3)], [(0, 3, 0)], community="cmty") + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + self.test(G, None, [(0, 3, 0.5), (1, 2, 0), (1, 3, 0)]) + + +class TestWithinInterCluster: + @classmethod + def setup_class(cls): + cls.delta = 0.001 + cls.func = staticmethod(nx.within_inter_cluster) + cls.test = staticmethod( + partial( + _test_func, + predict_func=cls.func, + delta=cls.delta, + community="community", + ) + ) + + def test_K5(self): + G = nx.complete_graph(5) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 1 + self.test(G, [(0, 1)], [(0, 1, 2 / (1 + self.delta))]) + + def test_P3(self): + G = nx.path_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 2)], [(0, 2, 0)]) + + def test_S4(self): + G = nx.star_graph(4) + G.nodes[0]["community"] = 1 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 1 + G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 2)], [(1, 2, 1 / self.delta)]) + + @pytest.mark.parametrize("graph_type", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) + def test_notimplemented(self, graph_type): + G = graph_type([(0, 1), (1, 2)]) + G.add_nodes_from([0, 1, 2], community=0) + pytest.raises(nx.NetworkXNotImplemented, self.func, G, [(0, 2)]) + + def test_node_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NodeNotFound, self.func, G, [(0, 4)]) + + def test_no_common_neighbor(self): + G = nx.Graph() + G.add_nodes_from([0, 1]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + self.test(G, [(0, 1)], [(0, 1, 0)]) + + def test_equal_nodes(self): + G = nx.complete_graph(3) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + self.test(G, [(0, 0)], [(0, 0, 2 / self.delta)]) + + def test_different_community(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 1 + self.test(G, [(0, 3)], [(0, 3, 0)]) + + def test_no_inter_cluster_common_neighbor(self): + G = nx.complete_graph(4) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)]) + + def test_no_community_information(self): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 1)])) + + def test_insufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 0 + G.nodes[3]["community"] = 0 + pytest.raises(nx.NetworkXAlgorithmError, list, self.func(G, [(0, 3)])) + + def test_sufficient_community_information(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4), (4, 5)]) + G.nodes[1]["community"] = 0 + G.nodes[2]["community"] = 0 + 
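+        # (within_inter_cluster divides within-community common neighbors by inter-community ones plus delta; both common neighbors of (1, 4) sit in community 0, giving 2 / delta)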
G.nodes[3]["community"] = 0 + G.nodes[4]["community"] = 0 + self.test(G, [(1, 4)], [(1, 4, 2 / self.delta)]) + + def test_invalid_delta(self): + G = nx.complete_graph(3) + G.add_nodes_from([0, 1, 2], community=0) + pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], 0) + pytest.raises(nx.NetworkXAlgorithmError, self.func, G, [(0, 1)], -0.5) + + def test_custom_community_attribute_name(self): + G = nx.complete_graph(4) + G.nodes[0]["cmty"] = 0 + G.nodes[1]["cmty"] = 0 + G.nodes[2]["cmty"] = 0 + G.nodes[3]["cmty"] = 0 + self.test(G, [(0, 3)], [(0, 3, 2 / self.delta)], community="cmty") + + def test_all_nonexistent_edges(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (2, 3)]) + G.nodes[0]["community"] = 0 + G.nodes[1]["community"] = 1 + G.nodes[2]["community"] = 0 + G.nodes[3]["community"] = 0 + self.test(G, None, [(0, 3, 1 / self.delta), (1, 2, 0), (1, 3, 0)]) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py new file mode 100644 index 0000000..639a31f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_lowest_common_ancestors.py @@ -0,0 +1,459 @@ +from itertools import chain, combinations, product + +import pytest + +import networkx as nx + +tree_all_pairs_lca = nx.tree_all_pairs_lowest_common_ancestor +all_pairs_lca = nx.all_pairs_lowest_common_ancestor + + +def get_pair(dictionary, n1, n2): + if (n1, n2) in dictionary: + return dictionary[n1, n2] + else: + return dictionary[n2, n1] + + +class TestTreeLCA: + @classmethod + def setup_class(cls): + cls.DG = nx.DiGraph() + edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)] + cls.DG.add_edges_from(edges) + cls.ans = dict(tree_all_pairs_lca(cls.DG, 0)) + gold = {(n, n): n for n in cls.DG} + gold.update({(0, i): 0 for i in range(1, 7)}) + gold.update( + { + (1, 2): 0, + (1, 3): 1, + (1, 4): 1, + (1, 5): 0, + (1, 6): 0, + (2, 3): 0, + (2, 4): 0, + (2, 5): 2, + (2, 6): 2, + (3, 4): 1, + (3, 5): 0, + (3, 6): 0, + (4, 5): 0, + (4, 6): 0, + (5, 6): 2, + } + ) + + cls.gold = gold + + @staticmethod + def assert_has_same_pairs(d1, d2): + for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)): + assert get_pair(d1, a, b) == get_pair(d2, a, b) + + def test_tree_all_pairs_lca_default_root(self): + assert dict(tree_all_pairs_lca(self.DG)) == self.ans + + def test_tree_all_pairs_lca_return_subset(self): + test_pairs = [(0, 1), (0, 1), (1, 0)] + ans = dict(tree_all_pairs_lca(self.DG, 0, test_pairs)) + assert (0, 1) in ans and (1, 0) in ans + assert len(ans) == 2 + + def test_tree_all_pairs_lca(self): + all_pairs = chain(combinations(self.DG, 2), ((node, node) for node in self.DG)) + + ans = dict(tree_all_pairs_lca(self.DG, 0, all_pairs)) + self.assert_has_same_pairs(ans, self.ans) + + def test_tree_all_pairs_gold_example(self): + ans = dict(tree_all_pairs_lca(self.DG)) + self.assert_has_same_pairs(self.gold, ans) + + def test_tree_all_pairs_lca_invalid_input(self): + empty_digraph = tree_all_pairs_lca(nx.DiGraph()) + pytest.raises(nx.NetworkXPointlessConcept, list, empty_digraph) + + bad_pairs_digraph = tree_all_pairs_lca(self.DG, pairs=[(-1, -2)]) + pytest.raises(nx.NodeNotFound, list, bad_pairs_digraph) + + def test_tree_all_pairs_lca_subtrees(self): + ans = dict(tree_all_pairs_lca(self.DG, 1)) + gold = { + pair: lca + for (pair, lca) in self.gold.items() + if all(n in (1, 3, 4) for n in pair) + } + self.assert_has_same_pairs(gold, ans) 
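+ + # A hypothetical helper (not collected by pytest, since it lacks the test_ prefix) sketching the output shape checked above: the iterator yields ((u, v), lca) pairs in the orientation the pairs were requested. + @staticmethod + def _demo_tree_lca_output(): + T = nx.DiGraph([(0, 1), (0, 2), (1, 3)]) + assert dict(tree_all_pairs_lca(T, 0, [(3, 2), (1, 3)])) == {(3, 2): 0, (1, 3): 1}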
+ + def test_tree_all_pairs_lca_disconnected_nodes(self): + G = nx.DiGraph() + G.add_node(1) + assert {(1, 1): 1} == dict(tree_all_pairs_lca(G)) + + G.add_node(0) + assert {(1, 1): 1} == dict(tree_all_pairs_lca(G, 1)) + assert {(0, 0): 0} == dict(tree_all_pairs_lca(G, 0)) + + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + + def test_tree_all_pairs_lca_error_if_input_not_tree(self): + # Cycle + G = nx.DiGraph([(1, 2), (2, 1)]) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + # DAG + G = nx.DiGraph([(0, 2), (1, 2)]) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + + def test_tree_all_pairs_lca_generator(self): + pairs = iter([(0, 1), (0, 1), (1, 0)]) + some_pairs = dict(tree_all_pairs_lca(self.DG, 0, pairs)) + assert (0, 1) in some_pairs and (1, 0) in some_pairs + assert len(some_pairs) == 2 + + def test_tree_all_pairs_lca_nonexisting_pairs_exception(self): + lca = tree_all_pairs_lca(self.DG, 0, [(-1, -1)]) + pytest.raises(nx.NodeNotFound, list, lca) + # check if node is None + lca = tree_all_pairs_lca(self.DG, None, [(-1, -1)]) + pytest.raises(nx.NodeNotFound, list, lca) + + def test_tree_all_pairs_lca_routine_bails_on_DAGs(self): + G = nx.DiGraph([(3, 4), (5, 4)]) + pytest.raises(nx.NetworkXError, list, tree_all_pairs_lca(G)) + + def test_tree_all_pairs_lca_not_implemented(self): + NNI = nx.NetworkXNotImplemented + G = nx.Graph([(0, 1)]) + with pytest.raises(NNI): + next(tree_all_pairs_lca(G)) + with pytest.raises(NNI): + next(all_pairs_lca(G)) + pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1) + G = nx.MultiGraph([(0, 1)]) + with pytest.raises(NNI): + next(tree_all_pairs_lca(G)) + with pytest.raises(NNI): + next(all_pairs_lca(G)) + pytest.raises(NNI, nx.lowest_common_ancestor, G, 0, 1) + + def test_tree_all_pairs_lca_trees_without_LCAs(self): + G = nx.DiGraph() + G.add_node(3) + ans = list(tree_all_pairs_lca(G)) + assert ans == [((3, 3), 3)] + + +class TestMultiTreeLCA(TestTreeLCA): + @classmethod + def setup_class(cls): + cls.DG = nx.MultiDiGraph() + edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)] + cls.DG.add_edges_from(edges) + cls.ans = dict(tree_all_pairs_lca(cls.DG, 0)) + # add multiedges + cls.DG.add_edges_from(edges) + + gold = {(n, n): n for n in cls.DG} + gold.update({(0, i): 0 for i in range(1, 7)}) + gold.update( + { + (1, 2): 0, + (1, 3): 1, + (1, 4): 1, + (1, 5): 0, + (1, 6): 0, + (2, 3): 0, + (2, 4): 0, + (2, 5): 2, + (2, 6): 2, + (3, 4): 1, + (3, 5): 0, + (3, 6): 0, + (4, 5): 0, + (4, 6): 0, + (5, 6): 2, + } + ) + + cls.gold = gold + + +class TestDAGLCA: + @classmethod + def setup_class(cls): + cls.DG = nx.DiGraph() + nx.add_path(cls.DG, (0, 1, 2, 3)) + nx.add_path(cls.DG, (0, 4, 3)) + nx.add_path(cls.DG, (0, 5, 6, 8, 3)) + nx.add_path(cls.DG, (5, 7, 8)) + cls.DG.add_edge(6, 2) + cls.DG.add_edge(7, 2) + + cls.root_distance = nx.shortest_path_length(cls.DG, source=0) + + cls.gold = { + (1, 1): 1, + (1, 2): 1, + (1, 3): 1, + (1, 4): 0, + (1, 5): 0, + (1, 6): 0, + (1, 7): 0, + (1, 8): 0, + (2, 2): 2, + (2, 3): 2, + (2, 4): 0, + (2, 5): 5, + (2, 6): 6, + (2, 7): 7, + (2, 8): 7, + (3, 3): 3, + (3, 4): 4, + (3, 5): 5, + (3, 6): 6, + (3, 7): 7, + (3, 8): 8, + (4, 4): 4, + (4, 5): 0, + (4, 6): 0, + (4, 7): 0, + (4, 8): 0, + (5, 5): 5, + (5, 6): 5, + (5, 7): 5, + (5, 8): 5, + (6, 6): 6, + (6, 7): 5, + (6, 8): 6, + (7, 7): 7, + (7, 8): 7, + (8, 8): 8, + } + cls.gold.update(((0, n), 0) for n in cls.DG) + + def assert_lca_dicts_same(self, d1, d2, G=None): + """Checks if d1 and d2 contain the same pairs and + have a node at 
the same distance from root for each. + If G is None use self.DG.""" + if G is None: + G = self.DG + root_distance = self.root_distance + else: + roots = [n for n, deg in G.in_degree if deg == 0] + assert len(roots) == 1 + root_distance = nx.shortest_path_length(G, source=roots[0]) + + for a, b in ((min(pair), max(pair)) for pair in chain(d1, d2)): + assert ( + root_distance[get_pair(d1, a, b)] == root_distance[get_pair(d2, a, b)] + ) + + def test_all_pairs_lca_gold_example(self): + self.assert_lca_dicts_same(dict(all_pairs_lca(self.DG)), self.gold) + + def test_all_pairs_lca_all_pairs_given(self): + all_pairs = list(product(self.DG.nodes(), self.DG.nodes())) + ans = all_pairs_lca(self.DG, pairs=all_pairs) + self.assert_lca_dicts_same(dict(ans), self.gold) + + def test_all_pairs_lca_generator(self): + all_pairs = product(self.DG.nodes(), self.DG.nodes()) + ans = all_pairs_lca(self.DG, pairs=all_pairs) + self.assert_lca_dicts_same(dict(ans), self.gold) + + def test_all_pairs_lca_input_graph_with_two_roots(self): + G = self.DG.copy() + G.add_edge(9, 10) + G.add_edge(9, 4) + gold = self.gold.copy() + gold[9, 9] = 9 + gold[9, 10] = 9 + gold[9, 4] = 9 + gold[9, 3] = 9 + gold[10, 4] = 9 + gold[10, 3] = 9 + gold[10, 10] = 10 + + testing = dict(all_pairs_lca(G)) + + G.add_edge(-1, 9) + G.add_edge(-1, 0) + self.assert_lca_dicts_same(testing, gold, G) + + def test_all_pairs_lca_nonexisting_pairs_exception(self): + pytest.raises(nx.NodeNotFound, all_pairs_lca, self.DG, [(-1, -1)]) + + def test_all_pairs_lca_pairs_without_lca(self): + G = self.DG.copy() + G.add_node(-1) + gen = all_pairs_lca(G, [(-1, -1), (-1, 0)]) + assert dict(gen) == {(-1, -1): -1} + + def test_all_pairs_lca_null_graph(self): + pytest.raises(nx.NetworkXPointlessConcept, all_pairs_lca, nx.DiGraph()) + + def test_all_pairs_lca_non_dags(self): + pytest.raises(nx.NetworkXError, all_pairs_lca, nx.DiGraph([(3, 4), (4, 3)])) + + def test_all_pairs_lca_nonempty_graph_without_lca(self): + G = nx.DiGraph() + G.add_node(3) + ans = list(all_pairs_lca(G)) + assert ans == [((3, 3), 3)] + + def test_all_pairs_lca_bug_gh4942(self): + G = nx.DiGraph([(0, 2), (1, 2), (2, 3)]) + ans = list(all_pairs_lca(G)) + assert len(ans) == 9 + + def test_all_pairs_lca_default_kwarg(self): + G = nx.DiGraph([(0, 1), (2, 1)]) + sentinel = object() + assert nx.lowest_common_ancestor(G, 0, 2, default=sentinel) is sentinel + + def test_all_pairs_lca_identity(self): + G = nx.DiGraph() + G.add_node(3) + assert nx.lowest_common_ancestor(G, 3, 3) == 3 + + def test_all_pairs_lca_issue_4574(self): + G = nx.DiGraph() + G.add_nodes_from(range(17)) + G.add_edges_from( + [ + (2, 0), + (1, 2), + (3, 2), + (5, 2), + (8, 2), + (11, 2), + (4, 5), + (6, 5), + (7, 8), + (10, 8), + (13, 11), + (14, 11), + (15, 11), + (9, 10), + (12, 13), + (16, 15), + ] + ) + + assert nx.lowest_common_ancestor(G, 7, 9) is None + + def test_all_pairs_lca_one_pair_gh4942(self): + G = nx.DiGraph() + # Note: order edge addition is critical to the test + G.add_edge(0, 1) + G.add_edge(2, 0) + G.add_edge(2, 3) + G.add_edge(4, 0) + G.add_edge(5, 2) + + assert nx.lowest_common_ancestor(G, 1, 3) == 2 + + +class TestMultiDiGraph_DAGLCA(TestDAGLCA): + @classmethod + def setup_class(cls): + cls.DG = nx.MultiDiGraph() + nx.add_path(cls.DG, (0, 1, 2, 3)) + # add multiedges + nx.add_path(cls.DG, (0, 1, 2, 3)) + nx.add_path(cls.DG, (0, 4, 3)) + nx.add_path(cls.DG, (0, 5, 6, 8, 3)) + nx.add_path(cls.DG, (5, 7, 8)) + cls.DG.add_edge(6, 2) + cls.DG.add_edge(7, 2) + + cls.root_distance = nx.shortest_path_length(cls.DG, 
source=0) + + cls.gold = { + (1, 1): 1, + (1, 2): 1, + (1, 3): 1, + (1, 4): 0, + (1, 5): 0, + (1, 6): 0, + (1, 7): 0, + (1, 8): 0, + (2, 2): 2, + (2, 3): 2, + (2, 4): 0, + (2, 5): 5, + (2, 6): 6, + (2, 7): 7, + (2, 8): 7, + (3, 3): 3, + (3, 4): 4, + (3, 5): 5, + (3, 6): 6, + (3, 7): 7, + (3, 8): 8, + (4, 4): 4, + (4, 5): 0, + (4, 6): 0, + (4, 7): 0, + (4, 8): 0, + (5, 5): 5, + (5, 6): 5, + (5, 7): 5, + (5, 8): 5, + (6, 6): 6, + (6, 7): 5, + (6, 8): 6, + (7, 7): 7, + (7, 8): 7, + (8, 8): 8, + } + cls.gold.update(((0, n), 0) for n in cls.DG) + + +def test_all_pairs_lca_self_ancestors(): + """Self-ancestors should always be the node itself, i.e. lca of (0, 0) is 0. + See gh-4458.""" + # DAG for test - note order of node/edge addition is relevant + G = nx.DiGraph() + G.add_nodes_from(range(5)) + G.add_edges_from([(1, 0), (2, 0), (3, 2), (4, 1), (4, 3)]) + + ap_lca = nx.all_pairs_lowest_common_ancestor + assert all(u == v == a for (u, v), a in ap_lca(G) if u == v) + MG = nx.MultiDiGraph(G) + assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v) + MG.add_edges_from([(1, 0), (2, 0)]) + assert all(u == v == a for (u, v), a in ap_lca(MG) if u == v) + + +def test_lca_on_null_graph(): + G = nx.null_graph(create_using=nx.DiGraph) + with pytest.raises( + nx.NetworkXPointlessConcept, match="LCA meaningless on null graphs" + ): + nx.lowest_common_ancestor(G, 0, 0) + + +def test_lca_on_cycle_graph(): + G = nx.cycle_graph(6, create_using=nx.DiGraph) + with pytest.raises( + nx.NetworkXError, match="LCA only defined on directed acyclic graphs" + ): + nx.lowest_common_ancestor(G, 0, 3) + + +def test_lca_multiple_valid_solutions(): + G = nx.DiGraph() + G.add_nodes_from(range(4)) + G.add_edges_from([(2, 0), (3, 0), (2, 1), (3, 1)]) + assert nx.lowest_common_ancestor(G, 0, 1) in {2, 3} + + +def test_lca_dont_rely_on_single_successor(): + # Nodes 0 and 1 have nodes 2 and 3 as immediate ancestors, + # and node 2 also has node 3 as an immediate ancestor. 
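+    # Both 2 and 3 are common ancestors of 0 and 1, but 2 is the lowest because of the edge 3 -> 2, so a correct implementation must return 2 rather than stopping at 3.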
+ G = nx.DiGraph() + G.add_nodes_from(range(4)) + G.add_edges_from([(2, 0), (2, 1), (3, 1), (3, 0), (3, 2)]) + assert nx.lowest_common_ancestor(G, 0, 1) == 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py new file mode 100644 index 0000000..703416d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_matching.py @@ -0,0 +1,548 @@ +import math +from itertools import permutations + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +@pytest.mark.parametrize( + "fn", (nx.is_matching, nx.is_maximal_matching, nx.is_perfect_matching) +) +@pytest.mark.parametrize( + "edgeset", + ( + {(0, 5)}, # Single edge, node not in G + {(5, 0)}, # for both edge orders + {(0, 5), (2, 3)}, # node not in G, but other edge is valid matching + {(5, 5), (2, 3)}, # Self-loop hits node not in G validation first + ), +) +def test_is_matching_node_not_in_G(fn, edgeset): + """All is_*matching functions have consistent exception message for node + not in G.""" + G = nx.path_graph(4) + with pytest.raises(nx.NetworkXError, match="matching.*with node not in G"): + fn(G, edgeset) + + +@pytest.mark.parametrize( + "fn", (nx.is_matching, nx.is_maximal_matching, nx.is_perfect_matching) +) +@pytest.mark.parametrize( + "edgeset", + ( + {(0, 1, 2), (2, 3)}, # 3-tuple + {(0,), (2, 3)}, # 1-tuple + ), +) +def test_is_matching_invalid_edge(fn, edgeset): + """All is_*matching functions have consistent exception message for invalid + edges in matching.""" + G = nx.path_graph(4) + with pytest.raises(nx.NetworkXError, match=".*non-2-tuple edge.*"): + fn(G, edgeset) + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.DiGraph, nx.MultiDiGraph)) +@pytest.mark.parametrize( + "fn", (nx.max_weight_matching, nx.min_weight_matching, nx.maximal_matching) +) +def test_wrong_graph_type(fn, graph_type): + G = graph_type() + with pytest.raises(nx.NetworkXNotImplemented): + fn(G) + + +class TestMaxWeightMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.max_weight_matching` function. 
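+ + Matchings are returned as sets of 2-tuples, one per matched edge; each case below also exercises min_weight_matching on the same graph as the complementary problem.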
+ + """ + + def test_trivial1(self): + """Empty graph""" + G = nx.Graph() + assert nx.max_weight_matching(G) == set() + assert nx.min_weight_matching(G) == set() + + def test_selfloop(self): + G = nx.Graph() + G.add_edge(0, 0, weight=100) + assert nx.max_weight_matching(G) == set() + assert nx.min_weight_matching(G) == set() + + def test_single_edge(self): + G = nx.Graph() + G.add_edge(0, 1) + assert edges_equal(nx.max_weight_matching(G), {(0, 1)}) + assert edges_equal(nx.min_weight_matching(G), {(0, 1)}) + + def test_two_path(self): + G = nx.Graph() + G.add_edge("one", "two", weight=10) + G.add_edge("two", "three", weight=11) + assert edges_equal(nx.max_weight_matching(G), {("two", "three")}) + assert edges_equal(nx.min_weight_matching(G), {("one", "two")}) + + def test_path(self): + G = nx.Graph() + G.add_edge(1, 2, weight=5) + G.add_edge(2, 3, weight=11) + G.add_edge(3, 4, weight=5) + assert edges_equal(nx.max_weight_matching(G), {(2, 3)}) + assert edges_equal(nx.max_weight_matching(G, 1), {(1, 2), (3, 4)}) + assert edges_equal(nx.min_weight_matching(G), {(1, 2), (3, 4)}) + assert edges_equal(nx.min_weight_matching(G, 1), {(1, 2), (3, 4)}) + + def test_square(self): + G = nx.Graph() + G.add_edge(1, 4, weight=2) + G.add_edge(2, 3, weight=2) + G.add_edge(1, 2, weight=1) + G.add_edge(3, 4, weight=4) + assert edges_equal(nx.max_weight_matching(G), {(1, 2), (3, 4)}) + assert edges_equal(nx.min_weight_matching(G), {(1, 4), (2, 3)}) + + def test_edge_attribute_name(self): + G = nx.Graph() + G.add_edge("one", "two", weight=10, abcd=11) + G.add_edge("two", "three", weight=11, abcd=10) + assert edges_equal(nx.max_weight_matching(G, weight="abcd"), {("one", "two")}) + assert edges_equal(nx.min_weight_matching(G, weight="abcd"), {("two", "three")}) + + def test_floating_point_weights(self): + G = nx.Graph() + G.add_edge(1, 2, weight=math.pi) + G.add_edge(2, 3, weight=math.exp(1)) + G.add_edge(1, 3, weight=3.0) + G.add_edge(1, 4, weight=math.sqrt(2.0)) + assert edges_equal(nx.max_weight_matching(G), {(1, 4), (2, 3)}) + assert edges_equal(nx.min_weight_matching(G), {(1, 4), (2, 3)}) + + def test_negative_weights(self): + G = nx.Graph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=-2) + G.add_edge(2, 3, weight=1) + G.add_edge(2, 4, weight=-1) + G.add_edge(3, 4, weight=-6) + assert edges_equal(nx.max_weight_matching(G), {(1, 2)}) + assert edges_equal( + nx.max_weight_matching(G, maxcardinality=True), {(1, 3), (2, 4)} + ) + assert edges_equal(nx.min_weight_matching(G), {(1, 2), (3, 4)}) + + def test_s_blossom(self): + """Create S-blossom and use it for augmentation:""" + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 8), (1, 3, 9), (2, 3, 10), (3, 4, 7)]) + answer = {(1, 2), (3, 4)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + G.add_weighted_edges_from([(1, 6, 5), (4, 5, 6)]) + answer = {(1, 6), (2, 3), (4, 5)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_s_t_blossom(self): + """Create S-blossom, relabel as T-blossom, use for augmentation:""" + G = nx.Graph() + G.add_weighted_edges_from( + [(1, 2, 9), (1, 3, 8), (2, 3, 10), (1, 4, 5), (4, 5, 4), (1, 6, 3)] + ) + answer = {(1, 6), (2, 3), (4, 5)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + G.add_edge(4, 5, weight=3) + G.add_edge(1, 6, weight=4) + assert edges_equal(nx.max_weight_matching(G), answer) + assert 
edges_equal(nx.min_weight_matching(G), answer) + + G.remove_edge(1, 6) + G.add_edge(3, 6, weight=4) + answer = {(1, 2), (3, 6), (4, 5)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nested_s_blossom(self): + """Create nested S-blossom, use for augmentation:""" + + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 9), + (1, 3, 9), + (2, 3, 10), + (2, 4, 8), + (3, 5, 8), + (4, 5, 10), + (5, 6, 6), + ] + ) + expected_edgeset = {(1, 3), (2, 4), (5, 6)} + expected = {frozenset(e) for e in expected_edgeset} + answer = {frozenset(e) for e in nx.max_weight_matching(G)} + assert answer == expected + answer = {frozenset(e) for e in nx.min_weight_matching(G)} + assert answer == expected + + def test_nested_s_blossom_relabel(self): + """Create S-blossom, relabel as S, include in nested S-blossom:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 10), + (1, 7, 10), + (2, 3, 12), + (3, 4, 20), + (3, 5, 20), + (4, 5, 25), + (5, 6, 10), + (6, 7, 10), + (7, 8, 8), + ] + ) + answer = {(1, 2), (3, 4), (5, 6), (7, 8)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nested_s_blossom_expand(self): + """Create nested S-blossom, augment, expand recursively:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 8), + (1, 3, 8), + (2, 3, 10), + (2, 4, 12), + (3, 5, 12), + (4, 5, 14), + (4, 6, 12), + (5, 7, 12), + (6, 7, 14), + (7, 8, 12), + ] + ) + answer = {(1, 2), (3, 5), (4, 6), (7, 8)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_s_blossom_relabel_expand(self): + """Create S-blossom, relabel as T, expand:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 23), + (1, 5, 22), + (1, 6, 15), + (2, 3, 25), + (3, 4, 22), + (4, 5, 25), + (4, 8, 14), + (5, 7, 13), + ] + ) + answer = {(1, 6), (2, 3), (4, 8), (5, 7)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nested_s_blossom_relabel_expand(self): + """Create nested S-blossom, relabel as T, expand:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 19), + (1, 3, 20), + (1, 8, 8), + (2, 3, 25), + (2, 4, 18), + (3, 5, 18), + (4, 5, 13), + (4, 7, 7), + (5, 6, 7), + ] + ) + answer = {(1, 8), (2, 3), (4, 7), (5, 6)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nasty_blossom1(self): + """Create blossom, relabel as T in more than one way, expand, + augment: + """ + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 35), + (5, 7, 26), + (9, 10, 5), + ] + ) + answer = {(1, 6), (2, 3), (4, 8), (5, 7), (9, 10)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nasty_blossom2(self): + """Again but slightly different:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 26), + (5, 7, 40), + (9, 10, 5), + ] + ) + answer = {(1, 6), (2, 3), (4, 8), (5, 7), (9, 10)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nasty_blossom_least_slack(self): + """Create blossom, relabel as T, expand such 
that a new + least-slack S-to-free edge is produced, augment: + """ + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 5, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 50), + (1, 6, 30), + (3, 9, 35), + (4, 8, 28), + (5, 7, 26), + (9, 10, 5), + ] + ) + answer = {(1, 6), (2, 3), (4, 8), (5, 7), (9, 10)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nasty_blossom_augmenting(self): + """Create nested blossom, relabel as T in more than one way""" + # expand outer blossom such that inner blossom ends up on an + # augmenting path: + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 45), + (1, 7, 45), + (2, 3, 50), + (3, 4, 45), + (4, 5, 95), + (4, 6, 94), + (5, 6, 94), + (6, 7, 50), + (1, 8, 30), + (3, 11, 35), + (5, 9, 36), + (7, 10, 26), + (11, 12, 5), + ] + ) + answer = {(1, 8), (2, 3), (4, 6), (5, 9), (7, 10), (11, 12)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + def test_nasty_blossom_expand_recursively(self): + """Create nested S-blossom, relabel as S, expand recursively:""" + G = nx.Graph() + G.add_weighted_edges_from( + [ + (1, 2, 40), + (1, 3, 40), + (2, 3, 60), + (2, 4, 55), + (3, 5, 55), + (4, 5, 50), + (1, 8, 15), + (5, 7, 30), + (7, 6, 10), + (8, 10, 10), + (4, 9, 30), + ] + ) + answer = {(1, 2), (3, 5), (4, 9), (6, 7), (8, 10)} + assert edges_equal(nx.max_weight_matching(G), answer) + assert edges_equal(nx.min_weight_matching(G), answer) + + +class TestIsMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.is_matching` function. + + """ + + def test_dict(self): + G = nx.path_graph(4) + assert nx.is_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) + + def test_empty_matching(self): + G = nx.path_graph(4) + assert nx.is_matching(G, set()) + + def test_single_edge(self): + G = nx.path_graph(4) + assert nx.is_matching(G, {(1, 2)}) + + def test_edge_order(self): + G = nx.path_graph(4) + assert nx.is_matching(G, {(0, 1), (2, 3)}) + assert nx.is_matching(G, {(1, 0), (2, 3)}) + assert nx.is_matching(G, {(0, 1), (3, 2)}) + assert nx.is_matching(G, {(1, 0), (3, 2)}) + + def test_valid_matching(self): + G = nx.path_graph(4) + assert nx.is_matching(G, {(0, 1), (2, 3)}) + + def test_selfloops(self): + G = nx.path_graph(4) + # selfloop edge not in G + assert not nx.is_matching(G, {(0, 0), (1, 2), (2, 3)}) + # selfloop edge in G + G.add_edge(0, 0) + assert not nx.is_matching(G, {(0, 0), (1, 2)}) + + def test_invalid_matching(self): + G = nx.path_graph(4) + assert not nx.is_matching(G, {(0, 1), (1, 2), (2, 3)}) + + def test_invalid_edge(self): + G = nx.path_graph(4) + assert not nx.is_matching(G, {(0, 3), (1, 2)}) + + G = nx.DiGraph(G.edges) + assert nx.is_matching(G, {(0, 1)}) + assert not nx.is_matching(G, {(1, 0)}) + + +class TestIsMaximalMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.is_maximal_matching` function.
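+ + A matching is maximal when no edge of G can be added to it without sharing an endpoint with an edge already present; maximal does not imply maximum.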
+ + """ + + def test_dict(self): + G = nx.path_graph(4) + assert nx.is_maximal_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) + + def test_valid(self): + G = nx.path_graph(4) + assert nx.is_maximal_matching(G, {(0, 1), (2, 3)}) + + def test_not_matching(self): + G = nx.path_graph(4) + assert not nx.is_maximal_matching(G, {(0, 1), (1, 2), (2, 3)}) + assert not nx.is_maximal_matching(G, {(0, 3)}) + G.add_edge(0, 0) + assert not nx.is_maximal_matching(G, {(0, 0)}) + + def test_not_maximal(self): + G = nx.path_graph(4) + assert not nx.is_maximal_matching(G, {(0, 1)}) + + +class TestIsPerfectMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.is_perfect_matching` function. + + """ + + def test_dict(self): + G = nx.path_graph(4) + assert nx.is_perfect_matching(G, {0: 1, 1: 0, 2: 3, 3: 2}) + + def test_valid(self): + G = nx.path_graph(4) + assert nx.is_perfect_matching(G, {(0, 1), (2, 3)}) + + def test_valid_not_path(self): + G = nx.cycle_graph(4) + G.add_edge(0, 4) + G.add_edge(1, 4) + G.add_edge(5, 2) + + assert nx.is_perfect_matching(G, {(1, 4), (0, 3), (5, 2)}) + + def test_selfloops(self): + G = nx.path_graph(4) + # selfloop edge not in G + assert not nx.is_perfect_matching(G, {(0, 0), (1, 2), (2, 3)}) + # selfloop edge in G + G.add_edge(0, 0) + assert not nx.is_perfect_matching(G, {(0, 0), (1, 2)}) + + def test_not_matching(self): + G = nx.path_graph(4) + assert not nx.is_perfect_matching(G, {(0, 3)}) + assert not nx.is_perfect_matching(G, {(0, 1), (1, 2), (2, 3)}) + + def test_maximal_but_not_perfect(self): + G = nx.cycle_graph(4) + G.add_edge(0, 4) + G.add_edge(1, 4) + + assert not nx.is_perfect_matching(G, {(1, 4), (0, 3)}) + + +class TestMaximalMatching: + """Unit tests for the + :func:`~networkx.algorithms.matching.maximal_matching`. + + """ + + def test_valid_matching(self): + edges = [(1, 2), (1, 5), (2, 3), (2, 5), (3, 4), (3, 6), (5, 6)] + G = nx.Graph(edges) + matching = nx.maximal_matching(G) + assert nx.is_maximal_matching(G, matching) + + def test_single_edge_matching(self): + # In the star graph, any maximal matching has just one edge. + G = nx.star_graph(5) + matching = nx.maximal_matching(G) + assert 1 == len(matching) + assert nx.is_maximal_matching(G, matching) + + def test_self_loops(self): + # Create the path graph with two self-loops. + G = nx.path_graph(3) + G.add_edges_from([(0, 0), (1, 1)]) + matching = nx.maximal_matching(G) + assert len(matching) == 1 + # The matching should never include self-loops. + assert not any(u == v for u, v in matching) + assert nx.is_maximal_matching(G, matching) + + def test_ordering(self): + """Tests that a maximal matching is computed correctly + regardless of the order in which nodes are added to the graph. 
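+ + (Insertion order changes the order in which edges are scanned, which the matching routine must tolerate.)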
+ + """ + for nodes in permutations(range(3)): + G = nx.Graph() + G.add_nodes_from(nodes) + G.add_edges_from([(0, 1), (0, 2)]) + matching = nx.maximal_matching(G) + assert len(matching) == 1 + assert nx.is_maximal_matching(G, matching) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py new file mode 100644 index 0000000..6cd8584 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_max_weight_clique.py @@ -0,0 +1,179 @@ +"""Maximum weight clique test suite.""" + +import pytest + +import networkx as nx + + +class TestMaximumWeightClique: + def test_basic_cases(self): + def check_basic_case(graph_func, expected_weight, weight_accessor): + graph = graph_func() + clique, weight = nx.algorithms.max_weight_clique(graph, weight_accessor) + assert verify_clique( + graph, clique, weight, expected_weight, weight_accessor + ) + + for graph_func, (expected_weight, expected_size) in TEST_CASES.items(): + check_basic_case(graph_func, expected_weight, "weight") + check_basic_case(graph_func, expected_size, None) + + def test_key_error(self): + graph = two_node_graph() + with pytest.raises(KeyError): + nx.algorithms.max_weight_clique(graph, "nonexistent-key") + + def test_error_on_non_integer_weight(self): + graph = two_node_graph() + graph.nodes[2]["weight"] = 1.5 + with pytest.raises(ValueError): + nx.algorithms.max_weight_clique(graph) + + def test_unaffected_by_self_loops(self): + graph = two_node_graph() + graph.add_edge(1, 1) + graph.add_edge(2, 2) + clique, weight = nx.algorithms.max_weight_clique(graph, "weight") + assert verify_clique(graph, clique, weight, 30, "weight") + graph = three_node_independent_set() + graph.add_edge(1, 1) + clique, weight = nx.algorithms.max_weight_clique(graph, "weight") + assert verify_clique(graph, clique, weight, 20, "weight") + + def test_30_node_prob(self): + G = nx.Graph() + G.add_nodes_from(range(1, 31)) + for i in range(1, 31): + G.nodes[i]["weight"] = i + 1 + # fmt: off + G.add_edges_from( + [ + (1, 12), (1, 13), (1, 15), (1, 16), (1, 18), (1, 19), (1, 20), + (1, 23), (1, 26), (1, 28), (1, 29), (1, 30), (2, 3), (2, 4), + (2, 5), (2, 8), (2, 9), (2, 10), (2, 14), (2, 17), (2, 18), + (2, 21), (2, 22), (2, 23), (2, 27), (3, 9), (3, 15), (3, 21), + (3, 22), (3, 23), (3, 24), (3, 27), (3, 28), (3, 29), (4, 5), + (4, 6), (4, 8), (4, 21), (4, 22), (4, 23), (4, 26), (4, 28), + (4, 30), (5, 6), (5, 8), (5, 9), (5, 13), (5, 14), (5, 15), + (5, 16), (5, 20), (5, 21), (5, 22), (5, 25), (5, 28), (5, 29), + (6, 7), (6, 8), (6, 13), (6, 17), (6, 18), (6, 19), (6, 24), + (6, 26), (6, 27), (6, 28), (6, 29), (7, 12), (7, 14), (7, 15), + (7, 16), (7, 17), (7, 20), (7, 25), (7, 27), (7, 29), (7, 30), + (8, 10), (8, 15), (8, 16), (8, 18), (8, 20), (8, 22), (8, 24), + (8, 26), (8, 27), (8, 28), (8, 30), (9, 11), (9, 12), (9, 13), + (9, 14), (9, 15), (9, 16), (9, 19), (9, 20), (9, 21), (9, 24), + (9, 30), (10, 12), (10, 15), (10, 18), (10, 19), (10, 20), + (10, 22), (10, 23), (10, 24), (10, 26), (10, 27), (10, 29), + (10, 30), (11, 13), (11, 15), (11, 16), (11, 17), (11, 18), + (11, 19), (11, 20), (11, 22), (11, 29), (11, 30), (12, 14), + (12, 17), (12, 18), (12, 19), (12, 20), (12, 21), (12, 23), + (12, 25), (12, 26), (12, 30), (13, 20), (13, 22), (13, 23), + (13, 24), (13, 30), (14, 16), (14, 20), (14, 21), (14, 22), + (14, 23), (14, 25), (14, 26), (14, 27), (14, 29), (14, 30), + (15, 17), (15, 18), (15, 20), (15, 
21), (15, 26), (15, 27), + (15, 28), (16, 17), (16, 18), (16, 19), (16, 20), (16, 21), + (16, 29), (16, 30), (17, 18), (17, 21), (17, 22), (17, 25), + (17, 27), (17, 28), (17, 30), (18, 19), (18, 20), (18, 21), + (18, 22), (18, 23), (18, 24), (19, 20), (19, 22), (19, 23), + (19, 24), (19, 25), (19, 27), (19, 30), (20, 21), (20, 23), + (20, 24), (20, 26), (20, 28), (20, 29), (21, 23), (21, 26), + (21, 27), (21, 29), (22, 24), (22, 25), (22, 26), (22, 29), + (23, 25), (23, 30), (24, 25), (24, 26), (25, 27), (25, 29), + (26, 27), (26, 28), (26, 30), (28, 29), (29, 30), + ] + ) + # fmt: on + clique, weight = nx.algorithms.max_weight_clique(G) + assert verify_clique(G, clique, weight, 111, "weight") + + +# ############################ Utility functions ############################ +def verify_clique( + graph, clique, reported_clique_weight, expected_clique_weight, weight_accessor +): + for node1 in clique: + for node2 in clique: + if node1 == node2: + continue + if not graph.has_edge(node1, node2): + return False + + if weight_accessor is None: + clique_weight = len(clique) + else: + clique_weight = sum(graph.nodes[v]["weight"] for v in clique) + + if clique_weight != expected_clique_weight: + return False + if clique_weight != reported_clique_weight: + return False + + return True + + +# ############################ Graph Generation ############################ + + +def empty_graph(): + return nx.Graph() + + +def one_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1]) + graph.nodes[1]["weight"] = 10 + return graph + + +def two_node_graph(): + graph = nx.Graph() + graph.add_nodes_from([1, 2]) + graph.add_edges_from([(1, 2)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + return graph + + +def three_node_clique(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3]) + graph.add_edges_from([(1, 2), (1, 3), (2, 3)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + return graph + + +def three_node_independent_set(): + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + return graph + + +def disconnected(): + graph = nx.Graph() + graph.add_edges_from([(1, 2), (2, 3), (4, 5), (5, 6)]) + graph.nodes[1]["weight"] = 10 + graph.nodes[2]["weight"] = 20 + graph.nodes[3]["weight"] = 5 + graph.nodes[4]["weight"] = 100 + graph.nodes[5]["weight"] = 200 + graph.nodes[6]["weight"] = 50 + return graph + + +# -------------------------------------------------------------------------- +# Basic tests for all strategies +# For each basic graph function, specify expected weight of max weight clique +# and expected size of maximum clique +TEST_CASES = { + empty_graph: (0, 0), + one_node_graph: (10, 1), + two_node_graph: (30, 2), + three_node_clique: (35, 3), + three_node_independent_set: (20, 1), + disconnected: (300, 2), +} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py new file mode 100644 index 0000000..02be02d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_mis.py @@ -0,0 +1,62 @@ +""" +Tests for maximal (not maximum) independent sets. 
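+
+A maximal independent set is one that cannot be enlarged by adding another
+node; it need not be a maximum (largest possible) independent set. A quick
+doctest-style illustration on the path graph 0 - 1 - 2, where {1} is
+maximal although {0, 2} is the maximum independent set:
+
+    >>> import networkx as nx
+    >>> nx.maximal_independent_set(nx.path_graph(3), [1])
+    [1]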
+ +""" + +import random + +import pytest + +import networkx as nx + + +def test_random_seed(): + G = nx.empty_graph(5) + assert nx.maximal_independent_set(G, seed=1) == [1, 0, 3, 2, 4] + + +@pytest.mark.parametrize("graph", [nx.complete_graph(5), nx.complete_graph(55)]) +def test_K5(graph): + """Maximal independent set for complete graphs""" + assert all(nx.maximal_independent_set(graph, [n]) == [n] for n in graph) + + +def test_exceptions(): + """Bad input should raise exception.""" + G = nx.florentine_families_graph() + pytest.raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"]) + pytest.raises( + nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"] + ) + # MaximalIndependentSet is not implemented for directed graphs + pytest.raises(nx.NetworkXNotImplemented, nx.maximal_independent_set, nx.DiGraph(G)) + + +def test_florentine_family(): + G = nx.florentine_families_graph() + indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"]) + assert set(indep) == { + "Medici", + "Bischeri", + "Castellani", + "Pazzi", + "Ginori", + "Lamberteschi", + } + + +def test_bipartite(): + G = nx.complete_bipartite_graph(12, 34) + indep = nx.maximal_independent_set(G, [4, 5, 9, 10]) + assert sorted(indep) == list(range(12)) + + +def test_random_graphs(): + """Generate 5 random graphs of different types and sizes and + make sure that all sets are independent and maximal.""" + for i in range(0, 50, 10): + G = nx.erdos_renyi_graph(i * 10 + 1, random.random()) + IS = nx.maximal_independent_set(G) + assert G.subgraph(IS).number_of_edges() == 0 + nbrs_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) + assert all(v in nbrs_of_MIS for v in set(G.nodes()).difference(IS)) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py new file mode 100644 index 0000000..fc98c97 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_moral.py @@ -0,0 +1,15 @@ +import networkx as nx +from networkx.algorithms.moral import moral_graph + + +def test_get_moral_graph(): + graph = nx.DiGraph() + graph.add_nodes_from([1, 2, 3, 4, 5, 6, 7]) + graph.add_edges_from([(1, 2), (3, 2), (4, 1), (4, 5), (6, 5), (7, 5)]) + H = moral_graph(graph) + assert not H.is_directed() + assert H.has_edge(1, 3) + assert H.has_edge(4, 6) + assert H.has_edge(6, 7) + assert H.has_edge(4, 7) + assert not H.has_edge(1, 5) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py new file mode 100644 index 0000000..2e1fc79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_node_classification.py @@ -0,0 +1,140 @@ +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + +import networkx as nx +from networkx.algorithms import node_classification + + +class TestHarmonicFunction: + def test_path_graph(self): + G = nx.path_graph(4) + label_name = "label" + G.nodes[0][label_name] = "A" + G.nodes[3][label_name] = "B" + predicted = node_classification.harmonic_function(G, label_name=label_name) + assert predicted[0] == "A" + assert predicted[1] == "A" + assert predicted[2] == "B" + assert predicted[3] == "B" + + def test_no_labels(self): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(4) + node_classification.harmonic_function(G) + + def test_no_nodes(self): + with 
pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            node_classification.harmonic_function(G)
+
+    def test_no_edges(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_node(1)
+            G.add_node(2)
+            node_classification.harmonic_function(G)
+
+    def test_digraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            G.add_edge(0, 1)
+            G.add_edge(1, 2)
+            G.add_edge(2, 3)
+            label_name = "label"
+            G.nodes[0][label_name] = "A"
+            G.nodes[3][label_name] = "B"
+            node_classification.harmonic_function(G)
+
+    def test_one_labeled_node(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "A"
+        assert predicted[3] == "A"
+
+    def test_nodes_all_labeled(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        for i in range(len(G)):
+            assert predicted[i] == G.nodes[i][label_name]
+
+    def test_labeled_nodes_are_not_changed(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        label_removed = {0, 1, 2, 3, 4, 5, 6, 7}
+        for i in label_removed:
+            del G.nodes[i][label_name]
+        predicted = node_classification.harmonic_function(G, label_name=label_name)
+        label_not_removed = set(range(len(G))) - label_removed
+        for i in label_not_removed:
+            assert predicted[i] == G.nodes[i][label_name]
+
+
+class TestLocalAndGlobalConsistency:
+    def test_path_graph(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        G.nodes[3][label_name] = "B"
+        predicted = node_classification.local_and_global_consistency(
+            G, label_name=label_name
+        )
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "B"
+        assert predicted[3] == "B"
+
+    def test_no_labels(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.path_graph(4)
+            node_classification.local_and_global_consistency(G)
+
+    def test_no_nodes(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            node_classification.local_and_global_consistency(G)
+
+    def test_no_edges(self):
+        with pytest.raises(nx.NetworkXError):
+            G = nx.Graph()
+            G.add_node(1)
+            G.add_node(2)
+            node_classification.local_and_global_consistency(G)
+
+    def test_digraph(self):
+        with pytest.raises(nx.NetworkXNotImplemented):
+            G = nx.DiGraph()
+            G.add_edge(0, 1)
+            G.add_edge(1, 2)
+            G.add_edge(2, 3)
+            label_name = "label"
+            G.nodes[0][label_name] = "A"
+            G.nodes[3][label_name] = "B"
+            # exercise the function under test (not harmonic_function,
+            # which was a copy-paste from the class above)
+            node_classification.local_and_global_consistency(G)
+
+    def test_one_labeled_node(self):
+        G = nx.path_graph(4)
+        label_name = "label"
+        G.nodes[0][label_name] = "A"
+        predicted = node_classification.local_and_global_consistency(
+            G, label_name=label_name
+        )
+        assert predicted[0] == "A"
+        assert predicted[1] == "A"
+        assert predicted[2] == "A"
+        assert predicted[3] == "A"
+
+    def test_nodes_all_labeled(self):
+        G = nx.karate_club_graph()
+        label_name = "club"
+        predicted = node_classification.local_and_global_consistency(
+            G, alpha=0, label_name=label_name
+        )
+        for i in range(len(G)):
+            assert predicted[i] == G.nodes[i][label_name]
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py
new file mode 100644
index 0000000..2f495be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_non_randomness.py
@@ 
-0,0 +1,42 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") + + +@pytest.mark.parametrize( + "k, weight, expected", + [ + (None, None, 7.21), # infers 3 communities + (2, None, 11.7), + (None, "weight", 25.45), + (2, "weight", 38.8), + ], +) +def test_non_randomness(k, weight, expected): + G = nx.karate_club_graph() + np.testing.assert_almost_equal( + nx.non_randomness(G, k, weight)[0], expected, decimal=2 + ) + + +def test_non_connected(): + G = nx.Graph([(1, 2)]) + G.add_node(3) + with pytest.raises(nx.NetworkXException, match="Non connected"): + nx.non_randomness(G) + + +def test_self_loops(): + G = nx.Graph() + G.add_edge(1, 2) + G.add_edge(1, 1) + with pytest.raises(nx.NetworkXError, match="Graph must not contain self-loops"): + nx.non_randomness(G) + + +def test_empty_graph(): + G = nx.empty_graph(1) + with pytest.raises(nx.NetworkXError, match=".*not applicable to empty graphs"): + nx.non_randomness(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py new file mode 100644 index 0000000..b59d5c1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planar_drawing.py @@ -0,0 +1,274 @@ +import math + +import pytest + +import networkx as nx +from networkx.algorithms.planar_drawing import triangulate_embedding + + +def test_graph1(): + embedding_data = {0: [1, 2, 3], 1: [2, 0], 2: [3, 0, 1], 3: [2, 0]} + check_embedding_data(embedding_data) + + +def test_graph2(): + embedding_data = { + 0: [8, 6], + 1: [2, 6, 9], + 2: [8, 1, 7, 9, 6, 4], + 3: [9], + 4: [2], + 5: [6, 8], + 6: [9, 1, 0, 5, 2], + 7: [9, 2], + 8: [0, 2, 5], + 9: [1, 6, 2, 7, 3], + } + check_embedding_data(embedding_data) + + +def test_circle_graph(): + embedding_data = { + 0: [1, 9], + 1: [0, 2], + 2: [1, 3], + 3: [2, 4], + 4: [3, 5], + 5: [4, 6], + 6: [5, 7], + 7: [6, 8], + 8: [7, 9], + 9: [8, 0], + } + check_embedding_data(embedding_data) + + +def test_grid_graph(): + embedding_data = { + (0, 1): [(0, 0), (1, 1), (0, 2)], + (1, 2): [(1, 1), (2, 2), (0, 2)], + (0, 0): [(0, 1), (1, 0)], + (2, 1): [(2, 0), (2, 2), (1, 1)], + (1, 1): [(2, 1), (1, 2), (0, 1), (1, 0)], + (2, 0): [(1, 0), (2, 1)], + (2, 2): [(1, 2), (2, 1)], + (1, 0): [(0, 0), (2, 0), (1, 1)], + (0, 2): [(1, 2), (0, 1)], + } + check_embedding_data(embedding_data) + + +def test_one_node_graph(): + embedding_data = {0: []} + check_embedding_data(embedding_data) + + +def test_two_node_graph(): + embedding_data = {0: [1], 1: [0]} + check_embedding_data(embedding_data) + + +def test_three_node_graph(): + embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1]} + check_embedding_data(embedding_data) + + +def test_multiple_component_graph1(): + embedding_data = {0: [], 1: []} + check_embedding_data(embedding_data) + + +def test_multiple_component_graph2(): + embedding_data = {0: [1, 2], 1: [0, 2], 2: [0, 1], 3: [4, 5], 4: [3, 5], 5: [3, 4]} + check_embedding_data(embedding_data) + + +def test_invalid_half_edge(): + with pytest.raises(nx.NetworkXException): + embedding_data = {1: [2, 3, 4], 2: [1, 3, 4], 3: [1, 2, 4], 4: [1, 2, 3]} + embedding = nx.PlanarEmbedding() + embedding.set_data(embedding_data) + nx.combinatorial_embedding_to_pos(embedding) + + +def test_triangulate_embedding1(): + embedding = nx.PlanarEmbedding() + embedding.add_node(1) + expected_embedding = {1: []} + check_triangulation(embedding, expected_embedding) + + +def test_triangulate_embedding2(): + embedding = 
nx.PlanarEmbedding()
+    embedding.connect_components(1, 2)
+    expected_embedding = {1: [2], 2: [1]}
+    check_triangulation(embedding, expected_embedding)
+
+
+def check_triangulation(embedding, expected_embedding):
+    res_embedding, _ = triangulate_embedding(embedding, True)
+    assert res_embedding.get_data() == expected_embedding, (
+        "Expected embedding incorrect"
+    )
+    res_embedding, _ = triangulate_embedding(embedding, False)
+    assert res_embedding.get_data() == expected_embedding, (
+        "Expected embedding incorrect"
+    )
+
+
+def check_embedding_data(embedding_data):
+    """Checks that the planar embedding of the input is correct"""
+    embedding = nx.PlanarEmbedding()
+    embedding.set_data(embedding_data)
+    pos_fully = nx.combinatorial_embedding_to_pos(embedding, False)
+    msg = "Planar drawing does not conform to the embedding (full triangulation)"
+    assert planar_drawing_conforms_to_embedding(embedding, pos_fully), msg
+    check_edge_intersections(embedding, pos_fully)
+    pos_internally = nx.combinatorial_embedding_to_pos(embedding, True)
+    msg = "Planar drawing does not conform to the embedding (internal triangulation)"
+    assert planar_drawing_conforms_to_embedding(embedding, pos_internally), msg
+    check_edge_intersections(embedding, pos_internally)
+
+
+def is_close(a, b, rel_tol=1e-09, abs_tol=0.0):
+    # Check if two floats are essentially equal; math.isclose provides the
+    # same check in the standard library for Python >= 3.5
+    return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
+
+
+def point_in_between(a, b, p):
+    # checks if p is on the line segment between a and b
+    x1, y1 = a
+    x2, y2 = b
+    px, py = p
+    dist_1_2 = math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)
+    dist_1_p = math.sqrt((x1 - px) ** 2 + (y1 - py) ** 2)
+    dist_2_p = math.sqrt((x2 - px) ** 2 + (y2 - py) ** 2)
+    return is_close(dist_1_p + dist_2_p, dist_1_2)
+
+
+def check_edge_intersections(G, pos):
+    """Check all edges in G for intersections.
+
+    Raises an exception if an intersection is found.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    pos : dict
+        Maps every node to a tuple (x, y) representing its position
+
+    """
+    for a, b in G.edges():
+        for c, d in G.edges():
+            # Check if end points are different
+            if a != c and b != d and b != c and a != d:
+                x1, y1 = pos[a]
+                x2, y2 = pos[b]
+                x3, y3 = pos[c]
+                x4, y4 = pos[d]
+                determinant = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
+                if determinant != 0:  # the lines are not parallel
+                    # calculate intersection point, see:
+                    # https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
+                    # (the division must apply to the whole numerator, so the
+                    # parentheses around it are essential)
+                    px = (
+                        (x1 * y2 - y1 * x2) * (x3 - x4)
+                        - (x1 - x2) * (x3 * y4 - y3 * x4)
+                    ) / determinant
+                    py = (
+                        (x1 * y2 - y1 * x2) * (y3 - y4)
+                        - (y1 - y2) * (x3 * y4 - y3 * x4)
+                    ) / determinant
+
+                    # Check if intersection lies between the points
+                    if point_in_between(pos[a], pos[b], (px, py)) and point_in_between(
+                        pos[c], pos[d], (px, py)
+                    ):
+                        msg = f"There is an intersection at {px},{py}"
+                        raise nx.NetworkXException(msg)
+
+                # Check overlap
+                msg = "A node lies on an edge connecting two other nodes"
+                if (
+                    point_in_between(pos[a], pos[b], pos[c])
+                    or point_in_between(pos[a], pos[b], pos[d])
+                    or point_in_between(pos[c], pos[d], pos[a])
+                    or point_in_between(pos[c], pos[d], pos[b])
+                ):
+                    raise nx.NetworkXException(msg)
+    # No edge intersection found
+
+
+class Vector:
+    """Compare vectors by their angle without loss of precision
+
+    All vectors in direction [0, 1] are the smallest.
+    The vectors grow in clockwise direction.
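+
+    Ordering never computes an angle explicitly: each vector is bucketed
+    into one of four clockwise quadrants (starting at the positive y-axis)
+    and, within a quadrant, two vectors are compared by the sign of the 2D
+    cross product self.x * other.y - self.y * other.x, so only exact
+    products are used.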
+ """ + + __slots__ = ["x", "y", "node", "quadrant"] + + def __init__(self, x, y, node): + self.x = x + self.y = y + self.node = node + if self.x >= 0 and self.y > 0: + self.quadrant = 1 + elif self.x > 0 and self.y <= 0: + self.quadrant = 2 + elif self.x <= 0 and self.y < 0: + self.quadrant = 3 + else: + self.quadrant = 4 + + def __eq__(self, other): + return self.quadrant == other.quadrant and self.x * other.y == self.y * other.x + + def __lt__(self, other): + if self.quadrant < other.quadrant: + return True + elif self.quadrant > other.quadrant: + return False + else: + return self.x * other.y < self.y * other.x + + def __ne__(self, other): + return self != other + + def __le__(self, other): + return not other < self + + def __gt__(self, other): + return other < self + + def __ge__(self, other): + return not self < other + + +def planar_drawing_conforms_to_embedding(embedding, pos): + """Checks if pos conforms to the planar embedding + + Returns true iff the neighbors are actually oriented in the orientation + specified of the embedding + """ + for v in embedding: + nbr_vectors = [] + v_pos = pos[v] + for nbr in embedding[v]: + new_vector = Vector(pos[nbr][0] - v_pos[0], pos[nbr][1] - v_pos[1], nbr) + nbr_vectors.append(new_vector) + # Sort neighbors according to their phi angle + nbr_vectors.sort() + for idx, nbr_vector in enumerate(nbr_vectors): + cw_vector = nbr_vectors[(idx + 1) % len(nbr_vectors)] + ccw_vector = nbr_vectors[idx - 1] + if ( + embedding[v][nbr_vector.node]["cw"] != cw_vector.node + or embedding[v][nbr_vector.node]["ccw"] != ccw_vector.node + ): + return False + if cw_vector.node != nbr_vector.node and cw_vector == nbr_vector: + # Lines overlap + return False + if ccw_vector.node != nbr_vector.node and ccw_vector == nbr_vector: + # Lines overlap + return False + return True diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py new file mode 100644 index 0000000..af2d9a9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_planarity.py @@ -0,0 +1,556 @@ +import pytest + +import networkx as nx +from networkx.algorithms.planarity import ( + check_planarity_recursive, + get_counterexample, + get_counterexample_recursive, +) + + +class TestLRPlanarity: + """Nose Unit tests for the :mod:`networkx.algorithms.planarity` module. + + Tests three things: + 1. Check that the result is correct + (returns planar if and only if the graph is actually planar) + 2. In case a counter example is returned: Check if it is correct + 3. In case an embedding is returned: Check if its actually an embedding + """ + + @staticmethod + def check_graph(G, is_planar=None): + """Raises an exception if the lr_planarity check returns a wrong result + + Parameters + ---------- + G : NetworkX graph + is_planar : bool + The expected result of the planarity check. + If set to None only counter example or embedding are verified. + + """ + + # obtain results of planarity check + is_planar_lr, result = nx.check_planarity(G, True) + is_planar_lr_rec, result_rec = check_planarity_recursive(G, True) + + if is_planar is not None: + # set a message for the assert + if is_planar: + msg = "Wrong planarity check result. Should be planar." + else: + msg = "Wrong planarity check result. Should be non-planar." 
+ + # check if the result is as expected + assert is_planar == is_planar_lr, msg + assert is_planar == is_planar_lr_rec, msg + + if is_planar_lr: + # check embedding + check_embedding(G, result) + check_embedding(G, result_rec) + else: + # check counter example + check_counterexample(G, result) + check_counterexample(G, result_rec) + + def test_simple_planar_graph(self): + e = [ + (1, 2), + (2, 3), + (3, 4), + (4, 6), + (6, 7), + (7, 1), + (1, 5), + (5, 2), + (2, 4), + (4, 5), + (5, 7), + ] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_planar_with_selfloop(self): + e = [ + (1, 1), + (2, 2), + (3, 3), + (4, 4), + (5, 5), + (1, 2), + (1, 3), + (1, 5), + (2, 5), + (2, 4), + (3, 4), + (3, 5), + (4, 5), + ] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_k3_3(self): + self.check_graph(nx.complete_bipartite_graph(3, 3), is_planar=False) + + def test_k5(self): + self.check_graph(nx.complete_graph(5), is_planar=False) + + def test_multiple_components_planar(self): + e = [(1, 2), (2, 3), (3, 1), (4, 5), (5, 6), (6, 4)] + self.check_graph(nx.Graph(e), is_planar=True) + + def test_multiple_components_non_planar(self): + G = nx.complete_graph(5) + # add another planar component to the non planar component + # G stays non planar + G.add_edges_from([(6, 7), (7, 8), (8, 6)]) + self.check_graph(G, is_planar=False) + + def test_non_planar_with_selfloop(self): + G = nx.complete_graph(5) + # add self loops + for i in range(5): + G.add_edge(i, i) + self.check_graph(G, is_planar=False) + + def test_non_planar1(self): + # tests a graph that has no subgraph directly isomorph to K5 or K3_3 + e = [ + (1, 5), + (1, 6), + (1, 7), + (2, 6), + (2, 3), + (3, 5), + (3, 7), + (4, 5), + (4, 6), + (4, 7), + ] + self.check_graph(nx.Graph(e), is_planar=False) + + def test_loop(self): + # test a graph with a selfloop + e = [(1, 2), (2, 2)] + G = nx.Graph(e) + self.check_graph(G, is_planar=True) + + def test_comp(self): + # test multiple component graph + e = [(1, 2), (3, 4)] + G = nx.Graph(e) + G.remove_edge(1, 2) + self.check_graph(G, is_planar=True) + + def test_goldner_harary(self): + # test goldner-harary graph (a maximal planar graph) + e = [ + (1, 2), + (1, 3), + (1, 4), + (1, 5), + (1, 7), + (1, 8), + (1, 10), + (1, 11), + (2, 3), + (2, 4), + (2, 6), + (2, 7), + (2, 9), + (2, 10), + (2, 11), + (3, 4), + (4, 5), + (4, 6), + (4, 7), + (5, 7), + (6, 7), + (7, 8), + (7, 9), + (7, 10), + (8, 10), + (9, 10), + (10, 11), + ] + G = nx.Graph(e) + self.check_graph(G, is_planar=True) + + def test_planar_multigraph(self): + G = nx.MultiGraph([(1, 2), (1, 2), (1, 2), (1, 2), (2, 3), (3, 1)]) + self.check_graph(G, is_planar=True) + + def test_non_planar_multigraph(self): + G = nx.MultiGraph(nx.complete_graph(5)) + G.add_edges_from([(1, 2)] * 5) + self.check_graph(G, is_planar=False) + + def test_planar_digraph(self): + G = nx.DiGraph([(1, 2), (2, 3), (2, 4), (4, 1), (4, 2), (1, 4), (3, 2)]) + self.check_graph(G, is_planar=True) + + def test_non_planar_digraph(self): + G = nx.DiGraph(nx.complete_graph(5)) + G.remove_edge(1, 2) + G.remove_edge(4, 1) + self.check_graph(G, is_planar=False) + + def test_single_component(self): + # Test a graph with only a single node + G = nx.Graph() + G.add_node(1) + self.check_graph(G, is_planar=True) + + def test_graph1(self): + G = nx.Graph( + [ + (3, 10), + (2, 13), + (1, 13), + (7, 11), + (0, 8), + (8, 13), + (0, 2), + (0, 7), + (0, 10), + (1, 7), + ] + ) + self.check_graph(G, is_planar=True) + + def test_graph2(self): + G = nx.Graph( + [ + (1, 2), + (4, 13), + (0, 13), 
+ (4, 5), + (7, 10), + (1, 7), + (0, 3), + (2, 6), + (5, 6), + (7, 13), + (4, 8), + (0, 8), + (0, 9), + (2, 13), + (6, 7), + (3, 6), + (2, 8), + ] + ) + self.check_graph(G, is_planar=False) + + def test_graph3(self): + G = nx.Graph( + [ + (0, 7), + (3, 11), + (3, 4), + (8, 9), + (4, 11), + (1, 7), + (1, 13), + (1, 11), + (3, 5), + (5, 7), + (1, 3), + (0, 4), + (5, 11), + (5, 13), + ] + ) + self.check_graph(G, is_planar=False) + + def test_counterexample_planar(self): + with pytest.raises(nx.NetworkXException): + # Try to get a counterexample of a planar graph + G = nx.Graph() + G.add_node(1) + get_counterexample(G) + + def test_counterexample_planar_recursive(self): + with pytest.raises(nx.NetworkXException): + # Try to get a counterexample of a planar graph + G = nx.Graph() + G.add_node(1) + get_counterexample_recursive(G) + + def test_edge_removal_from_planar_embedding(self): + # PlanarEmbedding.check_structure() must succeed after edge removal + edges = ((0, 1), (1, 2), (2, 3), (3, 4), (4, 0), (0, 2), (0, 3)) + G = nx.Graph(edges) + cert, P = nx.check_planarity(G) + assert cert is True + P.remove_edge(0, 2) + self.check_graph(P, is_planar=True) + P.add_half_edge_ccw(1, 3, 2) + P.add_half_edge_cw(3, 1, 2) + self.check_graph(P, is_planar=True) + P.remove_edges_from(((0, 3), (1, 3))) + self.check_graph(P, is_planar=True) + + @pytest.mark.parametrize("graph_type", (nx.Graph, nx.MultiGraph)) + def test_graph_planar_embedding_to_undirected(self, graph_type): + G = graph_type([(0, 1), (0, 1), (1, 2), (2, 3), (3, 0), (0, 2)]) + is_planar, P = nx.check_planarity(G) + assert is_planar + U = P.to_undirected() + assert isinstance(U, nx.Graph) + assert all((d == {} for _, _, d in U.edges(data=True))) + + @pytest.mark.parametrize( + "reciprocal, as_view", [(True, True), (True, False), (False, True)] + ) + def test_planar_embedding_to_undirected_invalid_parameters( + self, reciprocal, as_view + ): + G = nx.Graph([(0, 1), (1, 2), (2, 3), (3, 0), (0, 2)]) + is_planar, P = nx.check_planarity(G) + assert is_planar + with pytest.raises(ValueError, match="is not supported for PlanarEmbedding."): + P.to_undirected(reciprocal=reciprocal, as_view=as_view) + + +def check_embedding(G, embedding): + """Raises an exception if the combinatorial embedding is not correct + + Parameters + ---------- + G : NetworkX graph + embedding : a dict mapping nodes to a list of edges + This specifies the ordering of the outgoing edges from a node for + a combinatorial embedding + + Notes + ----- + Checks the following things: + - The type of the embedding is correct + - The nodes and edges match the original graph + - Every half edge has its matching opposite half edge + - No intersections of edges (checked by Euler's formula) + """ + + if not isinstance(embedding, nx.PlanarEmbedding): + raise nx.NetworkXException("Bad embedding. Not of type nx.PlanarEmbedding") + + # Check structure + embedding.check_structure() + + # Check that graphs are equivalent + + assert set(G.nodes) == set(embedding.nodes), ( + "Bad embedding. Nodes don't match the original graph." + ) + + # Check that the edges are equal + g_edges = set() + for edge in G.edges: + if edge[0] != edge[1]: + g_edges.add((edge[0], edge[1])) + g_edges.add((edge[1], edge[0])) + assert g_edges == set(embedding.edges), ( + "Bad embedding. Edges don't match the original graph." + ) + + +def check_counterexample(G, sub_graph): + """Raises an exception if the counterexample is wrong. 
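+
+    By Kuratowski's theorem a graph is non-planar iff it contains a
+    subdivision of K5 or K3_3. The check below removes self loops,
+    contracts away the degree-2 nodes of the claimed counterexample
+    (undoing subdivisions) and then tests the remainder for isomorphism
+    with K5 or K3_3.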
+
+    Parameters
+    ----------
+    G : NetworkX graph
+    sub_graph : NetworkX graph
+        A subgraph of G claimed to be a Kuratowski counterexample
+    """
+    # 1. Create the subgraph
+    sub_graph = nx.Graph(sub_graph)
+
+    # 2. Remove self loops
+    for u in sub_graph:
+        if sub_graph.has_edge(u, u):
+            sub_graph.remove_edge(u, u)
+
+    # keep track of nodes we might need to contract
+    contract = list(sub_graph)
+
+    # 3. Contract Edges
+    while len(contract) > 0:
+        contract_node = contract.pop()
+        if contract_node not in sub_graph:
+            # Node was already contracted
+            continue
+        degree = sub_graph.degree[contract_node]
+        # Check if we can remove the node
+        if degree == 2:
+            # Get the two neighbors
+            neighbors = iter(sub_graph[contract_node])
+            u = next(neighbors)
+            v = next(neighbors)
+            # Save nodes for later
+            contract.append(u)
+            contract.append(v)
+            # Contract edge
+            sub_graph.remove_node(contract_node)
+            sub_graph.add_edge(u, v)
+
+    # 4. Check for isomorphism with K5 or K3_3 graphs
+    if len(sub_graph) == 5:
+        if not nx.is_isomorphic(nx.complete_graph(5), sub_graph):
+            raise nx.NetworkXException("Bad counter example.")
+    elif len(sub_graph) == 6:
+        if not nx.is_isomorphic(nx.complete_bipartite_graph(3, 3), sub_graph):
+            raise nx.NetworkXException("Bad counter example.")
+    else:
+        raise nx.NetworkXException("Bad counter example.")
+
+
+class TestPlanarEmbeddingClass:
+    def test_add_half_edge(self):
+        embedding = nx.PlanarEmbedding()
+        embedding.add_half_edge(0, 1)
+        with pytest.raises(
+            nx.NetworkXException, match="Invalid clockwise reference node."
+        ):
+            embedding.add_half_edge(0, 2, cw=3)
+        with pytest.raises(
+            nx.NetworkXException, match="Invalid counterclockwise reference node."
+        ):
+            embedding.add_half_edge(0, 2, ccw=3)
+        with pytest.raises(
+            nx.NetworkXException, match="Only one of cw/ccw can be specified."
+        ):
+            embedding.add_half_edge(0, 2, cw=1, ccw=1)
+        with pytest.raises(
+            nx.NetworkXException,
+            match=(
+                r"Node already has out-half-edge\(s\), either"
+                " cw or ccw reference node required."
+ ), + ): + embedding.add_half_edge(0, 2) + # these should work + embedding.add_half_edge(0, 2, cw=1) + embedding.add_half_edge(0, 3, ccw=1) + assert sorted(embedding.edges(data=True)) == [ + (0, 1, {"ccw": 2, "cw": 3}), + (0, 2, {"cw": 1, "ccw": 3}), + (0, 3, {"cw": 2, "ccw": 1}), + ] + + def test_get_data(self): + embedding = self.get_star_embedding(4) + data = embedding.get_data() + data_cmp = {0: [3, 2, 1], 1: [0], 2: [0], 3: [0]} + assert data == data_cmp + + def test_edge_removal(self): + embedding = nx.PlanarEmbedding() + embedding.set_data( + { + 1: [2, 5, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 5, 2], + 5: [7, 1, 2, 4], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + # remove_edges_from() calls remove_edge(), so both are tested here + embedding.remove_edges_from(((5, 4), (1, 5))) + embedding.check_structure() + embedding_expected = nx.PlanarEmbedding() + embedding_expected.set_data( + { + 1: [2, 7], + 2: [1, 3, 4, 5], + 3: [2, 4], + 4: [3, 6, 2], + 5: [7, 2], + 6: [4, 7], + 7: [6, 1, 5], + } + ) + assert nx.utils.graphs_equal(embedding, embedding_expected) + + def test_missing_edge_orientation(self): + embedding = nx.PlanarEmbedding({1: {2: {}}, 2: {1: {}}}) + with pytest.raises(nx.NetworkXException): + # Invalid structure because the orientation of the edge was not set + embedding.check_structure() + + def test_invalid_edge_orientation(self): + embedding = nx.PlanarEmbedding( + { + 1: {2: {"cw": 2, "ccw": 2}}, + 2: {1: {"cw": 1, "ccw": 1}}, + 1: {3: {}}, + 3: {1: {}}, + } + ) + with pytest.raises(nx.NetworkXException): + embedding.check_structure() + + def test_missing_half_edge(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(1, 2) + with pytest.raises(nx.NetworkXException): + # Invalid structure because other half edge is missing + embedding.check_structure() + + def test_not_fulfilling_euler_formula(self): + embedding = nx.PlanarEmbedding() + for i in range(5): + ref = None + for j in range(5): + if i != j: + embedding.add_half_edge(i, j, cw=ref) + ref = j + with pytest.raises(nx.NetworkXException): + embedding.check_structure() + + def test_missing_reference(self): + embedding = nx.PlanarEmbedding() + with pytest.raises(nx.NetworkXException, match="Invalid reference node."): + embedding.add_half_edge(1, 2, ccw=3) + + def test_connect_components(self): + embedding = nx.PlanarEmbedding() + embedding.connect_components(1, 2) + + def test_successful_face_traversal(self): + embedding = nx.PlanarEmbedding() + embedding.add_half_edge(1, 2) + embedding.add_half_edge(2, 1) + face = embedding.traverse_face(1, 2) + assert face == [1, 2] + + def test_unsuccessful_face_traversal(self): + embedding = nx.PlanarEmbedding( + {1: {2: {"cw": 3, "ccw": 2}}, 2: {1: {"cw": 3, "ccw": 1}}} + ) + with pytest.raises(nx.NetworkXException): + embedding.traverse_face(1, 2) + + def test_forbidden_methods(self): + embedding = nx.PlanarEmbedding() + embedding.add_node(42) # no exception + embedding.add_nodes_from([(23, 24)]) # no exception + with pytest.raises(NotImplementedError): + embedding.add_edge(1, 3) + with pytest.raises(NotImplementedError): + embedding.add_edges_from([(0, 2), (1, 4)]) + with pytest.raises(NotImplementedError): + embedding.add_weighted_edges_from([(0, 2, 350), (1, 4, 125)]) + + @staticmethod + def get_star_embedding(n): + embedding = nx.PlanarEmbedding() + ref = None + for i in range(1, n): + embedding.add_half_edge(0, i, cw=ref) + ref = i + embedding.add_half_edge(i, 0) + return embedding diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py new file mode 100644 index 0000000..a81d6a6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_polynomials.py @@ -0,0 +1,57 @@ +"""Unit tests for the :mod:`networkx.algorithms.polynomials` module.""" + +import pytest + +import networkx as nx + +sympy = pytest.importorskip("sympy") + + +# Mapping of input graphs to a string representation of their tutte polynomials +_test_tutte_graphs = { + nx.complete_graph(1): "1", + nx.complete_graph(4): "x**3 + 3*x**2 + 4*x*y + 2*x + y**3 + 3*y**2 + 2*y", + nx.cycle_graph(5): "x**4 + x**3 + x**2 + x + y", + nx.diamond_graph(): "x**3 + 2*x**2 + 2*x*y + x + y**2 + y", +} + +_test_chromatic_graphs = { + nx.complete_graph(1): "x", + nx.complete_graph(4): "x**4 - 6*x**3 + 11*x**2 - 6*x", + nx.cycle_graph(5): "x**5 - 5*x**4 + 10*x**3 - 10*x**2 + 4*x", + nx.diamond_graph(): "x**4 - 5*x**3 + 8*x**2 - 4*x", + nx.path_graph(5): "x**5 - 4*x**4 + 6*x**3 - 4*x**2 + x", +} + + +@pytest.mark.parametrize(("G", "expected"), _test_tutte_graphs.items()) +def test_tutte_polynomial(G, expected): + assert nx.tutte_polynomial(G).equals(expected) + + +@pytest.mark.parametrize("G", _test_tutte_graphs.keys()) +def test_tutte_polynomial_disjoint(G): + """Tutte polynomial factors into the Tutte polynomials of its components. + Verify this property with the disjoint union of two copies of the input graph. + """ + t_g = nx.tutte_polynomial(G) + H = nx.disjoint_union(G, G) + t_h = nx.tutte_polynomial(H) + assert sympy.simplify(t_g * t_g).equals(t_h) + + +@pytest.mark.parametrize(("G", "expected"), _test_chromatic_graphs.items()) +def test_chromatic_polynomial(G, expected): + assert nx.chromatic_polynomial(G).equals(expected) + + +@pytest.mark.parametrize("G", _test_chromatic_graphs.keys()) +def test_chromatic_polynomial_disjoint(G): + """Chromatic polynomial factors into the Chromatic polynomials of its + components. Verify this property with the disjoint union of two copies of + the input graph. 
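+
+    Concretely: the chromatic polynomial of a disjoint union is the product
+    of the components' polynomials, so with H = disjoint_union(G, G) the
+    test expects X(H, x) == X(G, x)**2.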
+ """ + x_g = nx.chromatic_polynomial(G) + H = nx.disjoint_union(G, G) + x_h = nx.chromatic_polynomial(H) + assert sympy.simplify(x_g * x_g).equals(x_h) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py new file mode 100644 index 0000000..e713bc4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_reciprocity.py @@ -0,0 +1,37 @@ +import pytest + +import networkx as nx + + +class TestReciprocity: + # test overall reciprocity by passing whole graph + def test_reciprocity_digraph(self): + DG = nx.DiGraph([(1, 2), (2, 1)]) + reciprocity = nx.reciprocity(DG) + assert reciprocity == 1.0 + + # test empty graph's overall reciprocity which will throw an error + def test_overall_reciprocity_empty_graph(self): + with pytest.raises(nx.NetworkXError): + DG = nx.DiGraph() + nx.overall_reciprocity(DG) + + # test for reciprocity for a list of nodes + def test_reciprocity_graph_nodes(self): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)]) + reciprocity = nx.reciprocity(DG, [1, 2]) + expected_reciprocity = {1: 0.0, 2: 0.6666666666666666} + assert reciprocity == expected_reciprocity + + # test for reciprocity for a single node + def test_reciprocity_graph_node(self): + DG = nx.DiGraph([(1, 2), (2, 3), (3, 2)]) + reciprocity = nx.reciprocity(DG, 2) + assert reciprocity == 0.6666666666666666 + + # test for reciprocity for an isolated node + def test_reciprocity_graph_isolated_nodes(self): + with pytest.raises(nx.NetworkXError): + DG = nx.DiGraph([(1, 2)]) + DG.add_node(4) + nx.reciprocity(DG, 4) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py new file mode 100644 index 0000000..021ad32 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_regular.py @@ -0,0 +1,91 @@ +import pytest + +import networkx as nx +import networkx.algorithms.regular as reg +import networkx.generators as gen + + +class TestKFactor: + def test_k_factor_trivial(self): + g = gen.cycle_graph(4) + f = reg.k_factor(g, 2) + assert g.edges == f.edges + + def test_k_factor1(self): + g = gen.grid_2d_graph(4, 4) + g_kf = reg.k_factor(g, 2) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 2 + + def test_k_factor2(self): + g = gen.complete_graph(6) + g_kf = reg.k_factor(g, 3) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 3 + + def test_k_factor3(self): + g = gen.grid_2d_graph(4, 4) + with pytest.raises(nx.NetworkXUnfeasible): + reg.k_factor(g, 3) + + def test_k_factor4(self): + g = gen.lattice.hexagonal_lattice_graph(4, 4) + # Perfect matching doesn't exist for 4,4 hexagonal lattice graph + with pytest.raises(nx.NetworkXUnfeasible): + reg.k_factor(g, 2) + + def test_k_factor5(self): + g = gen.complete_graph(6) + # small k to exercise SmallKGadget + g_kf = reg.k_factor(g, 2) + for edge in g_kf.edges(): + assert g.has_edge(edge[0], edge[1]) + for _, degree in g_kf.degree(): + assert degree == 2 + + +class TestIsRegular: + def test_is_regular1(self): + g = gen.cycle_graph(4) + assert reg.is_regular(g) + + def test_is_regular2(self): + g = gen.complete_graph(5) + assert reg.is_regular(g) + + def test_is_regular3(self): + g = gen.lollipop_graph(5, 5) + assert not reg.is_regular(g) + + def 
test_is_regular4(self):
+        g = nx.DiGraph()
+        g.add_edges_from([(0, 1), (1, 2), (2, 0)])
+        assert reg.is_regular(g)
+
+
+def test_is_regular_empty_graph_raises():
+    G = nx.Graph()
+    with pytest.raises(nx.NetworkXPointlessConcept, match="Graph has no nodes"):
+        nx.is_regular(G)
+
+
+class TestIsKRegular:
+    def test_is_k_regular1(self):
+        g = gen.cycle_graph(4)
+        assert reg.is_k_regular(g, 2)
+        assert not reg.is_k_regular(g, 3)
+
+    def test_is_k_regular2(self):
+        g = gen.complete_graph(5)
+        assert reg.is_k_regular(g, 4)
+        assert not reg.is_k_regular(g, 3)
+        assert not reg.is_k_regular(g, 6)
+
+    def test_is_k_regular3(self):
+        g = gen.lollipop_graph(5, 5)
+        assert not reg.is_k_regular(g, 5)
+        assert not reg.is_k_regular(g, 6)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py
new file mode 100644
index 0000000..2172157
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_richclub.py
@@ -0,0 +1,149 @@
+import pytest
+
+import networkx as nx
+
+
+def test_richclub():
+    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    rc = nx.richclub.rich_club_coefficient(G, normalized=False)
+    assert rc == {0: 12.0 / 30, 1: 8.0 / 12}
+
+    # test single value
+    rc0 = nx.richclub.rich_club_coefficient(G, normalized=False)[0]
+    assert rc0 == 12.0 / 30.0
+
+
+def test_richclub_seed():
+    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=1)
+    assert rcNorm == {0: 1.0, 1: 1.0}
+
+
+def test_richclub_normalized():
+    G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
+    rcNorm = nx.richclub.rich_club_coefficient(G, Q=2, seed=42)
+    assert rcNorm == {0: 1.0, 1: 1.0}
+
+
+def test_richclub2():
+    T = nx.balanced_tree(2, 10)
+    rc = nx.richclub.rich_club_coefficient(T, normalized=False)
+    assert rc == {
+        0: 4092 / (2047 * 2046.0),
+        1: (2044.0 / (1023 * 1022)),
+        2: (2040.0 / (1022 * 1021)),
+    }
+
+
+def test_richclub3():
+    # tests edgecase
+    G = nx.karate_club_graph()
+    rc = nx.rich_club_coefficient(G, normalized=False)
+    assert rc == {
+        0: 156.0 / 1122,
+        1: 154.0 / 1056,
+        2: 110.0 / 462,
+        3: 78.0 / 240,
+        4: 44.0 / 90,
+        5: 22.0 / 42,
+        6: 10.0 / 20,
+        7: 10.0 / 20,
+        8: 10.0 / 20,
+        9: 6.0 / 12,
+        10: 2.0 / 6,
+        11: 2.0 / 6,
+        12: 0.0,
+        13: 0.0,
+        14: 0.0,
+        15: 0.0,
+    }
+
+
+def test_richclub4():
+    G = nx.Graph()
+    G.add_edges_from(
+        [(0, 1), (0, 2), (0, 3), (0, 4), (4, 5), (5, 9), (6, 9), (7, 9), (8, 9)]
+    )
+    rc = nx.rich_club_coefficient(G, normalized=False)
+    assert rc == {0: 18 / 90.0, 1: 6 / 12.0, 2: 0.0, 3: 0.0}
+
+
+def test_richclub_exception():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.DiGraph()
+        nx.rich_club_coefficient(G)
+
+
+def test_rich_club_exception2():
+    with pytest.raises(nx.NetworkXNotImplemented):
+        G = nx.MultiGraph()
+        nx.rich_club_coefficient(G)
+
+
+def test_rich_club_selfloop():
+    G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
+    G.add_edge(1, 1)  # self loop
+    G.add_edge(1, 2)
+    with pytest.raises(
+        Exception,
+        match="rich_club_coefficient is not implemented for graphs with self loops.",
+    ):
+        nx.rich_club_coefficient(G)
+
+
+def test_rich_club_leq_3_nodes_unnormalized():
+    # edgeless graphs up to 3 nodes
+    G = nx.Graph()
+    rc = nx.rich_club_coefficient(G, normalized=False)
+    assert rc == {}
+
+    for i in range(3):
+        G.add_node(i)
+        rc = nx.rich_club_coefficient(G, normalized=False)
+        assert rc == {}
+
+    # 2 nodes, 
single edge + G = nx.Graph() + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, single edge + G = nx.Graph() + G.add_nodes_from([0, 1, 2]) + G.add_edge(0, 1) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1} + + # 3 nodes, 2 edges + G.add_edge(1, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 2 / 3} + + # 3 nodes, 3 edges + G.add_edge(0, 2) + rc = nx.rich_club_coefficient(G, normalized=False) + assert rc == {0: 1, 1: 1} + + +def test_rich_club_leq_3_nodes_normalized(): + G = nx.Graph() + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + for i in range(3): + G.add_node(i) + with pytest.raises( + nx.exception.NetworkXError, + match="Graph has fewer than four nodes", + ): + rc = nx.rich_club_coefficient(G, normalized=True) + + +# def test_richclub2_normalized(): +# T = nx.balanced_tree(2,10) +# rcNorm = nx.richclub.rich_club_coefficient(T,Q=2) +# assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py new file mode 100644 index 0000000..81d870a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_similarity.py @@ -0,0 +1,984 @@ +import pytest + +import networkx as nx +from networkx.algorithms.similarity import ( + graph_edit_distance, + optimal_edit_paths, + optimize_graph_edit_distance, +) +from networkx.generators.classic import ( + circular_ladder_graph, + cycle_graph, + path_graph, + wheel_graph, +) + + +@pytest.mark.parametrize("source", (10, "foo")) +def test_generate_random_paths_source_not_in_G(source): + pytest.importorskip("numpy") + G = nx.complete_graph(5) + # No exception at generator construction time + path_gen = nx.generate_random_paths(G, sample_size=3, source=source) + with pytest.raises(nx.NodeNotFound, match="Initial node.*not in G"): + next(path_gen) + + +def nmatch(n1, n2): + return n1 == n2 + + +def ematch(e1, e2): + return e1 == e2 + + +def getCanonical(): + G = nx.Graph() + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_node("C", label="C") + G.add_node("D", label="D") + G.add_edge("A", "B", label="a-b") + G.add_edge("B", "C", label="b-c") + G.add_edge("B", "D", label="b-d") + return G + + +class TestSimilarity: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + + def test_graph_edit_distance_roots_and_timeout(self): + G0 = nx.star_graph(5) + G1 = G0.copy() + pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2]) + pytest.raises(ValueError, graph_edit_distance, G0, G1, roots=[2, 3, 4]) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 3)) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(3, 9)) + pytest.raises(nx.NodeNotFound, graph_edit_distance, G0, G1, roots=(9, 9)) + assert graph_edit_distance(G0, G1, roots=(1, 2)) == 0 + assert graph_edit_distance(G0, G1, roots=(0, 1)) == 8 + assert graph_edit_distance(G0, G1, roots=(1, 2), timeout=5) == 0 + assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=5) == 8 + assert graph_edit_distance(G0, G1, roots=(0, 1), timeout=0.0001) is None + # test raise on 0 timeout + pytest.raises(nx.NetworkXError, graph_edit_distance, G0, G1, timeout=0) + + def 
test_graph_edit_distance(self): + G0 = nx.Graph() + G1 = path_graph(6) + G2 = cycle_graph(6) + G3 = wheel_graph(7) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 11 + assert graph_edit_distance(G1, G0) == 11 + assert graph_edit_distance(G0, G2) == 12 + assert graph_edit_distance(G2, G0) == 12 + assert graph_edit_distance(G0, G3) == 19 + assert graph_edit_distance(G3, G0) == 19 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 1 + assert graph_edit_distance(G2, G1) == 1 + assert graph_edit_distance(G1, G3) == 8 + assert graph_edit_distance(G3, G1) == 8 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 7 + assert graph_edit_distance(G3, G2) == 7 + + assert graph_edit_distance(G3, G3) == 0 + + def test_graph_edit_distance_node_match(self): + G1 = cycle_graph(5) + G2 = cycle_graph(5) + for n, attr in G1.nodes.items(): + attr["color"] = "red" if n % 2 == 0 else "blue" + for n, attr in G2.nodes.items(): + attr["color"] = "red" if n % 2 == 1 else "blue" + assert graph_edit_distance(G1, G2) == 0 + assert ( + graph_edit_distance( + G1, G2, node_match=lambda n1, n2: n1["color"] == n2["color"] + ) + == 1 + ) + + def test_graph_edit_distance_edge_match(self): + G1 = path_graph(6) + G2 = path_graph(6) + for e, attr in G1.edges.items(): + attr["color"] = "red" if min(e) % 2 == 0 else "blue" + for e, attr in G2.edges.items(): + attr["color"] = "red" if min(e) // 3 == 0 else "blue" + assert graph_edit_distance(G1, G2) == 0 + assert ( + graph_edit_distance( + G1, G2, edge_match=lambda e1, e2: e1["color"] == e2["color"] + ) + == 2 + ) + + def test_graph_edit_distance_node_cost(self): + G1 = path_graph(6) + G2 = path_graph(6) + for n, attr in G1.nodes.items(): + attr["color"] = "red" if n % 2 == 0 else "blue" + for n, attr in G2.nodes.items(): + attr["color"] = "red" if n % 2 == 1 else "blue" + + def node_subst_cost(uattr, vattr): + if uattr["color"] == vattr["color"]: + return 1 + else: + return 10 + + def node_del_cost(attr): + if attr["color"] == "blue": + return 20 + else: + return 50 + + def node_ins_cost(attr): + if attr["color"] == "blue": + return 40 + else: + return 100 + + assert ( + graph_edit_distance( + G1, + G2, + node_subst_cost=node_subst_cost, + node_del_cost=node_del_cost, + node_ins_cost=node_ins_cost, + ) + == 6 + ) + + def test_graph_edit_distance_edge_cost(self): + G1 = path_graph(6) + G2 = path_graph(6) + for e, attr in G1.edges.items(): + attr["color"] = "red" if min(e) % 2 == 0 else "blue" + for e, attr in G2.edges.items(): + attr["color"] = "red" if min(e) // 3 == 0 else "blue" + + def edge_subst_cost(gattr, hattr): + if gattr["color"] == hattr["color"]: + return 0.01 + else: + return 0.1 + + def edge_del_cost(attr): + if attr["color"] == "blue": + return 0.2 + else: + return 0.5 + + def edge_ins_cost(attr): + if attr["color"] == "blue": + return 0.4 + else: + return 1.0 + + assert ( + graph_edit_distance( + G1, + G2, + edge_subst_cost=edge_subst_cost, + edge_del_cost=edge_del_cost, + edge_ins_cost=edge_ins_cost, + ) + == 0.23 + ) + + def test_graph_edit_distance_upper_bound(self): + G1 = circular_ladder_graph(2) + G2 = circular_ladder_graph(6) + assert graph_edit_distance(G1, G2, upper_bound=5) is None + assert graph_edit_distance(G1, G2, upper_bound=24) == 22 + assert graph_edit_distance(G1, G2) == 22 + + def test_optimal_edit_paths(self): + G1 = path_graph(3) + G2 = cycle_graph(3) + paths, cost = optimal_edit_paths(G1, G2) + assert cost == 1 + assert len(paths) == 6 + + def 
canonical(vertex_path, edge_path): + return ( + tuple(sorted(vertex_path)), + tuple(sorted(edge_path, key=lambda x: (None in x, x))), + ) + + expected_paths = [ + ( + [(0, 0), (1, 1), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (1, 2)), (None, (0, 2))], + ), + ( + [(0, 0), (1, 2), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (1, 2)), (None, (0, 1))], + ), + ( + [(0, 1), (1, 0), (2, 2)], + [((0, 1), (0, 1)), ((1, 2), (0, 2)), (None, (1, 2))], + ), + ( + [(0, 1), (1, 2), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 2)), (None, (0, 1))], + ), + ( + [(0, 2), (1, 0), (2, 1)], + [((0, 1), (0, 2)), ((1, 2), (0, 1)), (None, (1, 2))], + ), + ( + [(0, 2), (1, 1), (2, 0)], + [((0, 1), (1, 2)), ((1, 2), (0, 1)), (None, (0, 2))], + ), + ] + assert {canonical(*p) for p in paths} == {canonical(*p) for p in expected_paths} + + def test_optimize_graph_edit_distance(self): + G1 = circular_ladder_graph(2) + G2 = circular_ladder_graph(6) + bestcost = 1000 + for cost in optimize_graph_edit_distance(G1, G2): + assert cost < bestcost + bestcost = cost + assert bestcost == 22 + + # def test_graph_edit_distance_bigger(self): + # G1 = circular_ladder_graph(12) + # G2 = circular_ladder_graph(16) + # assert_equal(graph_edit_distance(G1, G2), 22) + + def test_selfloops(self): + G0 = nx.Graph() + G1 = nx.Graph() + G1.add_edges_from((("A", "A"), ("A", "B"))) + G2 = nx.Graph() + G2.add_edges_from((("A", "B"), ("B", "B"))) + G3 = nx.Graph() + G3.add_edges_from((("A", "A"), ("A", "B"), ("B", "B"))) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 4 + assert graph_edit_distance(G1, G0) == 4 + assert graph_edit_distance(G0, G2) == 4 + assert graph_edit_distance(G2, G0) == 4 + assert graph_edit_distance(G0, G3) == 5 + assert graph_edit_distance(G3, G0) == 5 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 0 + assert graph_edit_distance(G2, G1) == 0 + assert graph_edit_distance(G1, G3) == 1 + assert graph_edit_distance(G3, G1) == 1 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 1 + assert graph_edit_distance(G3, G2) == 1 + + assert graph_edit_distance(G3, G3) == 0 + + def test_digraph(self): + G0 = nx.DiGraph() + G1 = nx.DiGraph() + G1.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("D", "A"))) + G2 = nx.DiGraph() + G2.add_edges_from((("A", "B"), ("B", "C"), ("C", "D"), ("A", "D"))) + G3 = nx.DiGraph() + G3.add_edges_from((("A", "B"), ("A", "C"), ("B", "D"), ("C", "D"))) + + assert graph_edit_distance(G0, G0) == 0 + assert graph_edit_distance(G0, G1) == 8 + assert graph_edit_distance(G1, G0) == 8 + assert graph_edit_distance(G0, G2) == 8 + assert graph_edit_distance(G2, G0) == 8 + assert graph_edit_distance(G0, G3) == 8 + assert graph_edit_distance(G3, G0) == 8 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 2 + assert graph_edit_distance(G2, G1) == 2 + assert graph_edit_distance(G1, G3) == 4 + assert graph_edit_distance(G3, G1) == 4 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 2 + assert graph_edit_distance(G3, G2) == 2 + + assert graph_edit_distance(G3, G3) == 0 + + def test_multigraph(self): + G0 = nx.MultiGraph() + G1 = nx.MultiGraph() + G1.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"))) + G2 = nx.MultiGraph() + G2.add_edges_from((("A", "B"), ("B", "C"), ("B", "C"), ("A", "C"))) + G3 = nx.MultiGraph() + G3.add_edges_from((("A", "B"), ("B", "C"), ("A", "C"), ("A", "C"), ("A", "C"))) + + assert graph_edit_distance(G0, G0) == 0 + assert 
graph_edit_distance(G0, G1) == 6 + assert graph_edit_distance(G1, G0) == 6 + assert graph_edit_distance(G0, G2) == 7 + assert graph_edit_distance(G2, G0) == 7 + assert graph_edit_distance(G0, G3) == 8 + assert graph_edit_distance(G3, G0) == 8 + + assert graph_edit_distance(G1, G1) == 0 + assert graph_edit_distance(G1, G2) == 1 + assert graph_edit_distance(G2, G1) == 1 + assert graph_edit_distance(G1, G3) == 2 + assert graph_edit_distance(G3, G1) == 2 + + assert graph_edit_distance(G2, G2) == 0 + assert graph_edit_distance(G2, G3) == 1 + assert graph_edit_distance(G3, G2) == 1 + + assert graph_edit_distance(G3, G3) == 0 + + def test_multidigraph(self): + G1 = nx.MultiDiGraph() + G1.add_edges_from( + ( + ("hardware", "kernel"), + ("kernel", "hardware"), + ("kernel", "userspace"), + ("userspace", "kernel"), + ) + ) + G2 = nx.MultiDiGraph() + G2.add_edges_from( + ( + ("winter", "spring"), + ("spring", "summer"), + ("summer", "autumn"), + ("autumn", "winter"), + ) + ) + + assert graph_edit_distance(G1, G2) == 5 + assert graph_edit_distance(G2, G1) == 5 + + # by https://github.com/jfbeaumont + def testCopy(self): + G = nx.Graph() + G.add_node("A", label="A") + G.add_node("B", label="B") + G.add_edge("A", "B", label="a-b") + assert ( + graph_edit_distance(G, G.copy(), node_match=nmatch, edge_match=ematch) == 0 + ) + + def testSame(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 0 + + def testOneEdgeLabelDiff(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="bad") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneNodeLabelDiff(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="Z") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraNode(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_edge("A", "B", label="a-b") + G2.add_node("C", label="C") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraEdge(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_node("C", label="C") + G1.add_node("C", label="C") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + def testOneExtraNodeAndEdge(self): + G1 = nx.Graph() + G1.add_node("A", label="A") + G1.add_node("B", label="B") + G1.add_edge("A", "B", label="a-b") + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("A", "C", label="a-c") + assert 
graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph1(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "D", label="b-d") + G2.add_edge("D", "E", label="d-e") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 3 + + def testGraph2(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") + G2.add_edge("C", "E", label="c-e") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 4 + + def testGraph3(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_node("E", label="E") + G2.add_node("F", label="F") + G2.add_node("G", label="G") + G2.add_edge("A", "C", label="a-c") + G2.add_edge("A", "D", label="a-d") + G2.add_edge("D", "E", label="d-e") + G2.add_edge("D", "F", label="d-f") + G2.add_edge("D", "G", label="d-g") + G2.add_edge("E", "B", label="e-b") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 12 + + def testGraph4(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("C", "D", label="c-d") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph4_a(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("A", "D", label="a-d") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 2 + + def testGraph4_b(self): + G1 = getCanonical() + G2 = nx.Graph() + G2.add_node("A", label="A") + G2.add_node("B", label="B") + G2.add_node("C", label="C") + G2.add_node("D", label="D") + G2.add_edge("A", "B", label="a-b") + G2.add_edge("B", "C", label="b-c") + G2.add_edge("B", "D", label="bad") + assert graph_edit_distance(G1, G2, node_match=nmatch, edge_match=ematch) == 1 + + # note: nx.simrank_similarity_numpy not included because returns np.array + simrank_algs = [ + nx.simrank_similarity, + nx.algorithms.similarity._simrank_similarity_python, + ] + + @pytest.mark.parametrize("simrank_similarity", simrank_algs) + def test_simrank_no_source_no_target(self, simrank_similarity): + G = nx.cycle_graph(5) + expected = { + 0: { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + }, + 1: { + 0: 0.3951219505902448, + 1: 1, + 2: 0.3951219505902449, + 3: 0.5707317069281646, + 4: 0.5707317069281646, + }, + 2: { + 0: 0.5707317069281646, + 1: 0.3951219505902449, + 2: 1, + 3: 0.3951219505902449, + 4: 0.5707317069281646, + }, + 3: { + 0: 0.5707317069281646, + 1: 0.5707317069281646, + 2: 0.3951219505902449, + 3: 1, + 4: 0.3951219505902449, + }, + 4: { + 0: 0.3951219505902449, + 1: 0.5707317069281646, + 2: 0.5707317069281646, + 3: 0.3951219505902449, + 4: 1, + 
}, + } + actual = simrank_similarity(G) + for k, v in expected.items(): + assert v == pytest.approx(actual[k], abs=1e-2) + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = { + 0: {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443}, + 1: {0: 0.0, 1: 1, 2: 0.4135512472705618, 3: 0.0, 4: 0.10586911930126384}, + 2: { + 0: 0.1323363991265798, + 1: 0.4135512472705618, + 2: 1, + 3: 0.04234764772050554, + 4: 0.08822426608438655, + }, + 3: {0: 0.0, 1: 0.0, 2: 0.04234764772050554, 3: 1, 4: 0.3308409978164495}, + 4: { + 0: 0.03387811817640443, + 1: 0.10586911930126384, + 2: 0.08822426608438655, + 3: 0.3308409978164495, + 4: 1, + }, + } + # Use the importance_factor from the paper to get the same numbers. + actual = simrank_similarity(G, importance_factor=0.8) + for k, v in expected.items(): + assert v == pytest.approx(actual[k], abs=1e-2) + + @pytest.mark.parametrize("simrank_similarity", simrank_algs) + def test_simrank_source_no_target(self, simrank_similarity): + G = nx.cycle_graph(5) + expected = { + 0: 1, + 1: 0.3951219505902448, + 2: 0.5707317069281646, + 3: 0.5707317069281646, + 4: 0.3951219505902449, + } + actual = simrank_similarity(G, source=0) + assert expected == pytest.approx(actual, abs=1e-2) + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = {0: 1, 1: 0.0, 2: 0.1323363991265798, 3: 0.0, 4: 0.03387811817640443} + # Use the importance_factor from the paper to get the same numbers. + actual = simrank_similarity(G, importance_factor=0.8, source=0) + assert expected == pytest.approx(actual, abs=1e-2) + + @pytest.mark.parametrize("simrank_similarity", simrank_algs) + def test_simrank_noninteger_nodes(self, simrank_similarity): + G = nx.cycle_graph(5) + G = nx.relabel_nodes(G, dict(enumerate("abcde"))) + expected = { + "a": 1, + "b": 0.3951219505902448, + "c": 0.5707317069281646, + "d": 0.5707317069281646, + "e": 0.3951219505902449, + } + actual = simrank_similarity(G, source="a") + assert expected == pytest.approx(actual, abs=1e-2) + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + node_labels = dict(enumerate(nx.get_node_attributes(G, "label").values())) + G = nx.relabel_nodes(G, node_labels) + + expected = { + "Univ": 1, + "ProfA": 0.0, + "ProfB": 0.1323363991265798, + "StudentA": 0.0, + "StudentB": 0.03387811817640443, + } + # Use the importance_factor from the paper to get the same numbers. 
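+        # Editorial note (not from upstream NetworkX): importance_factor is the
+        # decay constant C in the SimRank recurrence; Jeh & Widom's paper uses
+        # C = 0.8, which is the value the expected numbers above assume.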
+ actual = simrank_similarity(G, importance_factor=0.8, source="Univ") + assert expected == pytest.approx(actual, abs=1e-2) + + @pytest.mark.parametrize("simrank_similarity", simrank_algs) + def test_simrank_source_and_target(self, simrank_similarity): + G = nx.cycle_graph(5) + expected = 1 + actual = simrank_similarity(G, source=0, target=0) + assert expected == pytest.approx(actual, abs=1e-2) + + # For a DiGraph test, use the first graph from the paper cited in + # the docs: https://dl.acm.org/doi/pdf/10.1145/775047.775126 + G = nx.DiGraph() + G.add_node(0, label="Univ") + G.add_node(1, label="ProfA") + G.add_node(2, label="ProfB") + G.add_node(3, label="StudentA") + G.add_node(4, label="StudentB") + G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 4), (4, 2), (3, 0)]) + + expected = 0.1323363991265798 + # Use the importance_factor from the paper to get the same numbers. + # Use the pair (0,2) because (0,0) and (0,1) have trivial results. + actual = simrank_similarity(G, importance_factor=0.8, source=0, target=2) + assert expected == pytest.approx(actual, abs=1e-5) + + @pytest.mark.parametrize("alg", simrank_algs) + def test_simrank_max_iterations(self, alg): + G = nx.cycle_graph(5) + pytest.raises(nx.ExceededMaxIterations, alg, G, max_iterations=10) + + def test_simrank_source_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.simrank_similarity(G, source=10) + + def test_simrank_target_not_found(self): + G = nx.cycle_graph(5) + with pytest.raises(nx.NodeNotFound, match="Target node 10 not in G"): + nx.simrank_similarity(G, target=10) + + def test_simrank_between_versions(self): + G = nx.cycle_graph(5) + # _python tolerance 1e-4 + expected_python_tol4 = { + 0: 1, + 1: 0.394512499239852, + 2: 0.5703550452791322, + 3: 0.5703550452791323, + 4: 0.394512499239852, + } + # _numpy tolerance 1e-4 + expected_numpy_tol4 = { + 0: 1.0, + 1: 0.3947180735764555, + 2: 0.570482097206368, + 3: 0.570482097206368, + 4: 0.3947180735764555, + } + actual = nx.simrank_similarity(G, source=0) + assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-7) + # versions differ at 1e-4 level but equal at 1e-3 + assert expected_python_tol4 != pytest.approx(actual, abs=1e-4) + assert expected_python_tol4 == pytest.approx(actual, abs=1e-3) + + actual = nx.similarity._simrank_similarity_python(G, source=0) + assert expected_python_tol4 == pytest.approx(actual, abs=1e-7) + # versions differ at 1e-4 level but equal at 1e-3 + assert expected_numpy_tol4 != pytest.approx(actual, abs=1e-4) + assert expected_numpy_tol4 == pytest.approx(actual, abs=1e-3) + + def test_simrank_numpy_no_source_no_target(self): + G = nx.cycle_graph(5) + expected = np.array( + [ + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + 0.570482097206368, + ], + [ + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + 0.3947180735764555, + ], + [ + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 0.3947180735764555, + 1.0, + ], + ] + ) + actual = nx.similarity._simrank_similarity_numpy(G) + np.testing.assert_allclose(expected, actual, atol=1e-7) + + def test_simrank_numpy_source_no_target(self): + G = nx.cycle_graph(5) + expected = np.array( + [ + 1.0, + 0.3947180735764555, + 0.570482097206368, + 0.570482097206368, + 
0.3947180735764555, + ] + ) + actual = nx.similarity._simrank_similarity_numpy(G, source=0) + np.testing.assert_allclose(expected, actual, atol=1e-7) + + def test_simrank_numpy_source_and_target(self): + G = nx.cycle_graph(5) + expected = 1.0 + actual = nx.similarity._simrank_similarity_numpy(G, source=0, target=0) + np.testing.assert_allclose(expected, actual, atol=1e-7) + + def test_panther_similarity_unweighted(self): + np.random.seed(42) + + G = nx.Graph() + G.add_edge(0, 1) + G.add_edge(0, 2) + G.add_edge(0, 3) + G.add_edge(1, 2) + G.add_edge(2, 4) + expected = {3: 0.5, 2: 0.5, 1: 0.5, 4: 0.125} + sim = nx.panther_similarity(G, 0, path_length=2) + assert sim == expected + + def test_panther_similarity_weighted(self): + np.random.seed(42) + + G = nx.Graph() + G.add_edge("v1", "v2", w=5) + G.add_edge("v1", "v3", w=1) + G.add_edge("v1", "v4", w=2) + G.add_edge("v2", "v3", w=0.1) + G.add_edge("v3", "v5", w=1) + expected = {"v3": 0.75, "v4": 0.5, "v2": 0.5, "v5": 0.25} + sim = nx.panther_similarity(G, "v1", path_length=2, weight="w") + assert sim == expected + + def test_panther_similarity_source_not_found(self): + G = nx.Graph() + G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)]) + with pytest.raises(nx.NodeNotFound, match="Source node 10 not in G"): + nx.panther_similarity(G, source=10) + + def test_panther_similarity_isolated(self): + G = nx.Graph() + G.add_nodes_from(range(5)) + with pytest.raises( + nx.NetworkXUnfeasible, + match="Panther similarity is not defined for the isolated source node 1.", + ): + nx.panther_similarity(G, source=1) + + @pytest.mark.parametrize("num_paths", (1, 3, 10)) + @pytest.mark.parametrize("source", (0, 1)) + def test_generate_random_paths_with_start(self, num_paths, source): + G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (2, 4)]) + index_map = {} + + path_gen = nx.generate_random_paths( + G, + num_paths, + path_length=2, + index_map=index_map, + source=source, + ) + paths = list(path_gen) + + # There should be num_paths paths + assert len(paths) == num_paths + # And they should all start with `source` + assert all(p[0] == source for p in paths) + # The index_map for the `source` node should contain the indices for + # all of the generated paths. 
+ assert sorted(index_map[source]) == list(range(num_paths)) + + def test_generate_random_paths_unweighted(self): + index_map = {} + num_paths = 10 + path_length = 2 + G = nx.Graph() + G.add_edge(0, 1) + G.add_edge(0, 2) + G.add_edge(0, 3) + G.add_edge(1, 2) + G.add_edge(2, 4) + paths = nx.generate_random_paths( + G, num_paths, path_length=path_length, index_map=index_map, seed=42 + ) + expected_paths = [ + [3, 0, 3], + [4, 2, 1], + [2, 1, 0], + [2, 0, 3], + [3, 0, 1], + [3, 0, 1], + [4, 2, 0], + [2, 1, 0], + [3, 0, 2], + [2, 1, 2], + ] + expected_map = { + 0: {0, 2, 3, 4, 5, 6, 7, 8}, + 1: {1, 2, 4, 5, 7, 9}, + 2: {1, 2, 3, 6, 7, 8, 9}, + 3: {0, 3, 4, 5, 8}, + 4: {1, 6}, + } + + assert expected_paths == list(paths) + assert expected_map == index_map + + def test_generate_random_paths_weighted(self): + np.random.seed(42) + + index_map = {} + num_paths = 10 + path_length = 6 + G = nx.Graph() + G.add_edge("a", "b", weight=0.6) + G.add_edge("a", "c", weight=0.2) + G.add_edge("c", "d", weight=0.1) + G.add_edge("c", "e", weight=0.7) + G.add_edge("c", "f", weight=0.9) + G.add_edge("a", "d", weight=0.3) + paths = nx.generate_random_paths( + G, num_paths, path_length=path_length, index_map=index_map + ) + + expected_paths = [ + ["d", "c", "f", "c", "d", "a", "b"], + ["e", "c", "f", "c", "f", "c", "e"], + ["d", "a", "b", "a", "b", "a", "c"], + ["b", "a", "d", "a", "b", "a", "b"], + ["d", "a", "b", "a", "b", "a", "d"], + ["d", "a", "b", "a", "b", "a", "c"], + ["d", "a", "b", "a", "b", "a", "b"], + ["f", "c", "f", "c", "f", "c", "e"], + ["d", "a", "d", "a", "b", "a", "b"], + ["e", "c", "f", "c", "e", "c", "d"], + ] + expected_map = { + "d": {0, 2, 3, 4, 5, 6, 8, 9}, + "c": {0, 1, 2, 5, 7, 9}, + "f": {0, 1, 9, 7}, + "a": {0, 2, 3, 4, 5, 6, 8}, + "b": {0, 2, 3, 4, 5, 6, 8}, + "e": {1, 9, 7}, + } + + assert expected_paths == list(paths) + assert expected_map == index_map + + def test_symmetry_with_custom_matching(self): + """G2 has edge (a,b) and G3 has edge (a,a) but node order for G2 is (a,b) + while for G3 it is (b,a)""" + + a, b = "A", "B" + G2 = nx.Graph() + G2.add_nodes_from((a, b)) + G2.add_edges_from([(a, b)]) + G3 = nx.Graph() + G3.add_nodes_from((b, a)) + G3.add_edges_from([(a, a)]) + for G in (G2, G3): + for n in G: + G.nodes[n]["attr"] = n + for e in G.edges: + G.edges[e]["attr"] = e + + def user_match(x, y): + return x == y + + assert ( + nx.graph_edit_distance(G2, G3, node_match=user_match, edge_match=user_match) + == 1 + ) + assert ( + nx.graph_edit_distance(G3, G2, node_match=user_match, edge_match=user_match) + == 1 + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py new file mode 100644 index 0000000..7855bba --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_simple_paths.py @@ -0,0 +1,803 @@ +import random + +import pytest + +import networkx as nx +from networkx import convert_node_labels_to_integers as cnlti +from networkx.algorithms.simple_paths import ( + _bidirectional_dijkstra, + _bidirectional_shortest_path, +) +from networkx.utils import arbitrary_element, pairwise + + +class TestIsSimplePath: + """Unit tests for the + :func:`networkx.algorithms.simple_paths.is_simple_path` function. + + """ + + def test_empty_list(self): + """Tests that the empty list is not a valid path, since there + should be a one-to-one correspondence between paths as lists of + nodes and paths as lists of edges. 
+ + """ + G = nx.trivial_graph() + assert not nx.is_simple_path(G, []) + + def test_trivial_path(self): + """Tests that the trivial path, a path of length one, is + considered a simple path in a graph. + + """ + G = nx.trivial_graph() + assert nx.is_simple_path(G, [0]) + + def test_trivial_nonpath(self): + """Tests that a list whose sole element is an object not in the + graph is not considered a simple path. + + """ + G = nx.trivial_graph() + assert not nx.is_simple_path(G, ["not a node"]) + + def test_simple_path(self): + G = nx.path_graph(2) + assert nx.is_simple_path(G, [0, 1]) + + def test_non_simple_path(self): + G = nx.path_graph(2) + assert not nx.is_simple_path(G, [0, 1, 0]) + + def test_cycle(self): + G = nx.cycle_graph(3) + assert not nx.is_simple_path(G, [0, 1, 2, 0]) + + def test_missing_node(self): + G = nx.path_graph(2) + assert not nx.is_simple_path(G, [0, 2]) + + def test_missing_starting_node(self): + G = nx.path_graph(2) + assert not nx.is_simple_path(G, [2, 0]) + + def test_directed_path(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + assert nx.is_simple_path(G, [0, 1, 2]) + + def test_directed_non_path(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + assert not nx.is_simple_path(G, [2, 1, 0]) + + def test_directed_cycle(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + assert not nx.is_simple_path(G, [0, 1, 2, 0]) + + def test_multigraph(self): + G = nx.MultiGraph([(0, 1), (0, 1)]) + assert nx.is_simple_path(G, [0, 1]) + + def test_multidigraph(self): + G = nx.MultiDiGraph([(0, 1), (0, 1), (1, 0), (1, 0)]) + assert nx.is_simple_path(G, [0, 1]) + + +# Tests for all_simple_paths +def test_all_simple_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3)} + + +def test_all_simple_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_digraph_all_simple_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_all_simple_paths_with_two_targets_cutoff(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_digraph_all_simple_paths_with_two_targets_cutoff(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == {(0, 1, 2, 3), (0, 1, 2, 4)} + + +def test_all_simple_paths_with_two_targets_in_line_emits_two_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 2, 3)} + + +def test_all_simple_paths_ignores_cycle(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {(0, 1, 3)} + + +def test_all_simple_paths_with_two_targets_inside_cycle_emits_two_paths(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {(0, 1, 2), (0, 1, 3)} + + +def test_all_simple_paths_source_target(): + G = nx.path_graph(4) + assert list(nx.all_simple_paths(G, 1, 1)) == [[1]] + + +def test_all_simple_paths_cutoff(): + G = 
nx.complete_graph(4) + paths = nx.all_simple_paths(G, 0, 1, cutoff=1) + assert {tuple(p) for p in paths} == {(0, 1)} + paths = nx.all_simple_paths(G, 0, 1, cutoff=2) + assert {tuple(p) for p in paths} == {(0, 1), (0, 2, 1), (0, 3, 1)} + + +def test_all_simple_paths_on_non_trivial_graph(): + """you may need to draw this graph to make sure it is reasonable""" + G = nx.path_graph(5, create_using=nx.DiGraph()) + G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) + paths = nx.all_simple_paths(G, 1, [2, 3]) + assert {tuple(p) for p in paths} == { + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + (1, 5, 4, 2, 3), + } + paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=3) + assert {tuple(p) for p in paths} == { + (1, 2), + (1, 3, 4, 2), + (1, 5, 4, 2), + (1, 3), + (1, 2, 3), + (1, 5, 4, 3), + } + paths = nx.all_simple_paths(G, 1, [2, 3], cutoff=2) + assert {tuple(p) for p in paths} == {(1, 2), (1, 3), (1, 2, 3)} + + +def test_all_simple_paths_multigraph(): + G = nx.MultiGraph([(1, 2), (1, 2)]) + assert list(nx.all_simple_paths(G, 1, 1)) == [[1]] + nx.add_path(G, [3, 1, 10, 2]) + paths = list(nx.all_simple_paths(G, 1, 2)) + assert len(paths) == 3 + assert {tuple(p) for p in paths} == {(1, 2), (1, 2), (1, 10, 2)} + + +def test_all_simple_paths_multigraph_with_cutoff(): + G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)]) + paths = list(nx.all_simple_paths(G, 1, 2, cutoff=1)) + assert len(paths) == 2 + assert {tuple(p) for p in paths} == {(1, 2), (1, 2)} + + # See GitHub issue #6732. + G = nx.MultiGraph([(0, 1), (0, 2)]) + assert list(nx.all_simple_paths(G, 0, {1, 2}, cutoff=1)) == [[0, 1], [0, 2]] + + +def test_all_simple_paths_directed(): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [3, 2, 1]) + paths = nx.all_simple_paths(G, 1, 3) + assert {tuple(p) for p in paths} == {(1, 2, 3)} + + +def test_all_simple_paths_empty(): + G = nx.path_graph(4) + paths = nx.all_simple_paths(G, 0, 3, cutoff=2) + assert list(paths) == [] + + +def test_all_simple_paths_corner_cases(): + assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 0)) == [[0]] + assert list(nx.all_simple_paths(nx.empty_graph(2), 0, 1)) == [] + assert list(nx.all_simple_paths(nx.path_graph(9), 0, 8, 0)) == [] + + +def test_all_simple_paths_source_in_targets(): + # See GitHub issue #6690. 
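+    # Editorial note (not from upstream NetworkX): when the source is itself
+    # among the targets, the trivial single-node path [0] is emitted along
+    # with the longer paths, as the assertion below shows.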
+ G = nx.path_graph(3) + assert list(nx.all_simple_paths(G, 0, {0, 1, 2})) == [[0], [0, 1], [0, 1, 2]] + + +def hamiltonian_path(G, source): + source = arbitrary_element(G) + neighbors = set(G[source]) - {source} + n = len(G) + for target in neighbors: + for path in nx.all_simple_paths(G, source, target): + if len(path) == n: + yield path + + +def test_hamiltonian_path(): + from itertools import permutations + + G = nx.complete_graph(4) + paths = [list(p) for p in hamiltonian_path(G, 0)] + exact = [[0] + list(p) for p in permutations([1, 2, 3], 3)] + assert sorted(paths) == sorted(exact) + + +def test_cutoff_zero(): + G = nx.complete_graph(4) + paths = nx.all_simple_paths(G, 0, 3, cutoff=0) + assert [list(p) for p in paths] == [] + paths = nx.all_simple_paths(nx.MultiGraph(G), 0, 3, cutoff=0) + assert [list(p) for p in paths] == [] + + +def test_source_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_paths(nx.MultiGraph(G), 0, 3)) + + +def test_target_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_paths(nx.MultiGraph(G), 1, 4)) + + +# Tests for all_simple_edge_paths +def test_all_simple_edge_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2), (2, 3))} + + +def test_all_simple_edge_paths_empty_path(): + G = nx.empty_graph(1) + assert list(nx.all_simple_edge_paths(G, 0, 0)) == [[]] + + +def test_all_simple_edge_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_digraph_all_simple_edge_paths_with_two_targets_emits_two_paths(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4]) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_all_simple_edge_paths_with_two_targets_cutoff(): + G = nx.path_graph(4) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_digraph_all_simple_edge_paths_with_two_targets_cutoff(): + G = nx.path_graph(4, create_using=nx.DiGraph()) + G.add_edge(2, 4) + paths = nx.all_simple_edge_paths(G, 0, [3, 4], cutoff=3) + assert {tuple(p) for p in paths} == { + ((0, 1), (1, 2), (2, 3)), + ((0, 1), (1, 2), (2, 4)), + } + + +def test_all_simple_edge_paths_with_two_targets_in_line_emits_two_paths(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2)), ((0, 1), (1, 2), (2, 3))} + + +def test_all_simple_edge_paths_ignores_cycle(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_edge_paths(G, 0, 3) + assert {tuple(p) for p in paths} == {((0, 1), (1, 3))} + + +def test_all_simple_edge_paths_with_two_targets_inside_cycle_emits_two_paths(): + G = nx.cycle_graph(3, create_using=nx.DiGraph()) + G.add_edge(1, 3) + paths = nx.all_simple_edge_paths(G, 0, [2, 3]) + assert {tuple(p) for p in paths} == {((0, 1), (1, 2)), ((0, 1), (1, 3))} + + +def test_all_simple_edge_paths_source_target(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 1, 1) + assert list(paths) == [[]] + + +def 
test_all_simple_edge_paths_cutoff(): + G = nx.complete_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 1, cutoff=1) + assert {tuple(p) for p in paths} == {((0, 1),)} + paths = nx.all_simple_edge_paths(G, 0, 1, cutoff=2) + assert {tuple(p) for p in paths} == {((0, 1),), ((0, 2), (2, 1)), ((0, 3), (3, 1))} + + +def test_all_simple_edge_paths_on_non_trivial_graph(): + """you may need to draw this graph to make sure it is reasonable""" + G = nx.path_graph(5, create_using=nx.DiGraph()) + G.add_edges_from([(0, 5), (1, 5), (1, 3), (5, 4), (4, 2), (4, 3)]) + paths = nx.all_simple_edge_paths(G, 1, [2, 3]) + assert {tuple(p) for p in paths} == { + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + ((1, 5), (5, 4), (4, 2), (2, 3)), + } + paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=3) + assert {tuple(p) for p in paths} == { + ((1, 2),), + ((1, 3), (3, 4), (4, 2)), + ((1, 5), (5, 4), (4, 2)), + ((1, 3),), + ((1, 2), (2, 3)), + ((1, 5), (5, 4), (4, 3)), + } + paths = nx.all_simple_edge_paths(G, 1, [2, 3], cutoff=2) + assert {tuple(p) for p in paths} == {((1, 2),), ((1, 3),), ((1, 2), (2, 3))} + + +def test_all_simple_edge_paths_multigraph(): + G = nx.MultiGraph([(1, 2), (1, 2)]) + paths = nx.all_simple_edge_paths(G, 1, 1) + assert list(paths) == [[]] + nx.add_path(G, [3, 1, 10, 2]) + paths = list(nx.all_simple_edge_paths(G, 1, 2)) + assert len(paths) == 3 + assert {tuple(p) for p in paths} == { + ((1, 2, 0),), + ((1, 2, 1),), + ((1, 10, 0), (10, 2, 0)), + } + + +def test_all_simple_edge_paths_multigraph_with_cutoff(): + G = nx.MultiGraph([(1, 2), (1, 2), (1, 10), (10, 2)]) + paths = list(nx.all_simple_edge_paths(G, 1, 2, cutoff=1)) + assert len(paths) == 2 + assert {tuple(p) for p in paths} == {((1, 2, 0),), ((1, 2, 1),)} + + +def test_all_simple_edge_paths_directed(): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [3, 2, 1]) + paths = nx.all_simple_edge_paths(G, 1, 3) + assert {tuple(p) for p in paths} == {((1, 2), (2, 3))} + + +def test_all_simple_edge_paths_empty(): + G = nx.path_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3, cutoff=2) + assert list(paths) == [] + + +def test_all_simple_edge_paths_corner_cases(): + assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 0)) == [[]] + assert list(nx.all_simple_edge_paths(nx.empty_graph(2), 0, 1)) == [] + assert list(nx.all_simple_edge_paths(nx.path_graph(9), 0, 8, 0)) == [] + + +def test_all_simple_edge_paths_ignores_self_loop(): + G = nx.Graph([(0, 0), (0, 1), (1, 1), (1, 2)]) + assert list(nx.all_simple_edge_paths(G, 0, 2)) == [[(0, 1), (1, 2)]] + + +def hamiltonian_edge_path(G, source): + source = arbitrary_element(G) + neighbors = set(G[source]) - {source} + n = len(G) + for target in neighbors: + for path in nx.all_simple_edge_paths(G, source, target): + if len(path) == n - 1: + yield path + + +def test_hamiltonian_edge_path(): + from itertools import permutations + + G = nx.complete_graph(4) + paths = hamiltonian_edge_path(G, 0) + exact = [list(pairwise([0] + list(p))) for p in permutations([1, 2, 3], 3)] + assert sorted(exact) == sorted(paths) + + +def test_edge_cutoff_zero(): + G = nx.complete_graph(4) + paths = nx.all_simple_edge_paths(G, 0, 3, cutoff=0) + assert [list(p) for p in paths] == [] + paths = nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3, cutoff=0) + assert [list(p) for p in paths] == [] + + +def test_edge_source_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) +
list(nx.all_simple_edge_paths(nx.MultiGraph(G), 0, 3)) + + +def test_edge_target_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.all_simple_edge_paths(nx.MultiGraph(G), 1, 4)) + + +# Tests for shortest_simple_paths +def test_shortest_simple_paths(): + G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + paths = nx.shortest_simple_paths(G, 1, 12) + assert next(paths) == [1, 2, 3, 4, 8, 12] + assert next(paths) == [1, 5, 6, 7, 8, 12] + assert [len(path) for path in nx.shortest_simple_paths(G, 1, 12)] == sorted( + len(path) for path in nx.all_simple_paths(G, 1, 12) + ) + + +def test_shortest_simple_paths_singleton_path(): + G = nx.empty_graph(3) + assert list(nx.shortest_simple_paths(G, 0, 0)) == [[0]] + + +def test_shortest_simple_paths_directed(): + G = nx.cycle_graph(7, create_using=nx.DiGraph()) + paths = nx.shortest_simple_paths(G, 0, 3) + assert list(paths) == [[0, 1, 2, 3]] + + +def test_shortest_simple_paths_directed_with_weight_function(): + def cost(u, v, x): + return 1 + + G = cnlti(nx.grid_2d_graph(4, 4), first_label=1, ordering="sorted") + paths = nx.shortest_simple_paths(G, 1, 12) + assert next(paths) == [1, 2, 3, 4, 8, 12] + assert next(paths) == [1, 5, 6, 7, 8, 12] + assert [ + len(path) for path in nx.shortest_simple_paths(G, 1, 12, weight=cost) + ] == sorted(len(path) for path in nx.all_simple_paths(G, 1, 12)) + + +def test_shortest_simple_paths_with_weight_function(): + def cost(u, v, x): + return 1 + + G = nx.cycle_graph(7, create_using=nx.DiGraph()) + paths = nx.shortest_simple_paths(G, 0, 3, weight=cost) + assert list(paths) == [[0, 1, 2, 3]] + + +def test_shortest_simple_paths_with_none_weight_function(): + def cost(u, v, x): + delta = abs(u - v) + # ignore interior edges + return 1 if (delta == 1 or delta == 4) else None + + G = nx.complete_graph(5) + paths = nx.shortest_simple_paths(G, 0, 2, weight=cost) + assert list(paths) == [[0, 1, 2], [0, 4, 3, 2]] + + +def test_Greg_Bernstein(): + g1 = nx.Graph() + g1.add_nodes_from(["N0", "N1", "N2", "N3", "N4"]) + g1.add_edge("N4", "N1", weight=10.0, capacity=50, name="L5") + g1.add_edge("N4", "N0", weight=7.0, capacity=40, name="L4") + g1.add_edge("N0", "N1", weight=10.0, capacity=45, name="L1") + g1.add_edge("N3", "N0", weight=10.0, capacity=50, name="L0") + g1.add_edge("N2", "N3", weight=12.0, capacity=30, name="L2") + g1.add_edge("N1", "N2", weight=15.0, capacity=42, name="L3") + solution = [["N1", "N0", "N3"], ["N1", "N2", "N3"], ["N1", "N4", "N0", "N3"]] + result = list(nx.shortest_simple_paths(g1, "N1", "N3", weight="weight")) + assert result == solution + + +def test_weighted_shortest_simple_path(): + def cost_func(path): + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) + + G = nx.complete_graph(5) + weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} + nx.set_edge_attributes(G, weight, "weight") + cost = 0 + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): + this_cost = cost_func(path) + assert cost <= this_cost + cost = this_cost + + +def test_directed_weighted_shortest_simple_path(): + def cost_func(path): + return sum(G.adj[u][v]["weight"] for (u, v) in zip(path, path[1:])) + + G = nx.complete_graph(5) + G = G.to_directed() + weight = {(u, v): random.randint(1, 100) for (u, v) in G.edges()} + nx.set_edge_attributes(G, weight, "weight") + cost = 0 + for path in nx.shortest_simple_paths(G, 0, 3, weight="weight"): + this_cost = cost_func(path) + assert cost <= this_cost + cost = this_cost + + 
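+# Editorial illustration, not part of upstream NetworkX: the two weighted
+# tests above rely on nx.shortest_simple_paths yielding paths in nondecreasing
+# order of total path weight. A minimal self-contained sketch of that contract
+# (the test name below is ours, not upstream's):
+def test_shortest_simple_paths_weight_order_sketch():
+    G = nx.Graph()
+    G.add_weighted_edges_from([("a", "b", 1), ("b", "c", 1), ("a", "c", 5)])
+    paths = list(nx.shortest_simple_paths(G, "a", "c", weight="weight"))
+    # The two-hop path costs 1 + 1 = 2 and the direct edge costs 5, so the
+    # cheaper path must be generated first.
+    assert paths == [["a", "b", "c"], ["a", "c"]]
+
+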
+def test_weighted_shortest_simple_path_issue2427(): + G = nx.Graph() + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] + G = nx.Graph() + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] + + +def test_directed_weighted_shortest_simple_path_issue2427(): + G = nx.DiGraph() + G.add_edge("IN", "OUT", weight=2) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=2) + G.add_edge("B", "OUT", weight=2) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "OUT"], + ["IN", "B", "OUT"], + ] + G = nx.DiGraph() + G.add_edge("IN", "OUT", weight=10) + G.add_edge("IN", "A", weight=1) + G.add_edge("IN", "B", weight=1) + G.add_edge("B", "OUT", weight=1) + assert list(nx.shortest_simple_paths(G, "IN", "OUT", weight="weight")) == [ + ["IN", "B", "OUT"], + ["IN", "OUT"], + ] + + +def test_weight_name(): + G = nx.cycle_graph(7) + nx.set_edge_attributes(G, 1, "weight") + nx.set_edge_attributes(G, 1, "foo") + G.adj[1][2]["foo"] = 7 + paths = list(nx.shortest_simple_paths(G, 0, 3, weight="foo")) + solution = [[0, 6, 5, 4, 3], [0, 1, 2, 3]] + assert paths == solution + + +def test_ssp_source_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 0, 3)) + + +def test_ssp_target_missing(): + with pytest.raises(nx.NodeNotFound): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 1, 4)) + + +def test_ssp_multigraph(): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.MultiGraph() + nx.add_path(G, [1, 2, 3]) + list(nx.shortest_simple_paths(G, 1, 4)) + + +def test_ssp_source_missing2(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [0, 1, 2]) + nx.add_path(G, [3, 4, 5]) + list(nx.shortest_simple_paths(G, 0, 3)) + + +def test_bidirectional_shortest_path_restricted_cycle(): + cycle = nx.cycle_graph(7) + length, path = _bidirectional_shortest_path(cycle, 0, 3) + assert path == [0, 1, 2, 3] + length, path = _bidirectional_shortest_path(cycle, 0, 3, ignore_nodes=[1]) + assert path == [0, 6, 5, 4, 3] + + +def test_bidirectional_shortest_path_restricted_wheel(): + wheel = nx.wheel_graph(6) + length, path = _bidirectional_shortest_path(wheel, 1, 3) + assert path in [[1, 0, 3], [1, 2, 3]] + length, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0]) + assert path == [1, 2, 3] + length, path = _bidirectional_shortest_path(wheel, 1, 3, ignore_nodes=[0, 2]) + assert path == [1, 5, 4, 3] + length, path = _bidirectional_shortest_path( + wheel, 1, 3, ignore_edges=[(1, 0), (5, 0), (2, 3)] + ) + assert path in [[1, 2, 0, 3], [1, 5, 4, 3]] + + +def test_bidirectional_shortest_path_restricted_directed_cycle(): + directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph()) + length, path = _bidirectional_shortest_path(directed_cycle, 0, 3) + assert path == [0, 1, 2, 3] + pytest.raises( + nx.NetworkXNoPath, + _bidirectional_shortest_path, + directed_cycle, + 0, + 3, + ignore_nodes=[1], + ) + length, path = _bidirectional_shortest_path( + directed_cycle, 0, 3, ignore_edges=[(2, 1)] + ) + assert path == [0, 1, 2, 3] + 
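+    # Editorial note (not from upstream NetworkX): ignoring (2, 1) removes
+    # only the reverse edge, which the forward path 0->1->2->3 never uses;
+    # ignoring the forward edge (1, 2) below disconnects 0 from 3.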
pytest.raises( + nx.NetworkXNoPath, + _bidirectional_shortest_path, + directed_cycle, + 0, + 3, + ignore_edges=[(1, 2)], + ) + + +def test_bidirectional_shortest_path_ignore(): + G = nx.Graph() + nx.add_path(G, [1, 2]) + nx.add_path(G, [1, 3]) + nx.add_path(G, [1, 4]) + pytest.raises( + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1] + ) + pytest.raises( + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[2] + ) + G = nx.Graph() + nx.add_path(G, [1, 3]) + nx.add_path(G, [1, 4]) + nx.add_path(G, [3, 2]) + pytest.raises( + nx.NetworkXNoPath, _bidirectional_shortest_path, G, 1, 2, ignore_nodes=[1, 2] + ) + + +def validate_path(G, s, t, soln_len, path): + assert path[0] == s + assert path[-1] == t + assert soln_len == sum( + G[u][v].get("weight", 1) for u, v in zip(path[:-1], path[1:]) + ) + + +def validate_length_path(G, s, t, soln_len, length, path): + assert soln_len == length + validate_path(G, s, t, length, path) + + +def test_bidirectional_dijkstra_restricted(): + XG = nx.DiGraph() + XG.add_weighted_edges_from( + [ + ("s", "u", 10), + ("s", "x", 5), + ("u", "v", 1), + ("u", "x", 2), + ("v", "y", 1), + ("x", "u", 3), + ("x", "v", 5), + ("x", "y", 2), + ("y", "s", 7), + ("y", "v", 6), + ] + ) + + XG3 = nx.Graph() + XG3.add_weighted_edges_from( + [[0, 1, 2], [1, 2, 12], [2, 3, 1], [3, 4, 5], [4, 5, 1], [5, 0, 10]] + ) + validate_length_path(XG, "s", "v", 9, *_bidirectional_dijkstra(XG, "s", "v")) + validate_length_path( + XG, "s", "v", 10, *_bidirectional_dijkstra(XG, "s", "v", ignore_nodes=["u"]) + ) + validate_length_path( + XG, + "s", + "v", + 11, + *_bidirectional_dijkstra(XG, "s", "v", ignore_edges=[("s", "x")]), + ) + pytest.raises( + nx.NetworkXNoPath, + _bidirectional_dijkstra, + XG, + "s", + "v", + ignore_nodes=["u"], + ignore_edges=[("s", "x")], + ) + validate_length_path(XG3, 0, 3, 15, *_bidirectional_dijkstra(XG3, 0, 3)) + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_nodes=[1]) + ) + validate_length_path( + XG3, 0, 3, 16, *_bidirectional_dijkstra(XG3, 0, 3, ignore_edges=[(2, 3)]) + ) + pytest.raises( + nx.NetworkXNoPath, + _bidirectional_dijkstra, + XG3, + 0, + 3, + ignore_nodes=[1], + ignore_edges=[(5, 4)], + ) + + +def test_bidirectional_dijkstra_no_path(): + with pytest.raises(nx.NetworkXNoPath): + G = nx.Graph() + nx.add_path(G, [1, 2, 3]) + nx.add_path(G, [4, 5, 6]) + _bidirectional_dijkstra(G, 1, 6) + + +def test_bidirectional_dijkstra_ignore(): + G = nx.Graph() + nx.add_path(G, [1, 2, 10]) + nx.add_path(G, [1, 3, 10]) + pytest.raises(nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1]) + pytest.raises(nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[2]) + pytest.raises( + nx.NetworkXNoPath, _bidirectional_dijkstra, G, 1, 2, ignore_nodes=[1, 2] + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py new file mode 100644 index 0000000..c8e4542 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smallworld.py @@ -0,0 +1,76 @@ +import pytest + +pytest.importorskip("numpy") + +import networkx as nx +from networkx import lattice_reference, omega, random_reference, sigma + +rng = 42 + + +def test_random_reference(): + G = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + Gr = random_reference(G, niter=1, seed=rng) + C = nx.average_clustering(G) + Cr = nx.average_clustering(Gr) + assert C > Cr + + 
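+    # Editorial note (not from upstream NetworkX): random_reference rewires
+    # edges while preserving the degree sequence, so average clustering is
+    # expected to drop relative to the Watts-Strogatz original.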
with pytest.raises(nx.NetworkXError): + next(random_reference(nx.Graph())) + with pytest.raises(nx.NetworkXNotImplemented): + next(random_reference(nx.DiGraph())) + + H = nx.Graph(((0, 1), (2, 3))) + Hl = random_reference(H, niter=1, seed=rng) + + +def test_lattice_reference(): + G = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + Gl = lattice_reference(G, niter=1, seed=rng) + L = nx.average_shortest_path_length(G) + Ll = nx.average_shortest_path_length(Gl) + assert Ll > L + + pytest.raises(nx.NetworkXError, lattice_reference, nx.Graph()) + pytest.raises(nx.NetworkXNotImplemented, lattice_reference, nx.DiGraph()) + + H = nx.Graph(((0, 1), (2, 3))) + Hl = lattice_reference(H, niter=1) + + +def test_sigma(): + Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + sigmas = sigma(Gs, niter=1, nrand=2, seed=rng) + sigmar = sigma(Gr, niter=1, nrand=2, seed=rng) + assert sigmar < sigmas + + +def test_omega(): + Gl = nx.connected_watts_strogatz_graph(50, 6, 0, seed=rng) + Gr = nx.connected_watts_strogatz_graph(50, 6, 1, seed=rng) + Gs = nx.connected_watts_strogatz_graph(50, 6, 0.1, seed=rng) + omegal = omega(Gl, niter=1, nrand=1, seed=rng) + omegar = omega(Gr, niter=1, nrand=1, seed=rng) + omegas = omega(Gs, niter=1, nrand=1, seed=rng) + assert omegal < omegas and omegas < omegar + + # Test that omega lies within the [-1, 1] bounds + G_barbell = nx.barbell_graph(5, 1) + G_karate = nx.karate_club_graph() + + omega_barbell = nx.omega(G_barbell) + omega_karate = nx.omega(G_karate, nrand=2) + + omegas = (omegal, omegar, omegas, omega_barbell, omega_karate) + + for o in omegas: + assert -1 <= o <= 1 + + +@pytest.mark.parametrize("f", (nx.random_reference, nx.lattice_reference)) +def test_graph_no_edges(f): + G = nx.Graph() + G.add_nodes_from([0, 1, 2, 3]) + with pytest.raises(nx.NetworkXError, match="Graph has fewer that 2 edges"): + f(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py new file mode 100644 index 0000000..528dbc8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_smetric.py @@ -0,0 +1,8 @@ +import pytest + +import networkx as nx + + +def test_smetric(): + G = nx.Graph([(1, 2), (2, 3), (2, 4), (1, 4)]) + assert nx.s_metric(G) == 19.0 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py new file mode 100644 index 0000000..e8604e6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_sparsifiers.py @@ -0,0 +1,138 @@ +"""Unit tests for the sparsifier computation functions.""" + +import pytest + +import networkx as nx +from networkx.utils import py_random_state + +_seed = 2 + + +def _test_spanner(G, spanner, stretch, weight=None): + """Test whether a spanner is valid. + + This function tests whether the given spanner is a subgraph of the + given graph G with the same node set. It also tests for all shortest + paths whether they adhere to the given stretch. + + Parameters + ---------- + G : NetworkX graph + The original graph for which the spanner was constructed. + + spanner : NetworkX graph + The spanner to be tested. + + stretch : float + The proclaimed stretch of the spanner. + + weight : object + The edge attribute to use as distance. 
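+
+    (Editorial note, not from upstream NetworkX: a valid spanner must satisfy
+    spanner_distance(u, v) <= stretch * original_distance(u, v) for every
+    connected pair; the stretch check below verifies exactly this bound.)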
+ """ + # check node set + assert set(G.nodes()) == set(spanner.nodes()) + + # check edge set and weights + for u, v in spanner.edges(): + assert G.has_edge(u, v) + if weight: + assert spanner[u][v][weight] == G[u][v][weight] + + # check connectivity and stretch + original_length = dict(nx.shortest_path_length(G, weight=weight)) + spanner_length = dict(nx.shortest_path_length(spanner, weight=weight)) + for u in G.nodes(): + for v in G.nodes(): + if u in original_length and v in original_length[u]: + assert spanner_length[u][v] <= stretch * original_length[u][v] + + +@py_random_state(1) +def _assign_random_weights(G, seed=None): + """Assigns random weights to the edges of a graph. + + Parameters + ---------- + + G : NetworkX graph + The original graph for which the spanner was constructed. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + """ + for u, v in G.edges(): + G[u][v]["weight"] = seed.random() + + +def test_spanner_trivial(): + """Test a trivial spanner with stretch 1.""" + G = nx.complete_graph(20) + spanner = nx.spanner(G, 1, seed=_seed) + + for u, v in G.edges: + assert spanner.has_edge(u, v) + + +def test_spanner_unweighted_complete_graph(): + """Test spanner construction on a complete unweighted graph.""" + G = nx.complete_graph(20) + + spanner = nx.spanner(G, 4, seed=_seed) + _test_spanner(G, spanner, 4) + + spanner = nx.spanner(G, 10, seed=_seed) + _test_spanner(G, spanner, 10) + + +def test_spanner_weighted_complete_graph(): + """Test spanner construction on a complete weighted graph.""" + G = nx.complete_graph(20) + _assign_random_weights(G, seed=_seed) + + spanner = nx.spanner(G, 4, weight="weight", seed=_seed) + _test_spanner(G, spanner, 4, weight="weight") + + spanner = nx.spanner(G, 10, weight="weight", seed=_seed) + _test_spanner(G, spanner, 10, weight="weight") + + +def test_spanner_unweighted_gnp_graph(): + """Test spanner construction on an unweighted gnp graph.""" + G = nx.gnp_random_graph(20, 0.4, seed=_seed) + + spanner = nx.spanner(G, 4, seed=_seed) + _test_spanner(G, spanner, 4) + + spanner = nx.spanner(G, 10, seed=_seed) + _test_spanner(G, spanner, 10) + + +def test_spanner_weighted_gnp_graph(): + """Test spanner construction on an weighted gnp graph.""" + G = nx.gnp_random_graph(20, 0.4, seed=_seed) + _assign_random_weights(G, seed=_seed) + + spanner = nx.spanner(G, 4, weight="weight", seed=_seed) + _test_spanner(G, spanner, 4, weight="weight") + + spanner = nx.spanner(G, 10, weight="weight", seed=_seed) + _test_spanner(G, spanner, 10, weight="weight") + + +def test_spanner_unweighted_disconnected_graph(): + """Test spanner construction on a disconnected graph.""" + G = nx.disjoint_union(nx.complete_graph(10), nx.complete_graph(10)) + + spanner = nx.spanner(G, 4, seed=_seed) + _test_spanner(G, spanner, 4) + + spanner = nx.spanner(G, 10, seed=_seed) + _test_spanner(G, spanner, 10) + + +def test_spanner_invalid_stretch(): + """Check whether an invalid stretch is caught.""" + with pytest.raises(ValueError): + G = nx.empty_graph() + nx.spanner(G, 0) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py new file mode 100644 index 0000000..d0f53cd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_structuralholes.py @@ -0,0 +1,191 @@ +"""Unit tests for the :mod:`networkx.algorithms.structuralholes` module.""" + +import math + 
+import pytest + +import networkx as nx +from networkx.classes.tests import dispatch_interface + + +class TestStructuralHolesNoScipy: + """Unit tests for computing measures of structural holes. + + The expected values for these functions were originally computed using the + proprietary software `UCINET`_ and the free software `IGraph`_ , and then + computed by hand to make sure that the results are correct. + + .. _UCINET: https://sites.google.com/site/ucinetsoftware/home + .. _IGraph: http://igraph.org/ + + """ + + def setup_method(self): + self.D = nx.DiGraph() + self.D.add_edges_from([(0, 1), (0, 2), (1, 0), (2, 1)]) + self.D_weights = {(0, 1): 2, (0, 2): 2, (1, 0): 1, (2, 1): 1} + # Example from http://www.analytictech.com/connections/v20(1)/holes.htm + self.G = nx.Graph() + self.G.add_edges_from( + [ + ("A", "B"), + ("A", "F"), + ("A", "G"), + ("A", "E"), + ("E", "G"), + ("F", "G"), + ("B", "G"), + ("B", "D"), + ("D", "G"), + ("G", "C"), + ] + ) + self.G_weights = { + ("A", "B"): 2, + ("A", "F"): 3, + ("A", "G"): 5, + ("A", "E"): 2, + ("E", "G"): 8, + ("F", "G"): 3, + ("B", "G"): 4, + ("B", "D"): 1, + ("D", "G"): 3, + ("G", "C"): 10, + } + self.Dnodes = list(self.D) + self.Gnodes = list(self.G) + + def test_constraint_directed(self): + constraint = nx.constraint(self.D, nodes=self.Dnodes) + assert constraint[0] == pytest.approx(1.003, abs=1e-3) + assert constraint[1] == pytest.approx(1.003, abs=1e-3) + assert constraint[2] == pytest.approx(1.389, abs=1e-3) + + def test_effective_size_directed(self): + effective_size = nx.effective_size(self.D, nodes=self.Dnodes) + assert effective_size[0] == pytest.approx(1.167, abs=1e-3) + assert effective_size[1] == pytest.approx(1.167, abs=1e-3) + assert effective_size[2] == pytest.approx(1, abs=1e-3) + + def test_constraint_weighted_directed(self): + D = self.D.copy() + nx.set_edge_attributes(D, self.D_weights, "weight") + constraint = nx.constraint(D, weight="weight", nodes=self.Dnodes) + assert constraint[0] == pytest.approx(0.840, abs=1e-3) + assert constraint[1] == pytest.approx(1.143, abs=1e-3) + assert constraint[2] == pytest.approx(1.378, abs=1e-3) + + def test_effective_size_weighted_directed(self): + D = self.D.copy() + nx.set_edge_attributes(D, self.D_weights, "weight") + effective_size = nx.effective_size(D, weight="weight", nodes=self.Dnodes) + assert effective_size[0] == pytest.approx(1.567, abs=1e-3) + assert effective_size[1] == pytest.approx(1.083, abs=1e-3) + assert effective_size[2] == pytest.approx(1, abs=1e-3) + + def test_constraint_undirected(self): + constraint = nx.constraint(self.G, nodes=self.Gnodes) + assert constraint["G"] == pytest.approx(0.400, abs=1e-3) + assert constraint["A"] == pytest.approx(0.595, abs=1e-3) + assert constraint["C"] == pytest.approx(1, abs=1e-3) + + def test_effective_size_undirected_borgatti(self): + effective_size = nx.effective_size(self.G, nodes=self.Gnodes) + assert effective_size["G"] == pytest.approx(4.67, abs=1e-2) + assert effective_size["A"] == pytest.approx(2.50, abs=1e-2) + assert effective_size["C"] == pytest.approx(1, abs=1e-2) + + def test_effective_size_undirected(self): + G = self.G.copy() + nx.set_edge_attributes(G, 1, "weight") + effective_size = nx.effective_size(G, weight="weight", nodes=self.Gnodes) + assert effective_size["G"] == pytest.approx(4.67, abs=1e-2) + assert effective_size["A"] == pytest.approx(2.50, abs=1e-2) + assert effective_size["C"] == pytest.approx(1, abs=1e-2) + + def test_constraint_weighted_undirected(self): + G = self.G.copy() + nx.set_edge_attributes(G, 
self.G_weights, "weight") + constraint = nx.constraint(G, weight="weight", nodes=self.Gnodes) + assert constraint["G"] == pytest.approx(0.299, abs=1e-3) + assert constraint["A"] == pytest.approx(0.795, abs=1e-3) + assert constraint["C"] == pytest.approx(1, abs=1e-3) + + def test_effective_size_weighted_undirected(self): + G = self.G.copy() + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight", nodes=self.Gnodes) + assert effective_size["G"] == pytest.approx(5.47, abs=1e-2) + assert effective_size["A"] == pytest.approx(2.47, abs=1e-2) + assert effective_size["C"] == pytest.approx(1, abs=1e-2) + + def test_constraint_isolated(self): + G = self.G.copy() + G.add_node(1) + constraint = nx.constraint(G, nodes=self.Gnodes + [1]) + assert math.isnan(constraint[1]) + + def test_effective_size_isolated(self): + G = self.G.copy() + G.add_node(1) + nx.set_edge_attributes(G, self.G_weights, "weight") + effective_size = nx.effective_size(G, weight="weight", nodes=self.Gnodes + [1]) + assert math.isnan(effective_size[1]) + + def test_effective_size_borgatti_isolated(self): + G = self.G.copy() + G.add_node(1) + effective_size = nx.effective_size(G, nodes=self.Gnodes + [1]) + assert math.isnan(effective_size[1]) + + +class TestStructuralHoles(TestStructuralHolesNoScipy): + pytest.importorskip("scipy") + Dnodes = None + Gnodes = None + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +@pytest.mark.parametrize("nodes", (None, [0])) +def test_effective_size_isolated_node_with_selfloop(graph, nodes): + """Behavior consistent with isolated node without self-loop. See gh-6916""" + G = graph([(0, 0)]) # Single node with one self-edge + assert math.isnan(nx.effective_size(G, nodes=nodes)[0]) + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +@pytest.mark.parametrize("nodes", (None, [0])) +def test_effective_size_isolated_node_with_selfloop_weighted(graph, nodes): + """Weighted self-loop. See gh-6916""" + G = graph() + G.add_weighted_edges_from([(0, 0, 10)]) + assert math.isnan(nx.effective_size(G, nodes=nodes)[0]) + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +def test_constraint_isolated_node_with_selfloop(graph): + """Behavior consistent with isolated node without self-loop. See gh-6916""" + G = graph([(0, 0)]) # Single node with one self-edge + assert math.isnan(nx.constraint(G)[0]) + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +def test_constraint_isolated_node_with_selfloop_using_nodes_kwarg(graph): + """Behavior consistent with isolated node without self-loop. See gh-6916""" + G = graph([(0, 0)]) # Single node with one self-edge + assert nx.constraint(G, nodes=[0])[0] == 4 + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +def test_constraint_isolated_node_with_selfloop_weighted(graph): + """Weighted self-loop. 
See gh-6916""" + G = graph() + G.add_weighted_edges_from([(0, 0, 10)]) + assert math.isnan(nx.constraint(G)[0]) + + +@pytest.mark.parametrize("graph", (nx.Graph, nx.DiGraph)) +def test_constraint_isolated_node_with_selfloop_weighted_using_nodes_kwarg(graph): + G = graph() + G.add_weighted_edges_from([(0, 0, 10)]) + assert nx.constraint(G, nodes=[0])[0] == 4 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py new file mode 100644 index 0000000..c3bf82f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_summarization.py @@ -0,0 +1,642 @@ +""" +Unit tests for dedensification and graph summarization +""" + +import pytest + +import networkx as nx + + +class TestDirectedDedensification: + def build_original_graph(self): + original_matrix = [ + ("1", "BC"), + ("2", "ABC"), + ("3", ["A", "B", "6"]), + ("4", "ABC"), + ("5", "AB"), + ("6", ["5"]), + ("A", ["6"]), + ] + graph = nx.DiGraph() + for source, targets in original_matrix: + for target in targets: + graph.add_edge(source, target) + return graph + + def build_compressed_graph(self): + compressed_matrix = [ + ("1", "BC"), + ("2", ["ABC"]), + ("3", ["A", "B", "6"]), + ("4", ["ABC"]), + ("5", "AB"), + ("6", ["5"]), + ("A", ["6"]), + ("ABC", "ABC"), + ] + compressed_graph = nx.DiGraph() + for source, targets in compressed_matrix: + for target in targets: + compressed_graph.add_edge(source, target) + return compressed_graph + + def test_empty(self): + """ + Verify that an empty directed graph results in no compressor nodes + """ + G = nx.DiGraph() + compressed_graph, c_nodes = nx.dedensify(G, threshold=2) + assert c_nodes == set() + + @staticmethod + def densify(G, compressor_nodes, copy=True): + """ + Reconstructs the original graph from a dedensified, directed graph + + Parameters + ---------- + G: dedensified graph + A networkx graph + compressor_nodes: iterable + Iterable of compressor nodes in the dedensified graph + copy: bool, optional (default: True) + Indicates if densification should be done on a copy of the graph; if False, the graph is modified in place + + Returns + ------- + G: graph + A densified networkx graph + """ + if copy: + G = G.copy() + for compressor_node in compressor_nodes: + all_neighbors = set(nx.all_neighbors(G, compressor_node)) + out_neighbors = set(G.neighbors(compressor_node)) + for out_neighbor in out_neighbors: + G.remove_edge(compressor_node, out_neighbor) + in_neighbors = all_neighbors - out_neighbors + for in_neighbor in in_neighbors: + G.remove_edge(in_neighbor, compressor_node) + for out_neighbor in out_neighbors: + G.add_edge(in_neighbor, out_neighbor) + G.remove_node(compressor_node) + return G + + def setup_method(self): + self.c_nodes = ("ABC",) + + def test_dedensify_edges(self): + """ + Verifies that dedensify produced the correct edges to/from compressor + nodes in a directed graph + """ + G = self.build_original_graph() + compressed_G = self.build_compressed_graph() + compressed_graph, c_nodes = nx.dedensify(G, threshold=2) + for s, t in compressed_graph.edges(): + o_s = "".join(sorted(s)) + o_t = "".join(sorted(t)) + compressed_graph_exists = compressed_graph.has_edge(s, t) + verified_compressed_exists = compressed_G.has_edge(o_s, o_t) + assert compressed_graph_exists == verified_compressed_exists + assert len(c_nodes) == len(self.c_nodes) + + def test_dedensify_edge_count(self): + """ + Verifies that dedensify produced the correct number of compressor nodes + in a directed graph + """ + G
= self.build_original_graph() + original_edge_count = len(G.edges()) + c_G, c_nodes = nx.dedensify(G, threshold=2) + compressed_edge_count = len(c_G.edges()) + assert compressed_edge_count <= original_edge_count + compressed_G = self.build_compressed_graph() + assert compressed_edge_count == len(compressed_G.edges()) + + def test_densify_edges(self): + """ + Verifies that densification produces the correct edges from the + original directed graph + """ + compressed_G = self.build_compressed_graph() + original_graph = self.densify(compressed_G, self.c_nodes, copy=True) + G = self.build_original_graph() + for s, t in G.edges(): + assert G.has_edge(s, t) == original_graph.has_edge(s, t) + + def test_densify_edge_count(self): + """ + Verifies that densification produces the correct number of edges in the + original directed graph + """ + compressed_G = self.build_compressed_graph() + compressed_edge_count = len(compressed_G.edges()) + original_graph = self.densify(compressed_G, self.c_nodes) + original_edge_count = len(original_graph.edges()) + assert compressed_edge_count <= original_edge_count + G = self.build_original_graph() + assert original_edge_count == len(G.edges()) + + +class TestUnDirectedDedensification: + def build_original_graph(self): + """ + Builds graph shown in the original research paper + """ + original_matrix = [ + ("1", "CB"), + ("2", "ABC"), + ("3", ["A", "B", "6"]), + ("4", "ABC"), + ("5", "AB"), + ("6", ["5"]), + ("A", ["6"]), + ] + graph = nx.Graph() + for source, targets in original_matrix: + for target in targets: + graph.add_edge(source, target) + return graph + + def test_empty(self): + """ + Verify that an empty undirected graph results in no compressor nodes + """ + G = nx.Graph() + compressed_G, c_nodes = nx.dedensify(G, threshold=2) + assert c_nodes == set() + + def setup_method(self): + self.c_nodes = ("6AB", "ABC") + + def build_compressed_graph(self): + compressed_matrix = [ + ("1", ["B", "C"]), + ("2", ["ABC"]), + ("3", ["6AB"]), + ("4", ["ABC"]), + ("5", ["6AB"]), + ("6", ["6AB", "A"]), + ("A", ["6AB", "ABC"]), + ("B", ["ABC", "6AB"]), + ("C", ["ABC"]), + ] + compressed_graph = nx.Graph() + for source, targets in compressed_matrix: + for target in targets: + compressed_graph.add_edge(source, target) + return compressed_graph + + def test_dedensify_edges(self): + """ + Verifies that dedensify produced correct compressor nodes and the + correct edges to/from the compressor nodes in an undirected graph + """ + G = self.build_original_graph() + c_G, c_nodes = nx.dedensify(G, threshold=2) + v_compressed_G = self.build_compressed_graph() + for s, t in c_G.edges(): + o_s = "".join(sorted(s)) + o_t = "".join(sorted(t)) + has_compressed_edge = c_G.has_edge(s, t) + verified_has_compressed_edge = v_compressed_G.has_edge(o_s, o_t) + assert has_compressed_edge == verified_has_compressed_edge + assert len(c_nodes) == len(self.c_nodes) + + def test_dedensify_edge_count(self): + """ + Verifies that dedensify produced the correct number of edges in an + undirected graph + """ + G = self.build_original_graph() + c_G, c_nodes = nx.dedensify(G, threshold=2, copy=True) + compressed_edge_count = len(c_G.edges()) + verified_original_edge_count = len(G.edges()) + assert compressed_edge_count <= verified_original_edge_count + verified_compressed_G = self.build_compressed_graph() + verified_compressed_edge_count = len(verified_compressed_G.edges()) + assert compressed_edge_count == verified_compressed_edge_count + + +@pytest.mark.parametrize( + "graph_type", [nx.Graph, nx.DiGraph, 
nx.MultiGraph, nx.MultiDiGraph] +) +def test_summarization_empty(graph_type): + G = graph_type() + summary_graph = nx.snap_aggregation(G, node_attributes=("color",)) + assert nx.is_isomorphic(summary_graph, G) + + +class AbstractSNAP: + node_attributes = ("color",) + + def build_original_graph(self): + pass + + def build_summary_graph(self): + pass + + def test_summary_graph(self): + original_graph = self.build_original_graph() + summary_graph = self.build_summary_graph() + + relationship_attributes = ("type",) + generated_summary_graph = nx.snap_aggregation( + original_graph, self.node_attributes, relationship_attributes + ) + relabeled_summary_graph = self.deterministic_labels(generated_summary_graph) + assert nx.is_isomorphic(summary_graph, relabeled_summary_graph) + + def deterministic_labels(self, G): + node_labels = list(G.nodes) + node_labels = sorted(node_labels, key=lambda n: sorted(G.nodes[n]["group"])[0]) + node_labels.sort() + + label_mapping = {} + for index, node in enumerate(node_labels): + label = f"Supernode-{index}" + label_mapping[node] = label + + return nx.relabel_nodes(G, label_mapping) + + +class TestSNAPNoEdgeTypes(AbstractSNAP): + relationship_attributes = () + + def test_summary_graph(self): + original_graph = self.build_original_graph() + summary_graph = self.build_summary_graph() + + relationship_attributes = ("type",) + generated_summary_graph = nx.snap_aggregation( + original_graph, self.node_attributes + ) + relabeled_summary_graph = self.deterministic_labels(generated_summary_graph) + assert nx.is_isomorphic(summary_graph, relabeled_summary_graph) + + def build_original_graph(self): + nodes = { + "A": {"color": "Red"}, + "B": {"color": "Red"}, + "C": {"color": "Red"}, + "D": {"color": "Red"}, + "E": {"color": "Blue"}, + "F": {"color": "Blue"}, + "G": {"color": "Blue"}, + "H": {"color": "Blue"}, + "I": {"color": "Yellow"}, + "J": {"color": "Yellow"}, + "K": {"color": "Yellow"}, + "L": {"color": "Yellow"}, + } + edges = [ + ("A", "B"), + ("A", "C"), + ("A", "E"), + ("A", "I"), + ("B", "D"), + ("B", "J"), + ("B", "F"), + ("C", "G"), + ("D", "H"), + ("I", "J"), + ("J", "K"), + ("I", "L"), + ] + G = nx.Graph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target in edges: + G.add_edge(source, target) + + return G + + def build_summary_graph(self): + nodes = { + "Supernode-0": {"color": "Red"}, + "Supernode-1": {"color": "Red"}, + "Supernode-2": {"color": "Blue"}, + "Supernode-3": {"color": "Blue"}, + "Supernode-4": {"color": "Yellow"}, + "Supernode-5": {"color": "Yellow"}, + } + edges = [ + ("Supernode-0", "Supernode-0"), + ("Supernode-0", "Supernode-1"), + ("Supernode-0", "Supernode-2"), + ("Supernode-0", "Supernode-4"), + ("Supernode-1", "Supernode-3"), + ("Supernode-4", "Supernode-4"), + ("Supernode-4", "Supernode-5"), + ] + G = nx.Graph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target in edges: + G.add_edge(source, target) + + supernodes = { + "Supernode-0": {"A", "B"}, + "Supernode-1": {"C", "D"}, + "Supernode-2": {"E", "F"}, + "Supernode-3": {"G", "H"}, + "Supernode-4": {"I", "J"}, + "Supernode-5": {"K", "L"}, + } + nx.set_node_attributes(G, supernodes, "group") + return G + + +class TestSNAPUndirected(AbstractSNAP): + def build_original_graph(self): + nodes = { + "A": {"color": "Red"}, + "B": {"color": "Red"}, + "C": {"color": "Red"}, + "D": {"color": "Red"}, + "E": {"color": "Blue"}, + "F": {"color": "Blue"}, + "G": {"color": "Blue"}, + "H": 
{"color": "Blue"}, + "I": {"color": "Yellow"}, + "J": {"color": "Yellow"}, + "K": {"color": "Yellow"}, + "L": {"color": "Yellow"}, + } + edges = [ + ("A", "B", "Strong"), + ("A", "C", "Weak"), + ("A", "E", "Strong"), + ("A", "I", "Weak"), + ("B", "D", "Weak"), + ("B", "J", "Weak"), + ("B", "F", "Strong"), + ("C", "G", "Weak"), + ("D", "H", "Weak"), + ("I", "J", "Strong"), + ("J", "K", "Strong"), + ("I", "L", "Strong"), + ] + G = nx.Graph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, type in edges: + G.add_edge(source, target, type=type) + + return G + + def build_summary_graph(self): + nodes = { + "Supernode-0": {"color": "Red"}, + "Supernode-1": {"color": "Red"}, + "Supernode-2": {"color": "Blue"}, + "Supernode-3": {"color": "Blue"}, + "Supernode-4": {"color": "Yellow"}, + "Supernode-5": {"color": "Yellow"}, + } + edges = [ + ("Supernode-0", "Supernode-0", "Strong"), + ("Supernode-0", "Supernode-1", "Weak"), + ("Supernode-0", "Supernode-2", "Strong"), + ("Supernode-0", "Supernode-4", "Weak"), + ("Supernode-1", "Supernode-3", "Weak"), + ("Supernode-4", "Supernode-4", "Strong"), + ("Supernode-4", "Supernode-5", "Strong"), + ] + G = nx.Graph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, type in edges: + G.add_edge(source, target, types=[{"type": type}]) + + supernodes = { + "Supernode-0": {"A", "B"}, + "Supernode-1": {"C", "D"}, + "Supernode-2": {"E", "F"}, + "Supernode-3": {"G", "H"}, + "Supernode-4": {"I", "J"}, + "Supernode-5": {"K", "L"}, + } + nx.set_node_attributes(G, supernodes, "group") + return G + + +class TestSNAPDirected(AbstractSNAP): + def build_original_graph(self): + nodes = { + "A": {"color": "Red"}, + "B": {"color": "Red"}, + "C": {"color": "Green"}, + "D": {"color": "Green"}, + "E": {"color": "Blue"}, + "F": {"color": "Blue"}, + "G": {"color": "Yellow"}, + "H": {"color": "Yellow"}, + } + edges = [ + ("A", "C", "Strong"), + ("A", "E", "Strong"), + ("A", "F", "Weak"), + ("B", "D", "Strong"), + ("B", "E", "Weak"), + ("B", "F", "Strong"), + ("C", "G", "Strong"), + ("C", "F", "Strong"), + ("D", "E", "Strong"), + ("D", "H", "Strong"), + ("G", "E", "Strong"), + ("H", "F", "Strong"), + ] + G = nx.DiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, type in edges: + G.add_edge(source, target, type=type) + + return G + + def build_summary_graph(self): + nodes = { + "Supernode-0": {"color": "Red"}, + "Supernode-1": {"color": "Green"}, + "Supernode-2": {"color": "Blue"}, + "Supernode-3": {"color": "Yellow"}, + } + edges = [ + ("Supernode-0", "Supernode-1", [{"type": "Strong"}]), + ("Supernode-0", "Supernode-2", [{"type": "Weak"}, {"type": "Strong"}]), + ("Supernode-1", "Supernode-2", [{"type": "Strong"}]), + ("Supernode-1", "Supernode-3", [{"type": "Strong"}]), + ("Supernode-3", "Supernode-2", [{"type": "Strong"}]), + ] + G = nx.DiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, types in edges: + G.add_edge(source, target, types=types) + + supernodes = { + "Supernode-0": {"A", "B"}, + "Supernode-1": {"C", "D"}, + "Supernode-2": {"E", "F"}, + "Supernode-3": {"G", "H"}, + "Supernode-4": {"I", "J"}, + "Supernode-5": {"K", "L"}, + } + nx.set_node_attributes(G, supernodes, "group") + return G + + +class TestSNAPUndirectedMulti(AbstractSNAP): + def build_original_graph(self): + nodes = { + "A": {"color": "Red"}, + "B": {"color": "Red"}, + "C": 
{"color": "Red"}, + "D": {"color": "Blue"}, + "E": {"color": "Blue"}, + "F": {"color": "Blue"}, + "G": {"color": "Yellow"}, + "H": {"color": "Yellow"}, + "I": {"color": "Yellow"}, + } + edges = [ + ("A", "D", ["Weak", "Strong"]), + ("B", "E", ["Weak", "Strong"]), + ("D", "I", ["Strong"]), + ("E", "H", ["Strong"]), + ("F", "G", ["Weak"]), + ("I", "G", ["Weak", "Strong"]), + ("I", "H", ["Weak", "Strong"]), + ("G", "H", ["Weak", "Strong"]), + ] + G = nx.MultiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, types in edges: + for type in types: + G.add_edge(source, target, type=type) + + return G + + def build_summary_graph(self): + nodes = { + "Supernode-0": {"color": "Red"}, + "Supernode-1": {"color": "Blue"}, + "Supernode-2": {"color": "Yellow"}, + "Supernode-3": {"color": "Blue"}, + "Supernode-4": {"color": "Yellow"}, + "Supernode-5": {"color": "Red"}, + } + edges = [ + ("Supernode-1", "Supernode-2", [{"type": "Weak"}]), + ("Supernode-2", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]), + ("Supernode-3", "Supernode-4", [{"type": "Strong"}]), + ("Supernode-3", "Supernode-5", [{"type": "Weak"}, {"type": "Strong"}]), + ("Supernode-4", "Supernode-4", [{"type": "Weak"}, {"type": "Strong"}]), + ] + G = nx.MultiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, types in edges: + for type in types: + G.add_edge(source, target, type=type) + + supernodes = { + "Supernode-0": {"A", "B"}, + "Supernode-1": {"C", "D"}, + "Supernode-2": {"E", "F"}, + "Supernode-3": {"G", "H"}, + "Supernode-4": {"I", "J"}, + "Supernode-5": {"K", "L"}, + } + nx.set_node_attributes(G, supernodes, "group") + return G + + +class TestSNAPDirectedMulti(AbstractSNAP): + def build_original_graph(self): + nodes = { + "A": {"color": "Red"}, + "B": {"color": "Red"}, + "C": {"color": "Green"}, + "D": {"color": "Green"}, + "E": {"color": "Blue"}, + "F": {"color": "Blue"}, + "G": {"color": "Yellow"}, + "H": {"color": "Yellow"}, + } + edges = [ + ("A", "C", ["Weak", "Strong"]), + ("A", "E", ["Strong"]), + ("A", "F", ["Weak"]), + ("B", "D", ["Weak", "Strong"]), + ("B", "E", ["Weak"]), + ("B", "F", ["Strong"]), + ("C", "G", ["Weak", "Strong"]), + ("C", "F", ["Strong"]), + ("D", "E", ["Strong"]), + ("D", "H", ["Weak", "Strong"]), + ("G", "E", ["Strong"]), + ("H", "F", ["Strong"]), + ] + G = nx.MultiDiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, types in edges: + for type in types: + G.add_edge(source, target, type=type) + + return G + + def build_summary_graph(self): + nodes = { + "Supernode-0": {"color": "Red"}, + "Supernode-1": {"color": "Blue"}, + "Supernode-2": {"color": "Yellow"}, + "Supernode-3": {"color": "Blue"}, + } + edges = [ + ("Supernode-0", "Supernode-1", ["Weak", "Strong"]), + ("Supernode-0", "Supernode-2", ["Weak", "Strong"]), + ("Supernode-1", "Supernode-2", ["Strong"]), + ("Supernode-1", "Supernode-3", ["Weak", "Strong"]), + ("Supernode-3", "Supernode-2", ["Strong"]), + ] + G = nx.MultiDiGraph() + for node in nodes: + attributes = nodes[node] + G.add_node(node, **attributes) + + for source, target, types in edges: + for type in types: + G.add_edge(source, target, type=type) + + supernodes = { + "Supernode-0": {"A", "B"}, + "Supernode-1": {"C", "D"}, + "Supernode-2": {"E", "F"}, + "Supernode-3": {"G", "H"}, + } + nx.set_node_attributes(G, supernodes, "group") + return G diff --git 
a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py new file mode 100644 index 0000000..e765bd5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_swap.py @@ -0,0 +1,179 @@ +import pytest + +import networkx as nx + +cycle = nx.cycle_graph(5, create_using=nx.DiGraph) +tree = nx.DiGraph() +tree.add_edges_from(nx.random_labeled_tree(10, seed=42).edges) +path = nx.path_graph(5, create_using=nx.DiGraph) +binomial = nx.binomial_tree(3, create_using=nx.DiGraph) +HH = nx.directed_havel_hakimi_graph([1, 2, 1, 2, 2, 2], [3, 1, 0, 1, 2, 3]) +balanced_tree = nx.balanced_tree(2, 3, create_using=nx.DiGraph) + + +@pytest.mark.parametrize("G", [path, binomial, HH, cycle, tree, balanced_tree]) +def test_directed_edge_swap(G): + in_degree = set(G.in_degree) + out_degree = set(G.out_degree) + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert in_degree == set(G.in_degree) + assert out_degree == set(G.out_degree) + assert edges != set(G.edges) + assert 3 == sum(e not in edges for e in G.edges) + + +def test_directed_edge_swap_undo_previous_swap(): + G = nx.DiGraph(nx.path_graph(4).edges) # only 1 swap possible + edges = set(G.edges) + nx.directed_edge_swap(G, nswap=2, max_tries=100) + assert edges == set(G.edges) + + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert {(0, 2), (1, 3), (2, 1)} == set(G.edges) + nx.directed_edge_swap(G, nswap=1, max_tries=100, seed=1) + assert edges == set(G.edges) + + +def test_edge_cases_directed_edge_swap(): + # Tests cases when swaps are impossible, either too few edges exist, or self loops/cycles are unavoidable + # TODO: Rewrite function to explicitly check for impossible swaps and raise error + e = ( + "Maximum number of swap attempts \\(11\\) exceeded " + "before desired swaps achieved \\(\\d\\)." 
+ ) + graph = nx.DiGraph([(0, 0), (0, 1), (1, 0), (2, 3), (3, 2)]) + with pytest.raises(nx.NetworkXAlgorithmError, match=e): + nx.directed_edge_swap(graph, nswap=1, max_tries=10, seed=1) + + +def test_double_edge_swap(): + graph = nx.barabasi_albert_graph(200, 1) + degrees = sorted(d for n, d in graph.degree()) + G = nx.double_edge_swap(graph, 40) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_double_edge_swap_seed(): + graph = nx.barabasi_albert_graph(200, 1) + degrees = sorted(d for n, d in graph.degree()) + G = nx.double_edge_swap(graph, 40, seed=1) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_connected_double_edge_swap(): + graph = nx.barabasi_albert_graph(200, 1) + degrees = sorted(d for n, d in graph.degree()) + G = nx.connected_double_edge_swap(graph, 40, seed=1) + assert nx.is_connected(graph) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_connected_double_edge_swap_low_window_threshold(): + graph = nx.barabasi_albert_graph(200, 1) + degrees = sorted(d for n, d in graph.degree()) + G = nx.connected_double_edge_swap(graph, 40, _window_threshold=0, seed=1) + assert nx.is_connected(graph) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_connected_double_edge_swap_star(): + # Testing ui==xi in connected_double_edge_swap + graph = nx.star_graph(40) + degrees = sorted(d for n, d in graph.degree()) + G = nx.connected_double_edge_swap(graph, 1, seed=4) + assert nx.is_connected(graph) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_connected_double_edge_swap_star_low_window_threshold(): + # Testing ui==xi in connected_double_edge_swap with low window threshold + graph = nx.star_graph(40) + degrees = sorted(d for n, d in graph.degree()) + G = nx.connected_double_edge_swap(graph, 1, _window_threshold=0, seed=4) + assert nx.is_connected(graph) + assert degrees == sorted(d for n, d in graph.degree()) + + +def test_directed_edge_swap_small(): + with pytest.raises(nx.NetworkXError): + G = nx.directed_edge_swap(nx.path_graph(3, create_using=nx.DiGraph)) + + +def test_directed_edge_swap_tries(): + with pytest.raises(nx.NetworkXError): + G = nx.directed_edge_swap( + nx.path_graph(3, create_using=nx.DiGraph), nswap=1, max_tries=0 + ) + + +def test_directed_exception_undirected(): + graph = nx.Graph([(0, 1), (2, 3)]) + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.directed_edge_swap(graph) + + +def test_directed_edge_max_tries(): + with pytest.raises(nx.NetworkXAlgorithmError): + G = nx.directed_edge_swap( + nx.complete_graph(4, nx.DiGraph()), nswap=1, max_tries=5 + ) + + +def test_double_edge_swap_small(): + with pytest.raises(nx.NetworkXError): + G = nx.double_edge_swap(nx.path_graph(3)) + + +def test_double_edge_swap_tries(): + with pytest.raises(nx.NetworkXError): + G = nx.double_edge_swap(nx.path_graph(10), nswap=1, max_tries=0) + + +def test_double_edge_directed(): + graph = nx.DiGraph([(0, 1), (2, 3)]) + with pytest.raises(nx.NetworkXError, match="not defined for directed graphs."): + G = nx.double_edge_swap(graph) + + +def test_double_edge_max_tries(): + with pytest.raises(nx.NetworkXAlgorithmError): + G = nx.double_edge_swap(nx.complete_graph(4), nswap=1, max_tries=5) + + +def test_connected_double_edge_swap_small(): + with pytest.raises(nx.NetworkXError): + G = nx.connected_double_edge_swap(nx.path_graph(3)) + + +def test_connected_double_edge_swap_not_connected(): + with pytest.raises(nx.NetworkXError): + G = nx.path_graph(3) + nx.add_path(G, [10, 11, 12]) + G = 
nx.connected_double_edge_swap(G) + + +def test_degree_seq_c4(): + G = nx.cycle_graph(4) + degrees = sorted(d for n, d in G.degree()) + G = nx.double_edge_swap(G, 1, 100) + assert degrees == sorted(d for n, d in G.degree()) + + +def test_fewer_than_4_nodes(): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2]) + with pytest.raises(nx.NetworkXError, match=".*fewer than four nodes."): + nx.directed_edge_swap(G) + + +def test_less_than_3_edges(): + G = nx.DiGraph([(0, 1), (1, 2)]) + G.add_nodes_from([3, 4]) + with pytest.raises(nx.NetworkXError, match=".*fewer than 3 edges"): + nx.directed_edge_swap(G) + + G = nx.Graph() + G.add_nodes_from([0, 1, 2, 3]) + with pytest.raises(nx.NetworkXError, match=".*fewer than 2 edges"): + nx.double_edge_swap(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py new file mode 100644 index 0000000..19fc7f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_threshold.py @@ -0,0 +1,266 @@ +""" +Threshold Graphs +================ +""" + +import pytest + +import networkx as nx +import networkx.algorithms.threshold as nxt + +cnlti = nx.convert_node_labels_to_integers + + +class TestGeneratorThreshold: + def test_threshold_sequence_graph_test(self): + G = nx.star_graph(10) + assert nxt.is_threshold_graph(G) + assert nxt.is_threshold_sequence([d for n, d in G.degree()]) + + G = nx.complete_graph(10) + assert nxt.is_threshold_graph(G) + assert nxt.is_threshold_sequence([d for n, d in G.degree()]) + + deg = [3, 2, 2, 1, 1, 1] + assert not nxt.is_threshold_sequence(deg) + + deg = [3, 2, 2, 1] + assert nxt.is_threshold_sequence(deg) + + G = nx.generators.havel_hakimi_graph(deg) + assert nxt.is_threshold_graph(G) + + def test_creation_sequences(self): + deg = [3, 2, 2, 1] + G = nx.generators.havel_hakimi_graph(deg) + + with pytest.raises(ValueError): + nxt.creation_sequence(deg, with_labels=True, compact=True) + + cs0 = nxt.creation_sequence(deg) + H0 = nxt.threshold_graph(cs0) + assert "".join(cs0) == "ddid" + + cs1 = nxt.creation_sequence(deg, with_labels=True) + H1 = nxt.threshold_graph(cs1) + assert cs1 == [(1, "d"), (2, "d"), (3, "i"), (0, "d")] + + cs2 = nxt.creation_sequence(deg, compact=True) + H2 = nxt.threshold_graph(cs2) + assert cs2 == [2, 1, 1] + assert "".join(nxt.uncompact(cs2)) == "ddid" + assert nx.could_be_isomorphic(H0, G) + assert nx.could_be_isomorphic(H0, H1) + assert nx.could_be_isomorphic(H0, H2) + + def test_make_compact(self): + assert nxt.make_compact(["d", "d", "d", "i", "d", "d"]) == [3, 1, 2] + assert nxt.make_compact([3, 1, 2]) == [3, 1, 2] + pytest.raises(TypeError, nxt.make_compact, [3.0, 1.0, 2.0]) + + def test_uncompact(self): + assert nxt.uncompact([3, 1, 2]) == ["d", "d", "d", "i", "d", "d"] + assert nxt.uncompact(["d", "d", "i", "d"]) == ["d", "d", "i", "d"] + assert nxt.uncompact( + nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + ) == nxt.uncompact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + pytest.raises(TypeError, nxt.uncompact, [3.0, 1.0, 2.0]) + + def test_creation_sequence_to_weights(self): + assert nxt.creation_sequence_to_weights([3, 1, 2]) == [ + 0.5, + 0.5, + 0.5, + 0.25, + 0.75, + 0.75, + ] + pytest.raises(TypeError, nxt.creation_sequence_to_weights, [3.0, 1.0, 2.0]) + + def test_weights_to_creation_sequence(self): + deg = [3, 2, 2, 1] + with pytest.raises(ValueError): + nxt.weights_to_creation_sequence(deg, with_labels=True, compact=True) + assert 
nxt.weights_to_creation_sequence(deg, with_labels=True) == [ + (3, "d"), + (1, "d"), + (2, "d"), + (0, "d"), + ] + assert nxt.weights_to_creation_sequence(deg, compact=True) == [4] + + def test_find_alternating_4_cycle(self): + G = nx.Graph() + G.add_edge(1, 2) + assert not nxt.find_alternating_4_cycle(G) + + def test_shortest_path(self): + deg = [3, 2, 2, 1] + G = nx.generators.havel_hakimi_graph(deg) + cs1 = nxt.creation_sequence(deg, with_labels=True) + for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 1), (1, 2), (2, 3)]: + assert nxt.shortest_path(cs1, n, m) == nx.shortest_path(G, n, m) + + spl = nxt.shortest_path_length(cs1, 3) + spl2 = nxt.shortest_path_length([t for v, t in cs1], 2) + assert spl == spl2 + + spld = {} + for j, pl in enumerate(spl): + n = cs1[j][0] + spld[n] = pl + assert spld == nx.single_source_shortest_path_length(G, 3) + + assert nxt.shortest_path(["d", "d", "d", "i", "d", "d"], 1, 2) == [1, 2] + assert nxt.shortest_path([3, 1, 2], 1, 2) == [1, 2] + pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1, 2) + pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], "a", 2) + pytest.raises(ValueError, nxt.shortest_path, [3, 1, 2], 1, "b") + assert nxt.shortest_path([3, 1, 2], 1, 1) == [1] + + def test_shortest_path_length(self): + assert nxt.shortest_path_length([3, 1, 2], 1) == [1, 0, 1, 2, 1, 1] + assert nxt.shortest_path_length(["d", "d", "d", "i", "d", "d"], 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + assert nxt.shortest_path_length(("d", "d", "d", "i", "d", "d"), 1) == [ + 1, + 0, + 1, + 2, + 1, + 1, + ] + pytest.raises(TypeError, nxt.shortest_path, [3.0, 1.0, 2.0], 1) + + def test_random_threshold_sequence(self): + assert len(nxt.random_threshold_sequence(10, 0.5)) == 10 + assert nxt.random_threshold_sequence(10, 0.5, seed=42) == [ + "d", + "i", + "d", + "d", + "d", + "i", + "i", + "i", + "d", + "d", + ] + pytest.raises(ValueError, nxt.random_threshold_sequence, 10, 1.5) + + def test_right_d_threshold_sequence(self): + assert nxt.right_d_threshold_sequence(3, 2) == ["d", "i", "d"] + pytest.raises(ValueError, nxt.right_d_threshold_sequence, 2, 3) + + def test_left_d_threshold_sequence(self): + assert nxt.left_d_threshold_sequence(3, 2) == ["d", "i", "d"] + pytest.raises(ValueError, nxt.left_d_threshold_sequence, 2, 3) + + def test_weights_thresholds(self): + wseq = [3, 4, 3, 3, 5, 6, 5, 4, 5, 6] + cs = nxt.weights_to_creation_sequence(wseq, threshold=10) + wseq = nxt.creation_sequence_to_weights(cs) + cs2 = nxt.weights_to_creation_sequence(wseq) + assert cs == cs2 + + wseq = nxt.creation_sequence_to_weights(nxt.uncompact([3, 1, 2, 3, 3, 2, 3])) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] + + wseq = nxt.creation_sequence_to_weights([3, 1, 2, 3, 3, 2, 3]) + assert wseq == [ + s * 0.125 for s in [4, 4, 4, 3, 5, 5, 2, 2, 2, 6, 6, 6, 1, 1, 7, 7, 7] + ] + + wseq = nxt.creation_sequence_to_weights(list(enumerate("ddidiiidididi"))) + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] + + wseq = nxt.creation_sequence_to_weights("ddidiiidididi") + assert wseq == [s * 0.1 for s in [5, 5, 4, 6, 3, 3, 3, 7, 2, 8, 1, 9, 0]] + + wseq = nxt.creation_sequence_to_weights("ddidiiidididid") + ws = [s / 12 for s in [6, 6, 5, 7, 4, 4, 4, 8, 3, 9, 2, 10, 1, 11]] + assert sum(abs(c - d) for c, d in zip(wseq, ws)) < 1e-14 + + def test_finding_routines(self): + G = nx.Graph({1: [2], 2: [3], 3: [4], 4: [5], 5: [6]}) + G.add_edge(2, 4) + G.add_edge(2, 5) + G.add_edge(2, 7) + G.add_edge(3, 6) + 
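+ # Here (1, 2) and (3, 6) are edges while (1, 3) and (2, 6) are not, + # which is exactly the alternating 4-cycle [1, 2, 3, 6] asserted below.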
G.add_edge(4, 6) + + # Alternating 4 cycle + assert nxt.find_alternating_4_cycle(G) == [1, 2, 3, 6] + + # Threshold graph + TG = nxt.find_threshold_graph(G) + assert nxt.is_threshold_graph(TG) + assert sorted(TG.nodes()) == [1, 2, 3, 4, 5, 7] + + cs = nxt.creation_sequence(dict(TG.degree()), with_labels=True) + assert nxt.find_creation_sequence(G) == cs + + def test_fast_versions_properties_threshold_graphs(self): + cs = "ddiiddid" + G = nxt.threshold_graph(cs) + assert nxt.density("ddiiddid") == nx.density(G) + assert sorted(nxt.degree_sequence(cs)) == sorted(d for n, d in G.degree()) + + ts = nxt.triangle_sequence(cs) + assert ts == list(nx.triangles(G).values()) + assert sum(ts) // 3 == nxt.triangles(cs) + + c1 = nxt.cluster_sequence(cs) + c2 = list(nx.clustering(G).values()) + assert sum(abs(c - d) for c, d in zip(c1, c2)) == pytest.approx(0, abs=1e-7) + + b1 = nx.betweenness_centrality(G).values() + b2 = nxt.betweenness_sequence(cs) + assert sum(abs(c - d) for c, d in zip(b1, b2)) < 1e-7 + + assert nxt.eigenvalues(cs) == [0, 1, 3, 3, 5, 7, 7, 8] + + # Degree Correlation + assert abs(nxt.degree_correlation(cs) + 0.593038821954) < 1e-12 + assert nxt.degree_correlation("diiiddi") == -0.8 + assert nxt.degree_correlation("did") == -1.0 + assert nxt.degree_correlation("ddd") == 1.0 + assert nxt.eigenvalues("dddiii") == [0, 0, 0, 0, 3, 3] + assert nxt.eigenvalues("dddiiid") == [0, 1, 1, 1, 4, 4, 7] + + def test_tg_creation_routines(self): + s = nxt.left_d_threshold_sequence(5, 7) + s = nxt.right_d_threshold_sequence(5, 7) + s1 = nxt.swap_d(s, 1.0, 1.0) + s1 = nxt.swap_d(s, 1.0, 1.0, seed=1) + + def test_eigenvectors(self): + np = pytest.importorskip("numpy") + eigenval = np.linalg.eigvals + pytest.importorskip("scipy") + + cs = "ddiiddid" + G = nxt.threshold_graph(cs) + (tgeval, tgevec) = nxt.eigenvectors(cs) + np.testing.assert_allclose([np.dot(lv, lv) for lv in tgevec], 1.0, rtol=1e-9) + lapl = nx.laplacian_matrix(G) + + def test_create_using(self): + cs = "ddiiddid" + G = nxt.threshold_graph(cs) + pytest.raises( + nx.exception.NetworkXError, + nxt.threshold_graph, + cs, + create_using=nx.DiGraph(), + ) + MG = nxt.threshold_graph(cs, create_using=nx.MultiGraph()) + assert sorted(MG.edges()) == sorted(G.edges()) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py new file mode 100644 index 0000000..1e256f4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_time_dependent.py @@ -0,0 +1,431 @@ +"""Unit testing for time dependent algorithms.""" + +from datetime import datetime, timedelta + +import pytest + +import networkx as nx + +_delta = timedelta(days=5 * 365) + + +class TestCdIndex: + """Unit testing for the cd index function.""" + + def test_common_graph(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 1) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(1997, 1, 1)}, + 6: {"time": datetime(1998, 1, 1)}, + 7: {"time": datetime(1999, 1, 1)}, + 8: {"time": datetime(1999, 1, 1)}, + 
9: {"time": datetime(1998, 1, 1)}, + 10: {"time": datetime(1997, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + + assert nx.cd_index(G, 4, time_delta=_delta) == 0.17 + + def test_common_graph_with_given_attributes(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 1) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"date": datetime(1992, 1, 1)}, + 1: {"date": datetime(1992, 1, 1)}, + 2: {"date": datetime(1993, 1, 1)}, + 3: {"date": datetime(1993, 1, 1)}, + 4: {"date": datetime(1995, 1, 1)}, + 5: {"date": datetime(1997, 1, 1)}, + 6: {"date": datetime(1998, 1, 1)}, + 7: {"date": datetime(1999, 1, 1)}, + 8: {"date": datetime(1999, 1, 1)}, + 9: {"date": datetime(1998, 1, 1)}, + 10: {"date": datetime(1997, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + + assert nx.cd_index(G, 4, time_delta=_delta, time="date") == 0.17 + + def test_common_graph_with_int_attributes(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 1) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": 20}, + 1: {"time": 20}, + 2: {"time": 30}, + 3: {"time": 30}, + 4: {"time": 50}, + 5: {"time": 70}, + 6: {"time": 80}, + 7: {"time": 90}, + 8: {"time": 90}, + 9: {"time": 80}, + 10: {"time": 74}, + } + + nx.set_node_attributes(G, node_attrs) + + assert nx.cd_index(G, 4, time_delta=50) == 0.17 + + def test_common_graph_with_float_attributes(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 1) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": 20.2}, + 1: {"time": 20.2}, + 2: {"time": 30.7}, + 3: {"time": 30.7}, + 4: {"time": 50.9}, + 5: {"time": 70.1}, + 6: {"time": 80.6}, + 7: {"time": 90.7}, + 8: {"time": 90.7}, + 9: {"time": 80.6}, + 10: {"time": 74.2}, + } + + nx.set_node_attributes(G, node_attrs) + + assert nx.cd_index(G, 4, time_delta=50) == 0.17 + + def test_common_graph_with_weights(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 1) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(1997, 1, 1)}, + 6: {"time": datetime(1998, 1, 1), "weight": 5}, + 7: {"time": datetime(1999, 1, 1), "weight": 2}, + 8: {"time": datetime(1999, 1, 1), "weight": 6}, + 9: {"time": datetime(1998, 1, 1), "weight": 3}, + 10: {"time": datetime(1997, 4, 1), "weight": 10}, + } + + nx.set_node_attributes(G, node_attrs) + assert nx.cd_index(G, 4, time_delta=_delta, weight="weight") == 0.04 + + def test_node_with_no_predecessors(self): + G = 
nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(2005, 1, 1)}, + 6: {"time": datetime(2010, 1, 1)}, + 7: {"time": datetime(2001, 1, 1)}, + 8: {"time": datetime(2020, 1, 1)}, + 9: {"time": datetime(2017, 1, 1)}, + 10: {"time": datetime(2004, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + assert nx.cd_index(G, 4, time_delta=_delta) == 0.0 + + def test_node_with_no_successors(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(8, 2) + G.add_edge(6, 0) + G.add_edge(6, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(1997, 1, 1)}, + 6: {"time": datetime(1998, 1, 1)}, + 7: {"time": datetime(1999, 1, 1)}, + 8: {"time": datetime(1999, 1, 1)}, + 9: {"time": datetime(1998, 1, 1)}, + 10: {"time": datetime(1997, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + assert nx.cd_index(G, 4, time_delta=_delta) == 1.0 + + def test_n_equals_zero(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 3) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(2005, 1, 1)}, + 6: {"time": datetime(2010, 1, 1)}, + 7: {"time": datetime(2001, 1, 1)}, + 8: {"time": datetime(2020, 1, 1)}, + 9: {"time": datetime(2017, 1, 1)}, + 10: {"time": datetime(2004, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + + with pytest.raises( + nx.NetworkXError, match="The cd index cannot be defined." 
+ ) as ve: + nx.cd_index(G, 4, time_delta=_delta) + + def test_time_timedelta_compatibility(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(4, 2) + G.add_edge(4, 0) + G.add_edge(4, 3) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": 20.2}, + 1: {"time": 20.2}, + 2: {"time": 30.7}, + 3: {"time": 30.7}, + 4: {"time": 50.9}, + 5: {"time": 70.1}, + 6: {"time": 80.6}, + 7: {"time": 90.7}, + 8: {"time": 90.7}, + 9: {"time": 80.6}, + 10: {"time": 74.2}, + } + + nx.set_node_attributes(G, node_attrs) + + with pytest.raises( + nx.NetworkXError, + match="Addition and comparison are not supported between", + ) as ve: + nx.cd_index(G, 4, time_delta=_delta) + + def test_node_with_no_time(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + G.add_edge(8, 2) + G.add_edge(6, 0) + G.add_edge(6, 3) + G.add_edge(5, 2) + G.add_edge(6, 2) + G.add_edge(6, 4) + G.add_edge(7, 4) + G.add_edge(8, 4) + G.add_edge(9, 4) + G.add_edge(9, 1) + G.add_edge(9, 3) + G.add_edge(10, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 6: {"time": datetime(1998, 1, 1)}, + 7: {"time": datetime(1999, 1, 1)}, + 8: {"time": datetime(1999, 1, 1)}, + 9: {"time": datetime(1998, 1, 1)}, + 10: {"time": datetime(1997, 4, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + + with pytest.raises( + nx.NetworkXError, match="Not all nodes have a 'time' attribute." + ) as ve: + nx.cd_index(G, 4, time_delta=_delta) + + def test_maximally_consolidating(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + G.add_edge(5, 1) + G.add_edge(5, 2) + G.add_edge(5, 3) + G.add_edge(5, 4) + G.add_edge(6, 1) + G.add_edge(6, 5) + G.add_edge(7, 1) + G.add_edge(7, 5) + G.add_edge(8, 2) + G.add_edge(8, 5) + G.add_edge(9, 5) + G.add_edge(9, 3) + G.add_edge(10, 5) + G.add_edge(10, 3) + G.add_edge(10, 4) + G.add_edge(11, 5) + G.add_edge(11, 4) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(1997, 1, 1)}, + 6: {"time": datetime(1998, 1, 1)}, + 7: {"time": datetime(1999, 1, 1)}, + 8: {"time": datetime(1999, 1, 1)}, + 9: {"time": datetime(1998, 1, 1)}, + 10: {"time": datetime(1997, 4, 1)}, + 11: {"time": datetime(1998, 5, 1)}, + } + + nx.set_node_attributes(G, node_attrs) + + assert nx.cd_index(G, 5, time_delta=_delta) == -1 + + def test_maximally_destabilizing(self): + G = nx.DiGraph() + G.add_nodes_from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + G.add_edge(5, 1) + G.add_edge(5, 2) + G.add_edge(5, 3) + G.add_edge(5, 4) + G.add_edge(6, 5) + G.add_edge(7, 5) + G.add_edge(8, 5) + G.add_edge(9, 5) + G.add_edge(10, 5) + G.add_edge(11, 5) + + node_attrs = { + 0: {"time": datetime(1992, 1, 1)}, + 1: {"time": datetime(1992, 1, 1)}, + 2: {"time": datetime(1993, 1, 1)}, + 3: {"time": datetime(1993, 1, 1)}, + 4: {"time": datetime(1995, 1, 1)}, + 5: {"time": datetime(1997, 1, 1)}, + 6: {"time": datetime(1998, 1, 1)}, + 7: {"time": datetime(1999, 1, 1)}, + 8: {"time": datetime(1999, 1, 1)}, + 9: {"time": datetime(1998, 1, 1)}, + 10: {"time": datetime(1997, 4, 1)}, + 11: {"time": datetime(1998, 5, 1)}, + } + + nx.set_node_attributes(G, node_attrs) 
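+ # Every citer of node 5 (nodes 6-11) lies inside the five-year window + # and cites node 5 alone, none of node 5's own references (nodes 1-4), + # so each citation is purely destabilizing and the index reaches its + # maximum of +1.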
+ + assert nx.cd_index(G, 5, time_delta=_delta) == 1 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py new file mode 100644 index 0000000..34d9b22 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_tournament.py @@ -0,0 +1,161 @@ +"""Unit tests for the :mod:`networkx.algorithms.tournament` module.""" + +from itertools import combinations + +import pytest + +from networkx import DiGraph +from networkx.algorithms.tournament import ( + hamiltonian_path, + index_satisfying, + is_reachable, + is_strongly_connected, + is_tournament, + random_tournament, + score_sequence, + tournament_matrix, +) + + +def test_condition_not_satisfied(): + iter_in = [0] + assert index_satisfying(iter_in, lambda x: x > 0) == 1 + + +def test_empty_iterable(): + with pytest.raises(ValueError): + index_satisfying([], lambda x: x > 0) + + +def test_is_tournament(): + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) + assert is_tournament(G) + + +def test_self_loops(): + """A tournament must have no self-loops.""" + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) + G.add_edge(0, 0) + assert not is_tournament(G) + + +def test_missing_edges(): + """A tournament must not have any pair of nodes without at least + one edge joining the pair. + + """ + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3)]) + assert not is_tournament(G) + + +def test_bidirectional_edges(): + """A tournament must not have any pair of nodes with greater + than one edge joining the pair. + + """ + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) + G.add_edge(1, 0) + assert not is_tournament(G) + + +def test_graph_is_tournament(): + for _ in range(10): + G = random_tournament(5) + assert is_tournament(G) + + +def test_graph_is_tournament_seed(): + for _ in range(10): + G = random_tournament(5, seed=1) + assert is_tournament(G) + + +def test_graph_is_tournament_one_node(): + G = random_tournament(1) + assert is_tournament(G) + + +def test_graph_is_tournament_zero_node(): + G = random_tournament(0) + assert is_tournament(G) + + +def test_hamiltonian_empty_graph(): + path = hamiltonian_path(DiGraph()) + assert len(path) == 0 + + +def test_path_is_hamiltonian(): + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) + path = hamiltonian_path(G) + assert len(path) == 4 + assert all(v in G[u] for u, v in zip(path, path[1:])) + + +def test_hamiltonian_cycle(): + """Tests that :func:`networkx.tournament.hamiltonian_path` + returns a Hamiltonian cycle when provided a strongly connected + tournament. 
+ + """ + G = DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0), (1, 3), (0, 2)]) + path = hamiltonian_path(G) + assert len(path) == 4 + assert all(v in G[u] for u, v in zip(path, path[1:])) + assert path[0] in G[path[-1]] + + +def test_score_sequence_edge(): + G = DiGraph([(0, 1)]) + assert score_sequence(G) == [0, 1] + + +def test_score_sequence_triangle(): + G = DiGraph([(0, 1), (1, 2), (2, 0)]) + assert score_sequence(G) == [1, 1, 1] + + +def test_tournament_matrix(): + np = pytest.importorskip("numpy") + pytest.importorskip("scipy") + npt = np.testing + G = DiGraph([(0, 1)]) + m = tournament_matrix(G) + npt.assert_array_equal(m.todense(), np.array([[0, 1], [-1, 0]])) + + +def test_reachable_pair(): + """Tests for a reachable pair of nodes.""" + G = DiGraph([(0, 1), (1, 2), (2, 0)]) + assert is_reachable(G, 0, 2) + + +def test_same_node_is_reachable(): + """Tests that a node is always reachable from it.""" + # G is an arbitrary tournament on ten nodes. + G = DiGraph(sorted(p) for p in combinations(range(10), 2)) + assert all(is_reachable(G, v, v) for v in G) + + +def test_unreachable_pair(): + """Tests for an unreachable pair of nodes.""" + G = DiGraph([(0, 1), (0, 2), (1, 2)]) + assert not is_reachable(G, 1, 0) + + +def test_is_strongly_connected(): + """Tests for a strongly connected tournament.""" + G = DiGraph([(0, 1), (1, 2), (2, 0)]) + assert is_strongly_connected(G) + + +def test_not_strongly_connected(): + """Tests for a tournament that is not strongly connected.""" + G = DiGraph([(0, 1), (0, 2), (1, 2)]) + assert not is_strongly_connected(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py new file mode 100644 index 0000000..7f5bca2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_triads.py @@ -0,0 +1,248 @@ +"""Tests for the :mod:`networkx.algorithms.triads` module.""" + +import itertools +from collections import defaultdict +from random import sample + +import pytest + +import networkx as nx + + +def test_triadic_census(): + """Tests the triadic_census function.""" + G = nx.DiGraph() + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = { + "030T": 2, + "120C": 1, + "210": 0, + "120U": 0, + "012": 9, + "102": 3, + "021U": 0, + "111U": 0, + "003": 8, + "030C": 0, + "021D": 9, + "201": 0, + "111D": 1, + "300": 0, + "120D": 0, + "021C": 2, + } + actual = nx.triadic_census(G) + assert expected == actual + + +def test_is_triad(): + """Tests the is_triad function""" + G = nx.karate_club_graph() + G = G.to_directed() + for i in range(100): + nodes = sample(sorted(G.nodes()), 3) + G2 = G.subgraph(nodes) + assert nx.is_triad(G2) + + +def test_all_triads(): + """Tests the all_triads function.""" + G = nx.DiGraph() + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = [ + f"{i},{j},{k}" + for i in range(7) + for j in range(i + 1, 7) + for k in range(j + 1, 7) + ] + expected = [G.subgraph(x.split(",")) for x in expected] + actual = list(nx.all_triads(G)) + assert all(any(nx.is_isomorphic(G1, G2) for G1 in expected) for G2 in actual) + + +def test_triad_type(): + """Tests the triad_type function.""" + # 0 edges (1 type) + G = nx.DiGraph({0: [], 1: [], 2: []}) + assert nx.triad_type(G) == "003" + # 1 edge (1 type) + G = nx.DiGraph({0: [1], 1: [], 2: []}) + assert nx.triad_type(G) == "012" + # 2 edges (4 types) + G = nx.DiGraph([(0, 1), (0, 2)]) + 
assert nx.triad_type(G) == "021D" + G = nx.DiGraph({0: [1], 1: [0], 2: []}) + assert nx.triad_type(G) == "102" + G = nx.DiGraph([(0, 1), (2, 1)]) + assert nx.triad_type(G) == "021U" + G = nx.DiGraph([(0, 1), (1, 2)]) + assert nx.triad_type(G) == "021C" + # 3 edges (4 types) + G = nx.DiGraph([(0, 1), (1, 0), (2, 1)]) + assert nx.triad_type(G) == "111D" + G = nx.DiGraph([(0, 1), (1, 0), (1, 2)]) + assert nx.triad_type(G) == "111U" + G = nx.DiGraph([(0, 1), (1, 2), (0, 2)]) + assert nx.triad_type(G) == "030T" + G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + assert nx.triad_type(G) == "030C" + # 4 edges (4 types) + G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (0, 2)]) + assert nx.triad_type(G) == "201" + G = nx.DiGraph([(0, 1), (1, 0), (2, 0), (2, 1)]) + assert nx.triad_type(G) == "120D" + G = nx.DiGraph([(0, 1), (1, 0), (0, 2), (1, 2)]) + assert nx.triad_type(G) == "120U" + G = nx.DiGraph([(0, 1), (1, 0), (0, 2), (2, 1)]) + assert nx.triad_type(G) == "120C" + # 5 edges (1 type) + G = nx.DiGraph([(0, 1), (1, 0), (2, 1), (1, 2), (0, 2)]) + assert nx.triad_type(G) == "210" + # 6 edges (1 type) + G = nx.DiGraph([(0, 1), (1, 0), (1, 2), (2, 1), (0, 2), (2, 0)]) + assert nx.triad_type(G) == "300" + + +def test_triads_by_type(): + G = nx.DiGraph() + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + all_triads = nx.all_triads(G) + expected = defaultdict(list) + for triad in all_triads: + name = nx.triad_type(triad) + expected[name].append(triad) + actual = nx.triads_by_type(G) + assert set(actual.keys()) == set(expected.keys()) + for tri_type, actual_Gs in actual.items(): + expected_Gs = expected[tri_type] + for a in actual_Gs: + assert any(nx.is_isomorphic(a, e) for e in expected_Gs) + + +def test_triadic_census_short_path_nodelist(): + G = nx.path_graph("abc", create_using=nx.DiGraph) + expected = {"021C": 1} + for nl in ["a", "b", "c", "ab", "ac", "bc", "abc"]: + triad_census = nx.triadic_census(G, nodelist=nl) + assert expected == {typ: cnt for typ, cnt in triad_census.items() if cnt > 0} + + +def test_triadic_census_correct_nodelist_values(): + G = nx.path_graph(5, create_using=nx.DiGraph) + msg = r"nodelist includes duplicate nodes or nodes not in G" + with pytest.raises(ValueError, match=msg): + nx.triadic_census(G, [1, 2, 2, 3]) + with pytest.raises(ValueError, match=msg): + nx.triadic_census(G, [1, 2, "a", 3]) + + +def test_triadic_census_tiny_graphs(): + tc = nx.triadic_census(nx.empty_graph(0, create_using=nx.DiGraph)) + assert {} == {typ: cnt for typ, cnt in tc.items() if cnt > 0} + tc = nx.triadic_census(nx.empty_graph(1, create_using=nx.DiGraph)) + assert {} == {typ: cnt for typ, cnt in tc.items() if cnt > 0} + tc = nx.triadic_census(nx.empty_graph(2, create_using=nx.DiGraph)) + assert {} == {typ: cnt for typ, cnt in tc.items() if cnt > 0} + tc = nx.triadic_census(nx.DiGraph([(1, 2)])) + assert {} == {typ: cnt for typ, cnt in tc.items() if cnt > 0} + + +def test_triadic_census_selfloops(): + GG = nx.path_graph("abc", create_using=nx.DiGraph) + expected = {"021C": 1} + for n in GG: + G = GG.copy() + G.add_edge(n, n) + tc = nx.triadic_census(G) + assert expected == {typ: cnt for typ, cnt in tc.items() if cnt > 0} + + GG = nx.path_graph("abcde", create_using=nx.DiGraph) + tbt = nx.triads_by_type(GG) + for n in GG: + GG.add_edge(n, n) + tc = nx.triadic_census(GG) + assert tc == {tt: len(tbt[tt]) for tt in tc} + + +def test_triadic_census_four_path(): + G = nx.path_graph("abcd", create_using=nx.DiGraph) + expected = {"012": 2, "021C": 2} + triad_census = 
nx.triadic_census(G) + assert expected == {typ: cnt for typ, cnt in triad_census.items() if cnt > 0} + + +def test_triadic_census_four_path_nodelist(): + G = nx.path_graph("abcd", create_using=nx.DiGraph) + expected_end = {"012": 2, "021C": 1} + expected_mid = {"012": 1, "021C": 2} + a_triad_census = nx.triadic_census(G, nodelist=["a"]) + assert expected_end == {typ: cnt for typ, cnt in a_triad_census.items() if cnt > 0} + b_triad_census = nx.triadic_census(G, nodelist=["b"]) + assert expected_mid == {typ: cnt for typ, cnt in b_triad_census.items() if cnt > 0} + c_triad_census = nx.triadic_census(G, nodelist=["c"]) + assert expected_mid == {typ: cnt for typ, cnt in c_triad_census.items() if cnt > 0} + d_triad_census = nx.triadic_census(G, nodelist=["d"]) + assert expected_end == {typ: cnt for typ, cnt in d_triad_census.items() if cnt > 0} + + +def test_triadic_census_nodelist(): + """Tests the triadic_census function.""" + G = nx.DiGraph() + G.add_edges_from(["01", "02", "03", "04", "05", "12", "16", "51", "56", "65"]) + expected = { + "030T": 2, + "120C": 1, + "210": 0, + "120U": 0, + "012": 9, + "102": 3, + "021U": 0, + "111U": 0, + "003": 8, + "030C": 0, + "021D": 9, + "201": 0, + "111D": 1, + "300": 0, + "120D": 0, + "021C": 2, + } + actual = dict.fromkeys(expected, 0) + for node in G.nodes(): + node_triad_census = nx.triadic_census(G, nodelist=[node]) + for triad_key in expected: + actual[triad_key] += node_triad_census[triad_key] + # Divide all counts by 3 + for k, v in actual.items(): + actual[k] //= 3 + assert expected == actual + + +@pytest.mark.parametrize("N", [5, 10]) +def test_triadic_census_on_random_graph(N): + G = nx.binomial_graph(N, 0.3, directed=True, seed=42) + tc1 = nx.triadic_census(G) + tbt = nx.triads_by_type(G) + tc2 = {tt: len(tbt[tt]) for tt in tc1} + assert tc1 == tc2 + + for n in G: + tc1 = nx.triadic_census(G, nodelist={n}) + tc2 = {tt: sum(1 for t in tbt.get(tt, []) if n in t) for tt in tc1} + assert tc1 == tc2 + + for ns in itertools.combinations(G, 2): + ns = set(ns) + tc1 = nx.triadic_census(G, nodelist=ns) + tc2 = { + tt: sum(1 for t in tbt.get(tt, []) if any(n in ns for n in t)) for tt in tc1 + } + assert tc1 == tc2 + + for ns in itertools.combinations(G, 3): + ns = set(ns) + tc1 = nx.triadic_census(G, nodelist=ns) + tc2 = { + tt: sum(1 for t in tbt.get(tt, []) if any(n in ns for n in t)) for tt in tc1 + } + assert tc1 == tc2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py new file mode 100644 index 0000000..248206e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_vitality.py @@ -0,0 +1,41 @@ +import networkx as nx + + +class TestClosenessVitality: + def test_unweighted(self): + G = nx.cycle_graph(3) + vitality = nx.closeness_vitality(G) + assert vitality == {0: 2, 1: 2, 2: 2} + + def test_weighted(self): + G = nx.Graph() + nx.add_cycle(G, [0, 1, 2], weight=2) + vitality = nx.closeness_vitality(G, weight="weight") + assert vitality == {0: 4, 1: 4, 2: 4} + + def test_unweighted_digraph(self): + G = nx.DiGraph(nx.cycle_graph(3)) + vitality = nx.closeness_vitality(G) + assert vitality == {0: 4, 1: 4, 2: 4} + + def test_weighted_digraph(self): + G = nx.DiGraph() + nx.add_cycle(G, [0, 1, 2], weight=2) + nx.add_cycle(G, [2, 1, 0], weight=2) + vitality = nx.closeness_vitality(G, weight="weight") + assert vitality == {0: 8, 1: 8, 2: 8} + + def test_weighted_multidigraph(self): + G = nx.MultiDiGraph() + 
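+ # With two opposed directed cycles of weight-2 edges, every ordered + # pair of nodes is at distance 2, so the all-pairs distance total is + # 12; removing any node leaves a total of 4, giving each node a + # closeness vitality of 12 - 4 = 8.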
nx.add_cycle(G, [0, 1, 2], weight=2) + nx.add_cycle(G, [2, 1, 0], weight=2) + vitality = nx.closeness_vitality(G, weight="weight") + assert vitality == {0: 8, 1: 8, 2: 8} + + def test_disconnecting_graph(self): + """Tests that the closeness vitality of a node whose removal + disconnects the graph is negative infinity. + + """ + G = nx.path_graph(3) + assert nx.closeness_vitality(G, node=1) == -float("inf") diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py new file mode 100644 index 0000000..3269ae6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_voronoi.py @@ -0,0 +1,103 @@ +import networkx as nx +from networkx.utils import pairwise + + +class TestVoronoiCells: + """Unit tests for the Voronoi cells function.""" + + def test_isolates(self): + """Tests that a graph with isolated nodes has all isolates in + one block of the partition. + + """ + G = nx.empty_graph(5) + cells = nx.voronoi_cells(G, {0, 2, 4}) + expected = {0: {0}, 2: {2}, 4: {4}, "unreachable": {1, 3}} + assert expected == cells + + def test_undirected_unweighted(self): + G = nx.cycle_graph(6) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0, 1, 5}, 3: {2, 3, 4}} + assert expected == cells + + def test_directed_unweighted(self): + # This is the singly-linked directed cycle graph on six nodes. + G = nx.DiGraph(pairwise(range(6), cyclic=True)) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0, 1, 2}, 3: {3, 4, 5}} + assert expected == cells + + def test_directed_inward(self): + """Tests that reversing the graph gives the "inward" Voronoi + partition. + + """ + # This is the singly-linked reverse directed cycle graph on six nodes. + G = nx.DiGraph(pairwise(range(6), cyclic=True)) + G = G.reverse(copy=False) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0, 4, 5}, 3: {1, 2, 3}} + assert expected == cells + + def test_undirected_weighted(self): + edges = [(0, 1, 10), (1, 2, 1), (2, 3, 1)] + G = nx.Graph() + G.add_weighted_edges_from(edges) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0}, 3: {1, 2, 3}} + assert expected == cells + + def test_directed_weighted(self): + edges = [(0, 1, 10), (1, 2, 1), (2, 3, 1), (3, 2, 1), (2, 1, 1)] + G = nx.DiGraph() + G.add_weighted_edges_from(edges) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0}, 3: {1, 2, 3}} + assert expected == cells + + def test_multigraph_unweighted(self): + """Tests that the Voronoi cells for a multigraph are the same as + for a simple graph. + + """ + edges = [(0, 1), (1, 2), (2, 3)] + G = nx.MultiGraph(2 * edges) + H = nx.Graph(G) + G_cells = nx.voronoi_cells(G, {0, 3}) + H_cells = nx.voronoi_cells(H, {0, 3}) + assert G_cells == H_cells + + def test_multidigraph_unweighted(self): + # This is the twice-singly-linked directed cycle graph on six nodes. 
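+ # Since parallel arcs cannot change shortest-path distances, the + # Voronoi cells must match those of the collapsed simple digraph H.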
+ edges = list(pairwise(range(6), cyclic=True)) + G = nx.MultiDiGraph(2 * edges) + H = nx.DiGraph(G) + G_cells = nx.voronoi_cells(G, {0, 3}) + H_cells = nx.voronoi_cells(H, {0, 3}) + assert G_cells == H_cells + + def test_multigraph_weighted(self): + edges = [(0, 1, 10), (0, 1, 10), (1, 2, 1), (1, 2, 100), (2, 3, 1), (2, 3, 100)] + G = nx.MultiGraph() + G.add_weighted_edges_from(edges) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0}, 3: {1, 2, 3}} + assert expected == cells + + def test_multidigraph_weighted(self): + edges = [ + (0, 1, 10), + (0, 1, 10), + (1, 2, 1), + (2, 3, 1), + (3, 2, 10), + (3, 2, 1), + (2, 1, 10), + (2, 1, 1), + ] + G = nx.MultiDiGraph() + G.add_weighted_edges_from(edges) + cells = nx.voronoi_cells(G, {0, 3}) + expected = {0: {0}, 3: {1, 2, 3}} + assert expected == cells diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py new file mode 100644 index 0000000..7a6b323 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_walks.py @@ -0,0 +1,54 @@ +"""Unit tests for the :mod:`networkx.algorithms.walks` module.""" + +import pytest + +import networkx as nx + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +def test_directed(): + G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + num_walks = nx.number_of_walks(G, 3) + expected = {0: {0: 1, 1: 0, 2: 0}, 1: {0: 0, 1: 1, 2: 0}, 2: {0: 0, 1: 0, 2: 1}} + assert num_walks == expected + + +def test_undirected(): + G = nx.cycle_graph(3) + num_walks = nx.number_of_walks(G, 3) + expected = {0: {0: 2, 1: 3, 2: 3}, 1: {0: 3, 1: 2, 2: 3}, 2: {0: 3, 1: 3, 2: 2}} + assert num_walks == expected + + +def test_non_integer_nodes(): + G = nx.DiGraph([("A", "B"), ("B", "C"), ("C", "A")]) + num_walks = nx.number_of_walks(G, 2) + expected = { + "A": {"A": 0, "B": 0, "C": 1}, + "B": {"A": 1, "B": 0, "C": 0}, + "C": {"A": 0, "B": 1, "C": 0}, + } + assert num_walks == expected + + +def test_zero_length(): + G = nx.cycle_graph(3) + num_walks = nx.number_of_walks(G, 0) + expected = {0: {0: 1, 1: 0, 2: 0}, 1: {0: 0, 1: 1, 2: 0}, 2: {0: 0, 1: 0, 2: 1}} + assert num_walks == expected + + +def test_negative_length_exception(): + G = nx.cycle_graph(3) + with pytest.raises(ValueError): + nx.number_of_walks(G, -1) + + +def test_hidden_weight_attr(): + G = nx.cycle_graph(3) + G.add_edge(1, 2, weight=5) + num_walks = nx.number_of_walks(G, 3) + expected = {0: {0: 2, 1: 3, 2: 3}, 1: {0: 3, 1: 2, 2: 3}, 2: {0: 3, 1: 3, 2: 2}} + assert num_walks == expected diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py new file mode 100644 index 0000000..aded951 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tests/test_wiener.py @@ -0,0 +1,123 @@ +import networkx as nx + + +def test_wiener_index_of_disconnected_graph(): + assert nx.wiener_index(nx.empty_graph(2)) == float("inf") + + +def test_wiener_index_of_directed_graph(): + G = nx.complete_graph(3) + H = nx.DiGraph(G) + assert (2 * nx.wiener_index(G)) == nx.wiener_index(H) + + +def test_wiener_index_of_complete_graph(): + n = 10 + G = nx.complete_graph(n) + assert nx.wiener_index(G) == (n * (n - 1) / 2) + + +def test_wiener_index_of_path_graph(): + # In P_n, there are n - 1 pairs of vertices at distance one, n - + # 2 pairs at distance two, n - 3 at distance three, ..., 1 at + # distance n - 1, so the Wiener index 
should be + # + # 1 * (n - 1) + 2 * (n - 2) + ... + (n - 2) * 2 + (n - 1) * 1 + # + # For example, in P_5, + # + # 1 * 4 + 2 * 3 + 3 * 2 + 4 * 1 = 2 (1 * 4 + 2 * 3) + # + # and in P_6, + # + # 1 * 5 + 2 * 4 + 3 * 3 + 4 * 2 + 5 * 1 = 2 (1 * 5 + 2 * 4) + 3 * 3 + # + # assuming n is *odd*, this gives the formula + # + # 2 \sum_{i = 1}^{(n - 1) / 2} [i * (n - i)] + # + # assuming n is *even*, this gives the formula + # + # 2 \sum_{i = 1}^{n / 2} [i * (n - i)] - (n / 2) ** 2 + # + n = 9 + G = nx.path_graph(n) + expected = 2 * sum(i * (n - i) for i in range(1, (n // 2) + 1)) + actual = nx.wiener_index(G) + assert expected == actual + + +def test_schultz_and_gutman_index_of_disconnected_graph(): + n = 4 + G = nx.Graph() + G.add_nodes_from(list(range(1, n + 1))) + expected = float("inf") + + G.add_edge(1, 2) + G.add_edge(3, 4) + + actual_1 = nx.schultz_index(G) + actual_2 = nx.gutman_index(G) + + assert expected == actual_1 + assert expected == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_1(): + n = 3 + m = 3 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_bipartite_graph_2(): + n = 2 + m = 5 + cbg = nx.complete_bipartite_graph(n, m) + + expected_1 = n * m * (n + m) + 2 * n * (n - 1) * m + 2 * m * (m - 1) * n + actual_1 = nx.schultz_index(cbg) + + expected_2 = n * m * (n * m) + n * (n - 1) * m * m + m * (m - 1) * n * n + actual_2 = nx.gutman_index(cbg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_complete_graph(): + n = 5 + cg = nx.complete_graph(n) + + expected_1 = n * (n - 1) * (n - 1) + actual_1 = nx.schultz_index(cg) + + assert expected_1 == actual_1 + + expected_2 = n * (n - 1) * (n - 1) * (n - 1) / 2 + actual_2 = nx.gutman_index(cg) + + assert expected_2 == actual_2 + + +def test_schultz_and_gutman_index_of_odd_cycle_graph(): + k = 5 + n = 2 * k + 1 + ocg = nx.cycle_graph(n) + + expected_1 = 2 * n * k * (k + 1) + actual_1 = nx.schultz_index(ocg) + + expected_2 = 2 * n * k * (k + 1) + actual_2 = nx.gutman_index(ocg) + + assert expected_1 == actual_1 + assert expected_2 == actual_2 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py new file mode 100644 index 0000000..21705cc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/threshold.py @@ -0,0 +1,1035 @@ +""" +Threshold Graphs - Creation, manipulation and identification. +""" + +from math import sqrt + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["is_threshold_graph", "find_threshold_graph"] + + +@nx._dispatchable +def is_threshold_graph(G): + """ + Returns `True` if `G` is a threshold graph. + + Parameters + ---------- + G : NetworkX graph instance + An instance of `Graph`, `DiGraph`, `MultiGraph` or `MultiDiGraph` + + Returns + ------- + bool + `True` if `G` is a threshold graph, `False` otherwise. + + Examples + -------- + >>> from networkx.algorithms.threshold import is_threshold_graph + >>> G = nx.path_graph(3) + >>> is_threshold_graph(G) + True + >>> G = nx.barbell_graph(3, 3) + >>> is_threshold_graph(G) + False + + References + ---------- + .. 
[1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph + """ + return is_threshold_sequence([d for n, d in G.degree()]) + + +def is_threshold_sequence(degree_sequence): + """ + Returns True if the sequence is a threshold degree sequence. + + Uses the property that a threshold graph must be constructed by + adding either dominating or isolated nodes. Thus, it can be + deconstructed iteratively by removing a node of degree zero or a + node that connects to the remaining nodes. If this deconstruction + fails then the sequence is not a threshold sequence. + """ + ds = degree_sequence[:] # get a copy so we don't destroy original + ds.sort() + while ds: + if ds[0] == 0: # if isolated node + ds.pop(0) # remove it + continue + if ds[-1] != len(ds) - 1: # is the largest degree node dominating? + return False # no, not a threshold degree sequence + ds.pop() # yes, largest is the dominating node + ds = [d - 1 for d in ds] # remove it and decrement all degrees + return True + + +def creation_sequence(degree_sequence, with_labels=False, compact=False): + """ + Determines the creation sequence for the given threshold degree sequence. + + The creation sequence is a list of single characters 'd' + or 'i': 'd' for dominating or 'i' for isolated vertices. + Dominating vertices are connected to all vertices present when it + is added. The first node added is by convention 'd'. + This list can be converted to a string if desired using "".join(cs) + + If with_labels==True: + Returns a list of 2-tuples containing the vertex number + and a character 'd' or 'i' which describes the type of vertex. + + If compact==True: + Returns the creation sequence in a compact form that is the number + of 'i's and 'd's alternating. + Examples: + [1,2,2,3] represents d,i,i,d,d,i,i,i + [3,1,2] represents d,d,d,i,d,d + + Notice that the first number is the first vertex to be used for + construction and so is always 'd'. + + with_labels and compact cannot both be True. + + Returns None if the sequence is not a threshold sequence + """ + if with_labels and compact: + raise ValueError("compact sequences cannot be labeled") + + # make an indexed copy + if isinstance(degree_sequence, dict): # labeled degree sequence + ds = [[degree, label] for (label, degree) in degree_sequence.items()] + else: + ds = [[d, i] for i, d in enumerate(degree_sequence)] + ds.sort() + cs = [] # creation sequence + while ds: + if ds[0][0] == 0: # isolated node + (d, v) = ds.pop(0) + if len(ds) > 0: # make sure we start with a d + cs.insert(0, (v, "i")) + else: + cs.insert(0, (v, "d")) + continue + if ds[-1][0] != len(ds) - 1: # Not dominating node + return None # not a threshold degree sequence + (d, v) = ds.pop() + cs.insert(0, (v, "d")) + ds = [[d[0] - 1, d[1]] for d in ds] # decrement due to removing node + + if with_labels: + return cs + if compact: + return make_compact(cs) + return [v[1] for v in cs] # not labeled + + +def make_compact(creation_sequence): + """ + Returns the creation sequence in a compact form + that is the number of 'i's and 'd's alternating. + + Examples + -------- + >>> from networkx.algorithms.threshold import make_compact + >>> make_compact(["d", "i", "i", "d", "d", "i", "i", "i"]) + [1, 2, 2, 3] + >>> make_compact(["d", "d", "d", "i", "d", "d"]) + [3, 1, 2] + + Notice that the first number is the first vertex + to be used for construction and so is always 'd'. + + Labeled creation sequences lose their labels in the + compact representation. 
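+ For example, a labeled sequence for 'ddid' compacts the same way: + + >>> make_compact([(1, "d"), (2, "d"), (3, "i"), (0, "d")]) + [2, 1, 1]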
+ + >>> make_compact([3, 1, 2]) + [3, 1, 2] + """ + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + cs = creation_sequence[:] + elif isinstance(first, tuple): # labeled creation sequence + cs = [s[1] for s in creation_sequence] + elif isinstance(first, int): # compact creation sequence + return creation_sequence + else: + raise TypeError("Not a valid creation sequence type") + + ccs = [] + count = 1 # count the run lengths of d's or i's. + for i in range(1, len(cs)): + if cs[i] == cs[i - 1]: + count += 1 + else: + ccs.append(count) + count = 1 + ccs.append(count) # don't forget the last one + return ccs + + +def uncompact(creation_sequence): + """ + Converts a compact creation sequence for a threshold + graph to a standard creation sequence (unlabeled). + If the creation_sequence is already standard, return it. + See creation_sequence. + """ + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + return creation_sequence + elif isinstance(first, tuple): # labeled creation sequence + return creation_sequence + elif isinstance(first, int): # compact creation sequence + ccscopy = creation_sequence[:] + else: + raise TypeError("Not a valid creation sequence type") + cs = [] + while ccscopy: + cs.extend(ccscopy.pop(0) * ["d"]) + if ccscopy: + cs.extend(ccscopy.pop(0) * ["i"]) + return cs + + +def creation_sequence_to_weights(creation_sequence): + """ + Returns a list of node weights which create the threshold + graph designated by the creation sequence. The weights + are scaled so that the threshold is 1.0. The order of the + nodes is the same as that in the creation sequence. + """ + # Turn input sequence into a labeled creation sequence + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + if isinstance(creation_sequence, list): + wseq = creation_sequence[:] + else: + wseq = list(creation_sequence) # string like 'ddidid' + elif isinstance(first, tuple): # labeled creation sequence + wseq = [v[1] for v in creation_sequence] + elif isinstance(first, int): # compact creation sequence + wseq = uncompact(creation_sequence) + else: + raise TypeError("Not a valid creation sequence type") + # pass through twice--first backwards + wseq.reverse() + w = 0 + prev = "i" + for j, s in enumerate(wseq): + if s == "i": + wseq[j] = w + prev = s + elif prev == "i": + prev = s + w += 1 + wseq.reverse() # now pass through forwards + for j, s in enumerate(wseq): + if s == "d": + wseq[j] = w + prev = s + elif prev == "d": + prev = s + w += 1 + # Now scale weights + if prev == "d": + w += 1 + wscale = 1 / w + return [ww * wscale for ww in wseq] + # return wseq + + +def weights_to_creation_sequence( + weights, threshold=1, with_labels=False, compact=False +): + """ + Returns a creation sequence for a threshold graph + determined by the weights and threshold given as input. + If the sum of two node weights is greater than the + threshold value, an edge is created between these nodes. + + The creation sequence is a list of single characters 'd' + or 'i': 'd' for dominating or 'i' for isolated vertices. + Dominating vertices are connected to all vertices present + when it is added. The first node added is by convention 'd'. + + If with_labels==True: + Returns a list of 2-tuples containing the vertex number + and a character 'd' or 'i' which describes the type of vertex. + + If compact==True: + Returns the creation sequence in a compact form that is the number + of 'i's and 'd's alternating. 
+ Examples: + [1,2,2,3] represents d,i,i,d,d,i,i,i + [3,1,2] represents d,d,d,i,d,d + + Notice that the first number is the first vertex to be used for + construction and so is always 'd'. + + with_labels and compact cannot both be True. + """ + if with_labels and compact: + raise ValueError("compact sequences cannot be labeled") + + # make an indexed copy + if isinstance(weights, dict): # labeled weights + wseq = [[w, label] for (label, w) in weights.items()] + else: + wseq = [[w, i] for i, w in enumerate(weights)] + wseq.sort() + cs = [] # creation sequence + cutoff = threshold - wseq[-1][0] + while wseq: + if wseq[0][0] < cutoff: # isolated node + (w, label) = wseq.pop(0) + cs.append((label, "i")) + else: + (w, label) = wseq.pop() + cs.append((label, "d")) + cutoff = threshold - wseq[-1][0] + if len(wseq) == 1: # make sure we start with a d + (w, label) = wseq.pop() + cs.append((label, "d")) + # put in correct order + cs.reverse() + + if with_labels: + return cs + if compact: + return make_compact(cs) + return [v[1] for v in cs] # not labeled + + +# Manipulating NetworkX.Graphs in context of threshold graphs +@nx._dispatchable(graphs=None, returns_graph=True) +def threshold_graph(creation_sequence, create_using=None): + """ + Create a threshold graph from the creation sequence or compact + creation_sequence. + + The input sequence can be a + + creation sequence (e.g. ['d','i','d','d','d','i']) + labeled creation sequence (e.g. [(0,'d'),(2,'d'),(1,'i')]) + compact creation sequence (e.g. [2,1,1,2,0]) + + Use cs=creation_sequence(degree_sequence,labeled=True) + to convert a degree sequence to a creation sequence. + + Returns None if the sequence is not valid + """ + # Turn input sequence into a labeled creation sequence + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + ci = list(enumerate(creation_sequence)) + elif isinstance(first, tuple): # labeled creation sequence + ci = creation_sequence[:] + elif isinstance(first, int): # compact creation sequence + cs = uncompact(creation_sequence) + ci = list(enumerate(cs)) + else: + print("not a valid creation sequence type") + return None + + G = nx.empty_graph(0, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + G.name = "Threshold Graph" + + # add nodes and edges + # if type is 'i' just add nodea + # if type is a d connect to everything previous + while ci: + (v, node_type) = ci.pop(0) + if node_type == "d": # dominating type, connect to all existing nodes + # We use `for u in list(G):` instead of + # `for u in G:` because we edit the graph `G` in + # the loop. Hence using an iterator will result in + # `RuntimeError: dictionary changed size during iteration` + for u in list(G): + G.add_edge(v, u) + G.add_node(v) + return G + + +@nx._dispatchable +def find_alternating_4_cycle(G): + """ + Returns False if there aren't any alternating 4 cycles. + Otherwise returns the cycle as [a,b,c,d] where (a,b) + and (c,d) are edges and (a,c) and (b,d) are not. + """ + for u, v in G.edges(): + for w in G.nodes(): + if not G.has_edge(u, w) and u != w: + for x in G.neighbors(w): + if not G.has_edge(v, x) and v != x: + return [u, v, w, x] + return False + + +@nx._dispatchable(returns_graph=True) +def find_threshold_graph(G, create_using=None): + """ + Returns a threshold subgraph that is close to largest in `G`. + + The threshold graph will contain the largest degree node in G. 
+ + Parameters + ---------- + G : NetworkX graph instance + An instance of `Graph`, or `MultiDiGraph` + create_using : NetworkX graph class or `None` (default), optional + Type of graph to use when constructing the threshold graph. + If `None`, infer the appropriate graph type from the input. + + Returns + ------- + graph : + A graph instance representing the threshold graph + + Examples + -------- + >>> from networkx.algorithms.threshold import find_threshold_graph + >>> G = nx.barbell_graph(3, 3) + >>> T = find_threshold_graph(G) + >>> T.nodes # may vary + NodeView((7, 8, 5, 6)) + + References + ---------- + .. [1] Threshold graphs: https://en.wikipedia.org/wiki/Threshold_graph + """ + return threshold_graph(find_creation_sequence(G), create_using) + + +@nx._dispatchable +def find_creation_sequence(G): + """ + Find a threshold subgraph that is close to largest in G. + Returns the labeled creation sequence of that threshold graph. + """ + cs = [] + # get a local pointer to the working part of the graph + H = G + while H.order() > 0: + # get new degree sequence on subgraph + dsdict = dict(H.degree()) + ds = [(d, v) for v, d in dsdict.items()] + ds.sort() + # Update threshold graph nodes + if ds[-1][0] == 0: # all are isolated + cs.extend(zip(dsdict, ["i"] * (len(ds) - 1) + ["d"])) + break # Done! + # pull off isolated nodes + while ds[0][0] == 0: + (d, iso) = ds.pop(0) + cs.append((iso, "i")) + # find new biggest node + (d, bigv) = ds.pop() + # add edges of star to t_g + cs.append((bigv, "d")) + # form subgraph of neighbors of big node + H = H.subgraph(H.neighbors(bigv)) + cs.reverse() + return cs + + +# Properties of Threshold Graphs +def triangles(creation_sequence): + """ + Compute number of triangles in the threshold graph with the + given creation sequence. + """ + # shortcut algorithm that doesn't require computing number + # of triangles at each node. + cs = creation_sequence # alias + dr = cs.count("d") # number of d's in sequence + ntri = dr * (dr - 1) * (dr - 2) / 6 # number of triangles in clique of nd d's + # now add dr choose 2 triangles for every 'i' in sequence where + # dr is the number of d's to the right of the current i + for i, typ in enumerate(cs): + if typ == "i": + ntri += dr * (dr - 1) / 2 + else: + dr -= 1 + return ntri + + +def triangle_sequence(creation_sequence): + """ + Return triangle sequence for the given threshold graph creation sequence. + + """ + cs = creation_sequence + seq = [] + dr = cs.count("d") # number of d's to the right of the current pos + dcur = (dr - 1) * (dr - 2) // 2 # number of triangles through a node of clique dr + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run + for i, sym in enumerate(cs): + if sym == "d": + drun += 1 + tri = dcur + (dr - 1) * irun # new triangles at this d + else: # cs[i]="i": + if prevsym == "d": # new string of i's + dcur += (dr - 1) * irun # accumulate shared shortest paths + irun = 0 # reset i run counter + dr -= drun # reduce number of d's to right + drun = 0 # reset d run counter + irun += 1 + tri = dr * (dr - 1) // 2 # new triangles at this i + seq.append(tri) + prevsym = sym + return seq + + +def cluster_sequence(creation_sequence): + """ + Return cluster sequence for the given threshold graph creation sequence. 
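+
+    For example, the creation sequence ``["d", "d", "d"]`` builds a
+    triangle, in which every node has clustering coefficient 1 (example
+    added for illustration):
+
+    >>> from networkx.algorithms.threshold import cluster_sequence
+    >>> cluster_sequence(["d", "d", "d"])
+    [1.0, 1.0, 1.0]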
+ """ + triseq = triangle_sequence(creation_sequence) + degseq = degree_sequence(creation_sequence) + cseq = [] + for i, deg in enumerate(degseq): + tri = triseq[i] + if deg <= 1: # isolated vertex or single pair gets cc 0 + cseq.append(0) + continue + max_size = (deg * (deg - 1)) // 2 + cseq.append(tri / max_size) + return cseq + + +def degree_sequence(creation_sequence): + """ + Return degree sequence for the threshold graph with the given + creation sequence + """ + cs = creation_sequence # alias + seq = [] + rd = cs.count("d") # number of d to the right + for i, sym in enumerate(cs): + if sym == "d": + rd -= 1 + seq.append(rd + i) + else: + seq.append(rd) + return seq + + +def density(creation_sequence): + """ + Return the density of the graph with this creation_sequence. + The density is the fraction of possible edges present. + """ + N = len(creation_sequence) + two_size = sum(degree_sequence(creation_sequence)) + two_possible = N * (N - 1) + den = two_size / two_possible + return den + + +def degree_correlation(creation_sequence): + """ + Return the degree-degree correlation over all edges. + """ + cs = creation_sequence + s1 = 0 # deg_i*deg_j + s2 = 0 # deg_i^2+deg_j^2 + s3 = 0 # deg_i+deg_j + m = 0 # number of edges + rd = cs.count("d") # number of d nodes to the right + rdi = [i for i, sym in enumerate(cs) if sym == "d"] # index of "d"s + ds = degree_sequence(cs) + for i, sym in enumerate(cs): + if sym == "d": + if i != rdi[0]: + print("Logic error in degree_correlation", i, rdi) + raise ValueError + rdi.pop(0) + degi = ds[i] + for dj in rdi: + degj = ds[dj] + s1 += degj * degi + s2 += degi**2 + degj**2 + s3 += degi + degj + m += 1 + denom = 2 * m * s2 - s3 * s3 + numer = 4 * m * s1 - s3 * s3 + if denom == 0: + if numer == 0: + return 1 + raise ValueError(f"Zero Denominator but Numerator is {numer}") + return numer / denom + + +def shortest_path(creation_sequence, u, v): + """ + Find the shortest path between u and v in a + threshold graph G with the given creation_sequence. + + For an unlabeled creation_sequence, the vertices + u and v must be integers in (0,len(sequence)) referring + to the position of the desired vertices in the sequence. + + For a labeled creation_sequence, u and v are labels of vertices. + + Use cs=creation_sequence(degree_sequence,with_labels=True) + to convert a degree sequence to a creation sequence. + + Returns a list of vertices from u to v. 
+ Example: if they are neighbors, it returns [u,v] + """ + # Turn input sequence into a labeled creation sequence + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + cs = [(i, creation_sequence[i]) for i in range(len(creation_sequence))] + elif isinstance(first, tuple): # labeled creation sequence + cs = creation_sequence[:] + elif isinstance(first, int): # compact creation sequence + ci = uncompact(creation_sequence) + cs = [(i, ci[i]) for i in range(len(ci))] + else: + raise TypeError("Not a valid creation sequence type") + + verts = [s[0] for s in cs] + if v not in verts: + raise ValueError(f"Vertex {v} not in graph from creation_sequence") + if u not in verts: + raise ValueError(f"Vertex {u} not in graph from creation_sequence") + # Done checking + if u == v: + return [u] + + uindex = verts.index(u) + vindex = verts.index(v) + bigind = max(uindex, vindex) + if cs[bigind][1] == "d": + return [u, v] + # must be that cs[bigind][1]=='i' + cs = cs[bigind:] + while cs: + vert = cs.pop() + if vert[1] == "d": + return [u, vert[0], v] + # All after u are type 'i' so no connection + return -1 + + +def shortest_path_length(creation_sequence, i): + """ + Return the shortest path length from indicated node to + every other node for the threshold graph with the given + creation sequence. + Node is indicated by index i in creation_sequence unless + creation_sequence is labeled in which case, i is taken to + be the label of the node. + + Paths lengths in threshold graphs are at most 2. + Length to unreachable nodes is set to -1. + """ + # Turn input sequence into a labeled creation sequence + first = creation_sequence[0] + if isinstance(first, str): # creation sequence + if isinstance(creation_sequence, list): + cs = creation_sequence[:] + else: + cs = list(creation_sequence) + elif isinstance(first, tuple): # labeled creation sequence + cs = [v[1] for v in creation_sequence] + i = [v[0] for v in creation_sequence].index(i) + elif isinstance(first, int): # compact creation sequence + cs = uncompact(creation_sequence) + else: + raise TypeError("Not a valid creation sequence type") + + # Compute + N = len(cs) + spl = [2] * N # length 2 to every node + spl[i] = 0 # except self which is 0 + # 1 for all d's to the right + for j in range(i + 1, N): + if cs[j] == "d": + spl[j] = 1 + if cs[i] == "d": # 1 for all nodes to the left + for j in range(i): + spl[j] = 1 + # and -1 for any trailing i to indicate unreachable + for j in range(N - 1, 0, -1): + if cs[j] == "d": + break + spl[j] = -1 + return spl + + +def betweenness_sequence(creation_sequence, normalized=True): + """ + Return betweenness for the threshold graph with the given creation + sequence. The result is unscaled. To scale the values + to the interval [0,1] divide by (n-1)*(n-2). 
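+
+    For example, for the star on four nodes (creation sequence
+    ``d, i, i, d``) only the final dominating node lies on any shortest
+    path; with ``normalized=False`` its raw value equals
+    (n-1)*(n-2) = 6 (example added for illustration):
+
+    >>> from networkx.algorithms.threshold import betweenness_sequence
+    >>> betweenness_sequence(["d", "i", "i", "d"], normalized=False)
+    [0.0, 0.0, 0.0, 6.0]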
+ """ + cs = creation_sequence + seq = [] # betweenness + lastchar = "d" # first node is always a 'd' + dr = float(cs.count("d")) # number of d's to the right of current pos + irun = 0 # number of i's in the last run + drun = 0 # number of d's in the last run + dlast = 0.0 # betweenness of last d + for i, c in enumerate(cs): + if c == "d": # cs[i]=="d": + # betweenness = amt shared with earlier d's and i's + # + new isolated nodes covered + # + new paths to all previous nodes + b = dlast + (irun - 1) * irun / dr + 2 * irun * (i - drun - irun) / dr + drun += 1 # update counter + else: # cs[i]="i": + if lastchar == "d": # if this is a new run of i's + dlast = b # accumulate betweenness + dr -= drun # update number of d's to the right + drun = 0 # reset d counter + irun = 0 # reset i counter + b = 0 # isolated nodes have zero betweenness + irun += 1 # add another i to the run + seq.append(float(b)) + lastchar = c + + # normalize by the number of possible shortest paths + if normalized: + order = len(cs) + scale = 1.0 / ((order - 1) * (order - 2)) + seq = [s * scale for s in seq] + + return seq + + +def eigenvectors(creation_sequence): + """ + Return a 2-tuple of Laplacian eigenvalues and eigenvectors + for the threshold network with creation_sequence. + The first value is a list of eigenvalues. + The second value is a list of eigenvectors. + The lists are in the same order so corresponding eigenvectors + and eigenvalues are in the same position in the two lists. + + Notice that the order of the eigenvalues returned by eigenvalues(cs) + may not correspond to the order of these eigenvectors. + """ + ccs = make_compact(creation_sequence) + N = sum(ccs) + vec = [0] * N + val = vec[:] + # get number of type d nodes to the right (all for first node) + dr = sum(ccs[::2]) + + nn = ccs[0] + vec[0] = [1.0 / sqrt(N)] * N + val[0] = 0 + e = dr + dr -= nn + type_d = True + i = 1 + dd = 1 + while dd < nn: + scale = 1.0 / sqrt(dd * dd + i) + vec[i] = i * [-scale] + [dd * scale] + [0] * (N - i - 1) + val[i] = e + i += 1 + dd += 1 + if len(ccs) == 1: + return (val, vec) + for nn in ccs[1:]: + scale = 1.0 / sqrt(nn * i * (i + nn)) + vec[i] = i * [-nn * scale] + nn * [i * scale] + [0] * (N - i - nn) + # find eigenvalue + type_d = not type_d + if type_d: + e = i + dr + dr -= nn + else: + e = dr + val[i] = e + st = i + i += 1 + dd = 1 + while dd < nn: + scale = 1.0 / sqrt(i - st + dd * dd) + vec[i] = [0] * st + (i - st) * [-scale] + [dd * scale] + [0] * (N - i - 1) + val[i] = e + i += 1 + dd += 1 + return (val, vec) + + +def spectral_projection(u, eigenpairs): + """ + Returns the coefficients of each eigenvector + in a projection of the vector u onto the normalized + eigenvectors which are contained in eigenpairs. + + eigenpairs should be a list of two objects. The + first is a list of eigenvalues and the second a list + of eigenvectors. The eigenvectors should be lists. + + There's not a lot of error checking on lengths of + arrays, etc. so be careful. + """ + coeff = [] + evect = eigenpairs[1] + for ev in evect: + c = sum(evv * uv for (evv, uv) in zip(ev, u)) + coeff.append(c) + return coeff + + +def eigenvalues(creation_sequence): + """ + Return sequence of eigenvalues of the Laplacian of the threshold + graph for the given creation_sequence. + + Based on the Ferrer's diagram method. The spectrum is integral + and is the conjugate of the degree sequence. 
+
+    See::
+
+        @Article{degree-merris-1994,
+            author = {Russel Merris},
+            title = {Degree maximal graphs are Laplacian integral},
+            journal = {Linear Algebra Appl.},
+            year = {1994},
+            volume = {199},
+            pages = {381--389},
+        }
+
+    """
+    degseq = degree_sequence(creation_sequence)
+    degseq.sort()
+    eiglist = []  # zero is always one eigenvalue
+    eig = 0
+    row = len(degseq)
+    bigdeg = degseq.pop()
+    while row:
+        if bigdeg < row:
+            eiglist.append(eig)
+            row -= 1
+        else:
+            eig += 1
+            if degseq:
+                bigdeg = degseq.pop()
+            else:
+                bigdeg = 0
+    return eiglist
+
+
+# Threshold graph creation routines
+
+
+@py_random_state(2)
+def random_threshold_sequence(n, p, seed=None):
+    """
+    Create a random threshold sequence of size n.
+    A creation sequence is built by randomly choosing d's with
+    probability p and i's with probability 1-p.
+
+    s=nx.random_threshold_sequence(10,0.5)
+
+    returns a threshold sequence of length 10 with equal
+    probability of an i or a d at each position.
+
+    A "random" threshold graph can be built with
+
+    G=nx.threshold_graph(s)
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    """
+    if not (0 <= p <= 1):
+        raise ValueError("p must be in [0,1]")
+
+    cs = ["d"]  # threshold sequences always start with a d
+    for i in range(1, n):
+        if seed.random() < p:
+            cs.append("d")
+        else:
+            cs.append("i")
+    return cs
+
+
+# maybe *_d_threshold_sequence routines should
+# be (or be called from) a single routine with a more descriptive name
+# and a keyword parameter?
+def right_d_threshold_sequence(n, m):
+    """
+    Returns a "right-dominated" threshold sequence with `n` vertices and `m` edges.
+
+    Each vertex in the sequence is either dominant or isolated.
+    In the "right-dominated" version, once the basic sequence is formed,
+    isolated vertices may be flipped to dominant from the right in order
+    to reach the target number of edges.
+
+    Parameters
+    ----------
+    n : int
+        Number of vertices.
+    m : int
+        Number of edges.
+
+    Returns
+    -------
+    A list of 'd' (dominant) and 'i' (isolated) forming a right-dominated threshold sequence.
+
+    Raises
+    ------
+    ValueError
+        If `m` exceeds the maximum number of edges.
+
+    Examples
+    --------
+    >>> from networkx.algorithms.threshold import right_d_threshold_sequence
+    >>> right_d_threshold_sequence(5, 3)
+    ['d', 'i', 'i', 'd', 'i']
+    """
+
+    cs = ["d"] + ["i"] * (n - 1)  # create sequence with n isolated nodes
+
+    # m <= n - 1: disconnected case; flipping vertex m covers exactly m edges
+    if m < n:
+        cs[m] = "d"
+        return cs
+
+    # m > n * (n - 1) / 2: more edges than the complete graph can hold
+    if m > n * (n - 1) / 2:
+        raise ValueError("Too many edges for this many nodes.")
+
+    # connected case m > n - 1
+    ind = n - 1
+    sum = n - 1
+    while sum < m:
+        cs[ind] = "d"
+        ind -= 1
+        sum += ind
+    ind = m - (sum - ind)
+    cs[ind] = "d"
+    return cs
+
+
+def left_d_threshold_sequence(n, m):
+    """
+    Returns a "left-dominated" threshold sequence with `n` vertices and `m` edges.
+
+    Each vertex in the sequence is either dominant or isolated.
+    In the "left-dominated" version, once the basic sequence is formed,
+    isolated vertices may be flipped to dominant from the left in order
+    to reach the target number of edges.
+
+    Parameters
+    ----------
+    n : int
+        Number of vertices.
+    m : int
+        Number of edges.
+
+    Returns
+    -------
+    A list of 'd' (dominant) and 'i' (isolated) forming a left-dominated threshold sequence.
+
+    Raises
+    ------
+    ValueError
+        If `m` exceeds the maximum number of edges.
+
+    Examples
+    --------
+    For certain small cases, both left and right dominated versions produce
+    the same sequence.
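+    With ``n=4, m=3``, for example, both routines flip the same vertex
+    (example added for illustration):
+
+    >>> from networkx.algorithms.threshold import (
+    ...     left_d_threshold_sequence,
+    ...     right_d_threshold_sequence,
+    ... )
+    >>> left_d_threshold_sequence(4, 3)
+    ['d', 'i', 'i', 'd']
+    >>> right_d_threshold_sequence(4, 3)
+    ['d', 'i', 'i', 'd']
+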
+    However, for larger values of `m`, the difference in
+    flipping order becomes evident. For instance, compare the sequences for
+    ``n=6, m=8``:
+
+    >>> from networkx.algorithms.threshold import left_d_threshold_sequence
+    >>> seq = left_d_threshold_sequence(6, 8)
+    >>> seq
+    ['d', 'd', 'd', 'i', 'i', 'd']
+
+    In contrast, the right-dominated version yields:
+
+    >>> from networkx.algorithms.threshold import right_d_threshold_sequence
+    >>> right_seq = right_d_threshold_sequence(6, 8)
+    >>> right_seq
+    ['d', 'i', 'i', 'd', 'i', 'd']
+    """
+
+    cs = ["d"] + ["i"] * (n - 1)  # create sequence with n isolated nodes
+
+    # m <= n - 1: disconnected case; flipping vertex m covers exactly m edges
+    if m < n:
+        cs[m] = "d"
+        return cs
+
+    # m > n * (n - 1) / 2: more edges than the complete graph can hold
+    if m > n * (n - 1) / 2:
+        raise ValueError("Too many edges for this many nodes.")
+
+    # Connected case when m > n - 1
+    cs[n - 1] = "d"
+    sum = n - 1
+    ind = 1
+    while sum < m:
+        cs[ind] = "d"
+        sum += ind
+        ind += 1
+    if sum > m:  # be sure not to change the first vertex
+        cs[sum - m] = "i"
+    return cs
+
+
+@py_random_state(3)
+def swap_d(cs, p_split=1.0, p_combine=1.0, seed=None):
+    """
+    Perform a "swap" operation on a threshold sequence.
+
+    The swap preserves the number of nodes and edges
+    in the graph for the given sequence.
+    The resulting sequence is still a threshold sequence.
+
+    Perform one split and one combine operation on the
+    'd's of a creation sequence for a threshold graph.
+    This operation maintains the number of nodes and edges
+    in the graph, but shifts the edges from node to node
+    maintaining the threshold quality of the graph.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    """
+    # preprocess the creation sequence: indices of interior d's
+    # (the + 1 maps positions in the slice cs[1:-1] back to positions in cs)
+    dlist = [i + 1 for (i, node_type) in enumerate(cs[1:-1]) if node_type == "d"]
+    # split
+    if seed.random() < p_split:
+        choice = seed.choice(dlist)
+        split_to = seed.choice(range(choice))
+        flip_side = choice - split_to
+        if split_to != flip_side and cs[split_to] == "i" and cs[flip_side] == "i":
+            cs[choice] = "i"
+            cs[split_to] = "d"
+            cs[flip_side] = "d"
+            dlist.remove(choice)
+            # don't add or combine may reverse this action
+            # dlist.extend([split_to,flip_side])
+            # print >>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side)
+    # combine
+    if seed.random() < p_combine and dlist:
+        first_choice = seed.choice(dlist)
+        second_choice = seed.choice(dlist)
+        target = first_choice + second_choice
+        if target >= len(cs) or cs[target] == "d" or first_choice == second_choice:
+            return cs
+        # OK to combine
+        cs[first_choice] = "i"
+        cs[second_choice] = "i"
+        cs[target] = "d"
+        # print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target)
+
+    return cs
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py
new file mode 100644
index 0000000..d67cdcf
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/time_dependent.py
@@ -0,0 +1,142 @@
+"""Time dependent algorithms."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["cd_index"]
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(node_attrs={"time": None, "weight": 1})
+def cd_index(G, node, time_delta, *, time="time", weight=None):
+    r"""Compute the CD index for `node` within the graph `G`.
+ + Calculates the CD index for the given node of the graph, + considering only its predecessors who have the `time` attribute + smaller than or equal to the `time` attribute of the `node` + plus `time_delta`. + + Parameters + ---------- + G : graph + A directed networkx graph whose nodes have `time` attributes and optionally + `weight` attributes (if a weight is not given, it is considered 1). + node : node + The node for which the CD index is calculated. + time_delta : numeric or timedelta + Amount of time after the `time` attribute of the `node`. The value of + `time_delta` must support comparison with the `time` node attribute. For + example, if the `time` attribute of the nodes are `datetime.datetime` + objects, then `time_delta` should be a `datetime.timedelta` object. + time : string (Optional, default is "time") + The name of the node attribute that will be used for the calculations. + weight : string (Optional, default is None) + The name of the node attribute used as weight. + + Returns + ------- + float + The CD index calculated for the node `node` within the graph `G`. + + Raises + ------ + NetworkXError + If not all nodes have a `time` attribute or + `time_delta` and `time` attribute types are not compatible or + `n` equals 0. + + NetworkXNotImplemented + If `G` is a non-directed graph or a multigraph. + + Examples + -------- + >>> from datetime import datetime, timedelta + >>> G = nx.DiGraph() + >>> nodes = { + ... 1: {"time": datetime(2015, 1, 1)}, + ... 2: {"time": datetime(2012, 1, 1), "weight": 4}, + ... 3: {"time": datetime(2010, 1, 1)}, + ... 4: {"time": datetime(2008, 1, 1)}, + ... 5: {"time": datetime(2014, 1, 1)}, + ... } + >>> G.add_nodes_from([(n, nodes[n]) for n in nodes]) + >>> edges = [(1, 3), (1, 4), (2, 3), (3, 4), (3, 5)] + >>> G.add_edges_from(edges) + >>> delta = timedelta(days=5 * 365) + >>> nx.cd_index(G, 3, time_delta=delta, time="time") + 0.5 + >>> nx.cd_index(G, 3, time_delta=delta, time="time", weight="weight") + 0.12 + + Integers can also be used for the time values: + >>> node_times = {1: 2015, 2: 2012, 3: 2010, 4: 2008, 5: 2014} + >>> nx.set_node_attributes(G, node_times, "new_time") + >>> nx.cd_index(G, 3, time_delta=4, time="new_time") + 0.5 + >>> nx.cd_index(G, 3, time_delta=4, time="new_time", weight="weight") + 0.12 + + Notes + ----- + This method implements the algorithm for calculating the CD index, + as described in the paper by Funk and Owen-Smith [1]_. The CD index + is used in order to check how consolidating or destabilizing a patent + is, hence the nodes of the graph represent patents and the edges show + the citations between these patents. The mathematical model is given + below: + + .. math:: + CD_{t}=\frac{1}{n_{t}}\sum_{i=1}^{n}\frac{-2f_{it}b_{it}+f_{it}}{w_{it}}, + + where `f_{it}` equals 1 if `i` cites the focal patent else 0, `b_{it}` equals + 1 if `i` cites any of the focal patents successors else 0, `n_{t}` is the number + of forward citations in `i` and `w_{it}` is a matrix of weight for patent `i` + at time `t`. + + The `datetime.timedelta` package can lead to off-by-one issues when converting + from years to days. In the example above `timedelta(days=5 * 365)` looks like + 5 years, but it isn't because of leap year days. So it gives the same result + as `timedelta(days=4 * 365)`. But using `timedelta(days=5 * 365 + 1)` gives + a 5 year delta **for this choice of years** but may not if the 5 year gap has + more than 1 leap year. 
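+    For example (illustration only; 2012 is the single leap year in this
+    range):
+
+    >>> from datetime import datetime, timedelta
+    >>> datetime(2015, 1, 1) - datetime(2010, 1, 1) == timedelta(days=5 * 365)
+    False
+    >>> datetime(2015, 1, 1) - datetime(2010, 1, 1) == timedelta(days=5 * 365 + 1)
+    True
+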
+    To avoid these issues, use integers to represent years,
+    or be very careful when you convert units of time.
+
+    References
+    ----------
+    .. [1] Funk, Russell J., and Jason Owen-Smith.
+       "A dynamic network measure of technological change."
+       Management science 63, no. 3 (2017): 791-817.
+       http://russellfunk.org/cdindex/static/papers/funk_ms_2017.pdf
+
+    """
+    if not all(time in G.nodes[n] for n in G):
+        raise nx.NetworkXError("Not all nodes have a 'time' attribute.")
+
+    try:
+        # get target_date
+        target_date = G.nodes[node][time] + time_delta
+        # keep the predecessors that existed before the target date
+        pred = {i for i in G.pred[node] if G.nodes[i][time] <= target_date}
+    except TypeError as err:
+        raise nx.NetworkXError(
+            "Addition and comparison are not supported between 'time_delta' "
+            "and 'time' types."
+        ) from err
+
+    # -1 if any edge between node's predecessors and node's successors, else 1
+    b = [-1 if any(j in G[i] for j in G[node]) else 1 for i in pred]
+
+    # n is size of the union of the focal node's predecessors and its successors' predecessors
+    n = len(pred.union(*(G.pred[s].keys() - {node} for s in G[node])))
+    if n == 0:
+        raise nx.NetworkXError("The cd index cannot be defined.")
+
+    # calculate cd index
+    if weight is None:
+        return round(sum(bi for bi in b) / n, 2)
+    else:
+        # If a node has the specified weight attribute, its weight is used in the calculation
+        # otherwise, a weight of 1 is assumed for that node
+        weights = [G.nodes[i].get(weight, 1) for i in pred]
+        return round(sum(bi / wt for bi, wt in zip(b, weights)) / n, 2)
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py
new file mode 100644
index 0000000..ccb19a4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tournament.py
@@ -0,0 +1,404 @@
+"""Functions concerning tournament graphs.
+
+A `tournament graph`_ is a complete oriented graph. In other words, it
+is a directed graph in which there is exactly one directed edge joining
+each pair of distinct nodes. For each function in this module that
+accepts a graph as input, you must provide a tournament graph. The
+responsibility is on the caller to ensure that the graph is a tournament
+graph:
+
+    >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)])
+    >>> nx.is_tournament(G)
+    True
+
+To access the functions in this module, you must access them through the
+:mod:`networkx.tournament` module::
+
+    >>> nx.tournament.is_reachable(G, 0, 1)
+    True
+
+.. _tournament graph: https://en.wikipedia.org/wiki/Tournament_%28graph_theory%29
+
+"""
+
+from itertools import combinations
+
+import networkx as nx
+from networkx.algorithms.simple_paths import is_simple_path as is_path
+from networkx.utils import arbitrary_element, not_implemented_for, py_random_state
+
+__all__ = [
+    "hamiltonian_path",
+    "is_reachable",
+    "is_strongly_connected",
+    "is_tournament",
+    "random_tournament",
+    "score_sequence",
+    "tournament_matrix",
+]
+
+
+def index_satisfying(iterable, condition):
+    """Returns the index of the first element in `iterable` that
+    satisfies the given condition.
+
+    If no such element is found (that is, when the iterable is
+    exhausted), this returns the length of the iterable (that is, one
+    greater than the last index of the iterable).
+
+    `iterable` must not be empty. If `iterable` is empty, this
+    function raises :exc:`ValueError`.
+
+    """
+    # Pre-condition: iterable must not be empty.
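+    # A generator one-liner such as
+    #   next((i for i, x in enumerate(iterable) if condition(x)), default)
+    # cannot be used here: the default would have to be the length of the
+    # iterable, which is unknown until the iterable is exhausted.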
+ for i, x in enumerate(iterable): + if condition(x): + return i + # If we reach the end of the iterable without finding an element + # that satisfies the condition, return the length of the iterable, + # which is one greater than the index of its last element. If the + # iterable was empty, `i` will not be defined, so we raise an + # exception. + try: + return i + 1 + except NameError as err: + raise ValueError("iterable must be non-empty") from err + + +@not_implemented_for("undirected") +@not_implemented_for("multigraph") +@nx._dispatchable +def is_tournament(G): + """Returns True if and only if `G` is a tournament. + + A tournament is a directed graph, with neither self-loops nor + multi-edges, in which there is exactly one directed edge joining + each pair of distinct nodes. + + Parameters + ---------- + G : NetworkX graph + A directed graph representing a tournament. + + Returns + ------- + bool + Whether the given graph is a tournament graph. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 0)]) + >>> nx.is_tournament(G) + True + + Notes + ----- + Some definitions require a self-loop on each node, but that is not + the convention used here. + + """ + # In a tournament, there is exactly one directed edge joining each pair. + return ( + all((v in G[u]) ^ (u in G[v]) for u, v in combinations(G, 2)) + and nx.number_of_selfloops(G) == 0 + ) + + +@not_implemented_for("undirected") +@not_implemented_for("multigraph") +@nx._dispatchable +def hamiltonian_path(G): + """Returns a Hamiltonian path in the given tournament graph. + + Each tournament has a Hamiltonian path. If furthermore, the + tournament is strongly connected, then the returned Hamiltonian path + is a Hamiltonian cycle (by joining the endpoints of the path). + + Parameters + ---------- + G : NetworkX graph + A directed graph representing a tournament. + + Returns + ------- + path : list + A list of nodes which form a Hamiltonian path in `G`. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]) + >>> nx.is_tournament(G) + True + >>> nx.tournament.hamiltonian_path(G) + [0, 1, 2, 3] + + Notes + ----- + This is a recursive implementation with an asymptotic running time + of $O(n^2)$, ignoring multiplicative polylogarithmic factors, where + $n$ is the number of nodes in the graph. + + """ + if len(G) == 0: + return [] + if len(G) == 1: + return [arbitrary_element(G)] + v = arbitrary_element(G) + hampath = hamiltonian_path(G.subgraph(set(G) - {v})) + # Get the index of the first node in the path that does *not* have + # an edge to `v`, then insert `v` before that node. + index = index_satisfying(hampath, lambda u: v not in G[u]) + hampath.insert(index, v) + return hampath + + +@py_random_state(1) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_tournament(n, seed=None): + r"""Returns a random tournament graph on `n` nodes. + + Parameters + ---------- + n : int + The number of nodes in the returned graph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : DiGraph + A tournament on `n` nodes, with exactly one directed edge joining + each pair of distinct nodes. + + Notes + ----- + This algorithm adds, for each pair of distinct nodes, an edge with + uniformly random orientation. In other words, `\binom{n}{2}` flips + of an unbiased coin decide the orientations of the edges in the + graph. + + """ + # Flip an unbiased coin for each pair of distinct nodes. 
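+    # There are n * (n - 1) / 2 unordered pairs, and each coin flip below
+    # orients exactly one of them, so every pair of distinct nodes ends up
+    # joined by exactly one directed edge.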
+    coins = (seed.random() for i in range((n * (n - 1)) // 2))
+    pairs = combinations(range(n), 2)
+    edges = ((u, v) if r < 0.5 else (v, u) for (u, v), r in zip(pairs, coins))
+    return nx.DiGraph(edges)
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def score_sequence(G):
+    """Returns the score sequence for the given tournament graph.
+
+    The score sequence is the sorted list of the out-degrees of the
+    nodes of the graph.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    list
+        A sorted list of the out-degrees of the nodes of `G`.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 0), (1, 3), (0, 2), (0, 3), (2, 1), (3, 2)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.score_sequence(G)
+    [1, 1, 2, 2]
+
+    """
+    return sorted(d for v, d in G.out_degree())
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}})
+def tournament_matrix(G):
+    r"""Returns the tournament matrix for the given tournament graph.
+
+    This function requires SciPy.
+
+    The *tournament matrix* of a tournament graph with edge set *E* is
+    the matrix *T* defined by
+
+    .. math::
+
+       T_{i j} =
+       \begin{cases}
+       +1 & \text{if } (i, j) \in E \\
+       -1 & \text{if } (j, i) \in E \\
+       0 & \text{if } i = j.
+       \end{cases}
+
+    An equivalent definition is `T = A - A^T`, where *A* is the
+    adjacency matrix of the graph `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    Returns
+    -------
+    SciPy sparse array
+        The tournament matrix of the tournament graph `G`.
+
+    Raises
+    ------
+    ImportError
+        If SciPy is not available.
+
+    """
+    A = nx.adjacency_matrix(G)
+    return A - A.T
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def is_reachable(G, s, t):
+    """Decides whether there is a path from `s` to `t` in the
+    tournament.
+
+    This function is more theoretically efficient than the reachability
+    checks of the shortest path algorithms in
+    :mod:`networkx.algorithms.shortest_paths`.
+
+    The given graph **must** be a tournament, otherwise this function's
+    behavior is undefined.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A directed graph representing a tournament.
+
+    s : node
+        A node in the graph.
+
+    t : node
+        A node in the graph.
+
+    Returns
+    -------
+    bool
+        Whether there is a path from `s` to `t` in `G`.
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 0), (1, 3), (1, 2), (2, 3), (2, 0), (3, 0)])
+    >>> nx.is_tournament(G)
+    True
+    >>> nx.tournament.is_reachable(G, 1, 3)
+    True
+    >>> nx.tournament.is_reachable(G, 3, 2)
+    False
+
+    Notes
+    -----
+    Although this function is more theoretically efficient than the
+    generic shortest path functions, a speedup requires the use of
+    parallelism. Though it may in the future, the current implementation
+    does not use parallelism, thus you may not see much of a speedup.
+
+    This algorithm comes from [1].
+
+    References
+    ----------
+    .. [1] Tantau, Till.
+       "A note on the complexity of the reachability problem for
+       tournaments."
+       *Electronic Colloquium on Computational Complexity*. 2001.
+
+    """
+
+    def two_neighborhood(G, v):
+        """Returns the set of nodes at distance at most two from `v`.
+
+        `G` must be a graph and `v` a node in that graph.
+ + The returned set includes the nodes at distance zero (that is, + the node `v` itself), the nodes at distance one (that is, the + out-neighbors of `v`), and the nodes at distance two. + + """ + return { + x for x in G if x == v or x in G[v] or any(is_path(G, [v, z, x]) for z in G) + } + + def is_closed(G, nodes): + """Decides whether the given set of nodes is closed. + + A set *S* of nodes is *closed* if for each node *u* in the graph + not in *S* and for each node *v* in *S*, there is an edge from + *u* to *v*. + + """ + return all(v in G[u] for u in set(G) - nodes for v in nodes) + + neighborhoods = [two_neighborhood(G, v) for v in G] + return all(not (is_closed(G, S) and s in S and t not in S) for S in neighborhoods) + + +@not_implemented_for("undirected") +@not_implemented_for("multigraph") +@nx._dispatchable(name="tournament_is_strongly_connected") +def is_strongly_connected(G): + """Decides whether the given tournament is strongly connected. + + This function is more theoretically efficient than the + :func:`~networkx.algorithms.components.is_strongly_connected` + function. + + The given graph **must** be a tournament, otherwise this function's + behavior is undefined. + + Parameters + ---------- + G : NetworkX graph + A directed graph representing a tournament. + + Returns + ------- + bool + Whether the tournament is strongly connected. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 0)]) + >>> nx.is_tournament(G) + True + >>> nx.tournament.is_strongly_connected(G) + True + >>> G.remove_edge(3, 0) + >>> G.add_edge(0, 3) + >>> nx.is_tournament(G) + True + >>> nx.tournament.is_strongly_connected(G) + False + + Notes + ----- + Although this function is more theoretically efficient than the + generic strong connectivity function, a speedup requires the use of + parallelism. Though it may in the future, the current implementation + does not use parallelism, thus you may not see much of a speedup. + + This algorithm comes from [1]. + + References + ---------- + .. [1] Tantau, Till. + "A note on the complexity of the reachability problem for + tournaments." + *Electronic Colloquium on Computational Complexity*. 2001. 
+ + + """ + return all(is_reachable(G, u, v) for u in G for v in G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py new file mode 100644 index 0000000..93e6cdd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__init__.py @@ -0,0 +1,5 @@ +from .beamsearch import * +from .breadth_first_search import * +from .depth_first_search import * +from .edgedfs import * +from .edgebfs import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..7959741 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-312.pyc new file mode 100644 index 0000000..1c0ff61 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/beamsearch.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-312.pyc new file mode 100644 index 0000000..e5a495c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/breadth_first_search.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-312.pyc new file mode 100644 index 0000000..b0529e4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/depth_first_search.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-312.pyc new file mode 100644 index 0000000..d07c0a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgebfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-312.pyc new file mode 100644 index 0000000..f74ab37 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/__pycache__/edgedfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py new file mode 100644 index 0000000..23fbe7b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/beamsearch.py @@ -0,0 +1,90 @@ +"""Basic algorithms for breadth-first searching the nodes of a graph.""" + +import networkx as nx + +__all__ = ["bfs_beam_edges"] + + +@nx._dispatchable +def bfs_beam_edges(G, source, value, width=None): + 
"""Iterates over edges in a beam search. + + The beam search is a generalized breadth-first search in which only + the "best" *w* neighbors of the current node are enqueued, where *w* + is the beam width and "best" is an application-specific + heuristic. In general, a beam search with a small beam width might + not visit each node in the graph. + + .. note:: + + With the default value of ``width=None`` or `width` greater than the + maximum degree of the graph, this function equates to a slower + version of `~networkx.algorithms.traversal.breadth_first_search.bfs_edges`. + All nodes will be visited, though the order of the reported edges may + vary. In such cases, `value` has no effect - consider using `bfs_edges` + directly instead. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for the breadth-first search; this function + iterates over only those edges in the component reachable from + this node. + + value : function + A function that takes a node of the graph as input and returns a + real number indicating how "good" it is. A higher value means it + is more likely to be visited sooner during the search. When + visiting a new node, only the `width` neighbors with the highest + `value` are enqueued (in decreasing order of `value`). + + width : int (default = None) + The beam width for the search. This is the number of neighbors + (ordered by `value`) to enqueue when visiting each new node. + + Yields + ------ + edge + Edges in the beam search starting from `source`, given as a pair + of nodes. + + Examples + -------- + To give nodes with, for example, a higher centrality precedence + during the search, set the `value` function to return the centrality + value of the node: + + >>> G = nx.karate_club_graph() + >>> centrality = nx.eigenvector_centrality(G) + >>> list(nx.bfs_beam_edges(G, source=0, value=centrality.get, width=3)) + [(0, 2), (0, 1), (0, 8), (2, 32), (1, 13), (8, 33)] + """ + + if width is None: + width = len(G) + + def successors(v): + """Returns a list of the best neighbors of a node. + + `v` is a node in the graph `G`. + + The "best" neighbors are chosen according to the `value` + function (higher is better). Only the `width` best neighbors of + `v` are returned. + """ + # TODO The Python documentation states that for small values, it + # is better to use `heapq.nlargest`. We should determine the + # threshold at which its better to use `heapq.nlargest()` + # instead of `sorted()[:]` and apply that optimization here. + # + # If `width` is greater than the number of neighbors of `v`, all + # neighbors are returned by the semantics of slicing in + # Python. This occurs in the special case that the user did not + # specify a `width`: in this case all neighbors are always + # returned, so this is just a (slower) implementation of + # `bfs_edges(G, source)` but with a sorted enqueue step. 
+ return iter(sorted(G.neighbors(v), key=value, reverse=True)[:width]) + + yield from nx.generic_bfs_edges(G, source, successors) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py new file mode 100644 index 0000000..706a6d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/breadth_first_search.py @@ -0,0 +1,576 @@ +"""Basic algorithms for breadth-first searching the nodes of a graph.""" + +from collections import deque + +import networkx as nx + +__all__ = [ + "bfs_edges", + "bfs_tree", + "bfs_predecessors", + "bfs_successors", + "descendants_at_distance", + "bfs_layers", + "bfs_labeled_edges", + "generic_bfs_edges", +] + + +@nx._dispatchable +def generic_bfs_edges(G, source, neighbors=None, depth_limit=None): + """Iterate over edges in a breadth-first search. + + The breadth-first search begins at `source` and enqueues the + neighbors of newly visited nodes specified by the `neighbors` + function. + + Parameters + ---------- + G : NetworkX graph + + source : node + Starting node for the breadth-first search; this function + iterates over only those edges in the component reachable from + this node. + + neighbors : function + A function that takes a newly visited node of the graph as input + and returns an *iterator* (not just a list) of nodes that are + neighbors of that node with custom ordering. If not specified, this is + just the ``G.neighbors`` method, but in general it can be any function + that returns an iterator over some or all of the neighbors of a + given node, in any order. + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth. + + Yields + ------ + edge + Edges in the breadth-first search starting from `source`. + + Examples + -------- + >>> G = nx.path_graph(7) + >>> list(nx.generic_bfs_edges(G, source=0)) + [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)] + >>> list(nx.generic_bfs_edges(G, source=2)) + [(2, 1), (2, 3), (1, 0), (3, 4), (4, 5), (5, 6)] + >>> list(nx.generic_bfs_edges(G, source=2, depth_limit=2)) + [(2, 1), (2, 3), (1, 0), (3, 4)] + + The `neighbors` param can be used to specify the visitation order of each + node's neighbors generically. In the following example, we modify the default + neighbor to return *odd* nodes first: + + >>> def odd_first(n): + ... return sorted(G.neighbors(n), key=lambda x: x % 2, reverse=True) + + >>> G = nx.star_graph(5) + >>> list(nx.generic_bfs_edges(G, source=0)) # Default neighbor ordering + [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)] + >>> list(nx.generic_bfs_edges(G, source=0, neighbors=odd_first)) + [(0, 1), (0, 3), (0, 5), (0, 2), (0, 4)] + + Notes + ----- + This implementation is from `PADS`_, which was in the public domain + when it was first accessed in July, 2004. The modifications + to allow depth limits are based on the Wikipedia article + "`Depth-limited-search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS/BFS.py + .. 
_Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + """ + if neighbors is None: + neighbors = G.neighbors + if depth_limit is None: + depth_limit = len(G) + + seen = {source} + n = len(G) + depth = 0 + next_parents_children = [(source, neighbors(source))] + while next_parents_children and depth < depth_limit: + this_parents_children = next_parents_children + next_parents_children = [] + for parent, children in this_parents_children: + for child in children: + if child not in seen: + seen.add(child) + next_parents_children.append((child, neighbors(child))) + yield parent, child + if len(seen) == n: + return + depth += 1 + + +@nx._dispatchable +def bfs_edges(G, source, reverse=False, depth_limit=None, sort_neighbors=None): + """Iterate over edges in a breadth-first-search starting at source. + + Parameters + ---------- + G : NetworkX graph + + source : node + Specify starting node for breadth-first search; this function + iterates over only those edges in the component reachable from + this node. + + reverse : bool, optional + If True traverse a directed graph in the reverse direction + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Yields + ------ + edge: 2-tuple of nodes + Yields edges resulting from the breadth-first search. + + Examples + -------- + To get the edges in a breadth-first search:: + + >>> G = nx.path_graph(3) + >>> list(nx.bfs_edges(G, 0)) + [(0, 1), (1, 2)] + >>> list(nx.bfs_edges(G, source=0, depth_limit=1)) + [(0, 1)] + + To get the nodes in a breadth-first search order:: + + >>> G = nx.path_graph(3) + >>> root = 2 + >>> edges = nx.bfs_edges(G, root) + >>> nodes = [root] + [v for u, v in edges] + >>> nodes + [2, 1, 0] + + Notes + ----- + The naming of this function is very similar to + :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs`. The difference + is that ``edge_bfs`` yields edges even if they extend back to an already + explored node while this generator yields the edges of the tree that results + from a breadth-first-search (BFS) so no edges are reported if they extend + to already explored nodes. That means ``edge_bfs`` reports all edges while + ``bfs_edges`` only reports those traversed by a node-based BFS. Yet another + description is that ``bfs_edges`` reports the edges traversed during BFS + while ``edge_bfs`` reports all edges in the order they are explored. + + Based on the breadth-first search implementation in PADS [1]_ + by D. Eppstein, July 2004; with modifications to allow depth limits + as described in [2]_. + + References + ---------- + .. [1] http://www.ics.uci.edu/~eppstein/PADS/BFS.py. + .. 
[2] https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + bfs_tree + :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges` + :func:`~networkx.algorithms.traversal.edgebfs.edge_bfs` + + """ + if reverse and G.is_directed(): + successors = G.predecessors + else: + successors = G.neighbors + + if sort_neighbors is not None: + yield from generic_bfs_edges( + G, source, lambda node: iter(sort_neighbors(successors(node))), depth_limit + ) + else: + yield from generic_bfs_edges(G, source, successors, depth_limit) + + +@nx._dispatchable(returns_graph=True) +def bfs_tree(G, source, reverse=False, depth_limit=None, sort_neighbors=None): + """Returns an oriented tree constructed from of a breadth-first-search + starting at source. + + Parameters + ---------- + G : NetworkX graph + + source : node + Specify starting node for breadth-first search + + reverse : bool, optional + If True traverse a directed graph in the reverse direction + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + T: NetworkX DiGraph + An oriented tree + + Examples + -------- + >>> G = nx.path_graph(3) + >>> list(nx.bfs_tree(G, 1).edges()) + [(1, 0), (1, 2)] + >>> H = nx.Graph() + >>> nx.add_path(H, [0, 1, 2, 3, 4, 5, 6]) + >>> nx.add_path(H, [2, 7, 8, 9, 10]) + >>> sorted(list(nx.bfs_tree(H, source=3, depth_limit=3).edges())) + [(1, 0), (2, 1), (2, 7), (3, 2), (3, 4), (4, 5), (5, 6), (7, 8)] + + + Notes + ----- + Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py + by D. Eppstein, July 2004. The modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited-search`_". + + .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_tree + bfs_edges + edge_bfs + """ + T = nx.DiGraph() + T.add_node(source) + edges_gen = bfs_edges( + G, + source, + reverse=reverse, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ) + T.add_edges_from(edges_gen) + return T + + +@nx._dispatchable +def bfs_predecessors(G, source, depth_limit=None, sort_neighbors=None): + """Returns an iterator of predecessors in breadth-first-search from source. + + Parameters + ---------- + G : NetworkX graph + + source : node + Specify starting node for breadth-first search + + depth_limit : int, optional(default=len(G)) + Specify the maximum search depth + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + pred: iterator + (node, predecessor) iterator where `predecessor` is the predecessor of + `node` in a breadth first search starting from `source`. 
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> dict(nx.bfs_predecessors(G, 0))
+    {1: 0, 2: 1}
+    >>> H = nx.Graph()
+    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
+    >>> dict(nx.bfs_predecessors(H, 0))
+    {1: 0, 2: 0, 3: 1, 4: 1, 5: 2, 6: 2}
+    >>> M = nx.Graph()
+    >>> nx.add_path(M, [0, 1, 2, 3, 4, 5, 6])
+    >>> nx.add_path(M, [2, 7, 8, 9, 10])
+    >>> sorted(nx.bfs_predecessors(M, source=1, depth_limit=3))
+    [(0, 1), (2, 1), (3, 2), (4, 3), (7, 2), (8, 7)]
+    >>> N = nx.DiGraph()
+    >>> nx.add_path(N, [0, 1, 2, 3, 4, 7])
+    >>> nx.add_path(N, [3, 5, 6, 7])
+    >>> sorted(nx.bfs_predecessors(N, source=2))
+    [(3, 2), (4, 3), (5, 3), (6, 5), (7, 4)]
+
+    Notes
+    -----
+    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
+    by D. Eppstein, July 2004. The modifications
+    to allow depth limits are based on the Wikipedia article
+    "`Depth-limited-search`_".
+
+    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    bfs_tree
+    bfs_edges
+    edge_bfs
+    """
+    for s, t in bfs_edges(
+        G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors
+    ):
+        yield (t, s)
+
+
+@nx._dispatchable
+def bfs_successors(G, source, depth_limit=None, sort_neighbors=None):
+    """Returns an iterator of successors in breadth-first-search from source.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node
+        Specify starting node for breadth-first search
+
+    depth_limit : int, optional(default=len(G))
+        Specify the maximum search depth
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Returns
+    -------
+    succ: iterator
+        (node, successors) iterator where `successors` is the non-empty list of
+        successors of `node` in a breadth first search from `source`.
+        To appear in the iterator, `node` must have successors.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> dict(nx.bfs_successors(G, 0))
+    {0: [1], 1: [2]}
+    >>> H = nx.Graph()
+    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
+    >>> dict(nx.bfs_successors(H, 0))
+    {0: [1, 2], 1: [3, 4], 2: [5, 6]}
+    >>> G = nx.Graph()
+    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5, 6])
+    >>> nx.add_path(G, [2, 7, 8, 9, 10])
+    >>> dict(nx.bfs_successors(G, source=1, depth_limit=3))
+    {1: [0, 2], 2: [3, 7], 3: [4], 7: [8]}
+    >>> G = nx.DiGraph()
+    >>> nx.add_path(G, [0, 1, 2, 3, 4, 5])
+    >>> dict(nx.bfs_successors(G, source=3))
+    {3: [4], 4: [5]}
+
+    Notes
+    -----
+    Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py
+    by D. Eppstein, July 2004. The modifications
+    to allow depth limits are based on the Wikipedia article
+    "`Depth-limited-search`_".
+
+    .. _Depth-limited-search: https://en.wikipedia.org/wiki/Depth-limited_search
+
+    See Also
+    --------
+    bfs_tree
+    bfs_edges
+    edge_bfs
+    """
+    parent = source
+    children = []
+    for p, c in bfs_edges(
+        G, source, depth_limit=depth_limit, sort_neighbors=sort_neighbors
+    ):
+        if p == parent:
+            children.append(c)
+            continue
+        yield (parent, children)
+        children = [c]
+        parent = p
+    yield (parent, children)
+
+
+@nx._dispatchable
+def bfs_layers(G, sources):
+    """Returns an iterator of all the layers in breadth-first search traversal.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph over which to find the layers using breadth-first search.
+
+    sources : node in `G` or iterable of nodes in `G`
+        Specify starting nodes for single source or multiple sources
+        breadth-first search.
+
+    Yields
+    ------
+    layer : list of nodes
+        Yields list of nodes at the same distance from `sources`.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> dict(enumerate(nx.bfs_layers(G, [0, 4])))
+    {0: [0, 4], 1: [1, 3], 2: [2]}
+    >>> H = nx.Graph()
+    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
+    >>> dict(enumerate(nx.bfs_layers(H, [1])))
+    {0: [1], 1: [0, 3, 4], 2: [2], 3: [5, 6]}
+    >>> dict(enumerate(nx.bfs_layers(H, [1, 6])))
+    {0: [1, 6], 1: [0, 3, 4, 2], 2: [5]}
+    """
+    if sources in G:
+        sources = [sources]
+
+    visited = set(sources)
+    current_layer = list(visited)
+
+    for source in current_layer:
+        if source not in G:
+            raise nx.NetworkXError(f"The node {source} is not in the graph.")
+
+    # this is basically BFS, except that the current layer only stores the nodes
+    # at the same distance from sources at each iteration
+    while current_layer:
+        yield current_layer
+        next_layer = []
+        for node in current_layer:
+            for child in G[node]:
+                if child not in visited:
+                    visited.add(child)
+                    next_layer.append(child)
+        current_layer = next_layer
+
+
+REVERSE_EDGE = "reverse"
+TREE_EDGE = "tree"
+FORWARD_EDGE = "forward"
+LEVEL_EDGE = "level"
+
+
+@nx._dispatchable
+def bfs_labeled_edges(G, sources):
+    """Iterate over edges in a breadth-first search (BFS) labeled by type.
+
+    We generate triples of the form (*u*, *v*, *d*), where (*u*, *v*) is the
+    edge being explored in the breadth-first search and *d* is one of the
+    strings 'tree', 'forward', 'level', or 'reverse'. A 'tree' edge is one in
+    which *v* is first discovered and placed into the layer below *u*. A
+    'forward' edge is one in which *u* is on the layer above *v* and *v* has
+    already been discovered. A 'level' edge is one in which both *u* and *v*
+    occur on the same layer. A 'reverse' edge is one in which *u* is on a layer
+    below *v*.
+
+    We emit each edge exactly once. In an undirected graph, 'reverse' edges do
+    not occur, because each is discovered either as a 'tree' or 'forward' edge.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph over which to find the layers using breadth-first search.
+
+    sources : node in `G` or list of nodes in `G`
+        Starting nodes for single source or multiple sources breadth-first search
+
+    Yields
+    ------
+    edges: generator
+        A generator of triples (*u*, *v*, *d*) where (*u*, *v*) is the edge being
+        explored and *d* is described above.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph)
+    >>> list(nx.bfs_labeled_edges(G, 0))
+    [(0, 1, 'tree'), (1, 2, 'tree'), (2, 3, 'tree'), (3, 0, 'reverse')]
+    >>> G = nx.complete_graph(3)
+    >>> list(nx.bfs_labeled_edges(G, 0))
+    [(0, 1, 'tree'), (0, 2, 'tree'), (1, 2, 'level')]
+    >>> list(nx.bfs_labeled_edges(G, [0, 1]))
+    [(0, 1, 'level'), (0, 2, 'tree'), (1, 2, 'forward')]
+    """
+    if sources in G:
+        sources = [sources]
+
+    neighbors = G._adj
+    directed = G.is_directed()
+    visited = set()
+    visit = visited.discard if directed else visited.add
+    # We use visited in a negative sense, so the visited set stays empty for the
+    # directed case and level edges are reported on their first occurrence in
+    # the undirected case. Note our use of visited.discard -- this is built-in
+    # and thus somewhat faster than a python-defined def nop(x): pass
+    depth = dict.fromkeys(sources, 0)
+    queue = deque(depth.items())
+    push = queue.append
+    pop = queue.popleft
+    while queue:
+        u, du = pop()
+        for v in neighbors[u]:
+            if v not in depth:
+                depth[v] = dv = du + 1
+                push((v, dv))
+                yield u, v, TREE_EDGE
+            else:
+                dv = depth[v]
+                if du == dv:
+                    if v not in visited:
+                        yield u, v, LEVEL_EDGE
+                elif du < dv:
+                    yield u, v, FORWARD_EDGE
+                elif directed:
+                    yield u, v, REVERSE_EDGE
+        visit(u)
+
+
+@nx._dispatchable
+def descendants_at_distance(G, source, distance):
+    """Returns all nodes at a fixed `distance` from `source` in `G`.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph
+    source : node in `G`
+    distance : the distance of the wanted nodes from `source`
+
+    Returns
+    -------
+    set()
+        The descendants of `source` in `G` at the given `distance` from `source`
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> nx.descendants_at_distance(G, 2, 2)
+    {0, 4}
+    >>> H = nx.DiGraph()
+    >>> H.add_edges_from([(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)])
+    >>> nx.descendants_at_distance(H, 0, 2)
+    {3, 4, 5, 6}
+    >>> nx.descendants_at_distance(H, 5, 0)
+    {5}
+    >>> nx.descendants_at_distance(H, 5, 1)
+    set()
+    """
+    if source not in G:
+        raise nx.NetworkXError(f"The node {source} is not in the graph.")
+
+    bfs_generator = nx.bfs_layers(G, source)
+    for i, layer in enumerate(bfs_generator):
+        if i == distance:
+            return set(layer)
+    return set()
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py
new file mode 100644
index 0000000..5bac5ec
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/depth_first_search.py
@@ -0,0 +1,529 @@
+"""Basic algorithms for depth-first searching the nodes of a graph."""
+
+from collections import defaultdict
+
+import networkx as nx
+
+__all__ = [
+    "dfs_edges",
+    "dfs_tree",
+    "dfs_predecessors",
+    "dfs_successors",
+    "dfs_preorder_nodes",
+    "dfs_postorder_nodes",
+    "dfs_labeled_edges",
+]
+
+
+@nx._dispatchable
+def dfs_edges(G, source=None, depth_limit=None, *, sort_neighbors=None):
+    """Iterate over edges in a depth-first-search (DFS).
+
+    Perform a depth-first-search over the nodes of `G` and yield
+    the edges in order. This may not generate all edges in `G`
+    (see `~networkx.algorithms.traversal.edgedfs.edge_dfs`).
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    source : node, optional
+        Specify starting node for depth-first search and yield edges in
+        the component reachable from source.
+
+    depth_limit : int, optional (default=len(G))
+        Specify the maximum search depth.
+
+    sort_neighbors : function (default=None)
+        A function that takes an iterator over nodes as the input, and
+        returns an iterable of the same nodes with a custom ordering.
+        For example, `sorted` will sort the nodes in increasing order.
+
+    Yields
+    ------
+    edge: 2-tuple of nodes
+        Yields edges resulting from the depth-first-search.
+
+    Examples
+    --------
+    >>> G = nx.path_graph(5)
+    >>> list(nx.dfs_edges(G, source=0))
+    [(0, 1), (1, 2), (2, 3), (3, 4)]
+    >>> list(nx.dfs_edges(G, source=0, depth_limit=2))
+    [(0, 1), (1, 2)]
+
+    Notes
+    -----
+    If a source is not specified then a source is chosen arbitrarily and
+    repeatedly until all components in the graph are searched.
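+
+    For instance, on a disconnected graph the search restarts in each
+    component (a small illustration; traversal order follows the order in
+    which nodes were added)::
+
+        >>> D = nx.Graph([(0, 1), (2, 3)])
+        >>> list(nx.dfs_edges(D))
+        [(0, 1), (2, 3)]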
+ + The implementation of this function is adapted from David Eppstein's + depth-first search function in PADS [1]_, with modifications + to allow depth limits based on the Wikipedia article + "Depth-limited search" [2]_. + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges` + + References + ---------- + .. [1] http://www.ics.uci.edu/~eppstein/PADS + .. [2] https://en.wikipedia.org/wiki/Depth-limited_search + """ + if source is None: + # edges for all components + nodes = G + else: + # edges for components with source + nodes = [source] + if depth_limit is None: + depth_limit = len(G) + + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + + visited = set() + for start in nodes: + if start in visited: + continue + visited.add(start) + stack = [(start, get_children(start))] + depth_now = 1 + while stack: + parent, children = stack[-1] + for child in children: + if child not in visited: + yield parent, child + visited.add(child) + if depth_now < depth_limit: + stack.append((child, get_children(child))) + depth_now += 1 + break + else: + stack.pop() + depth_now -= 1 + + +@nx._dispatchable(returns_graph=True) +def dfs_tree(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Returns oriented tree constructed from a depth-first-search from source. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + T : NetworkX DiGraph + An oriented tree + + Examples + -------- + >>> G = nx.path_graph(5) + >>> T = nx.dfs_tree(G, source=0, depth_limit=2) + >>> list(T.edges()) + [(0, 1), (1, 2)] + >>> T = nx.dfs_tree(G, source=0) + >>> list(T.edges()) + [(0, 1), (1, 2), (2, 3), (3, 4)] + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` + """ + T = nx.DiGraph() + if source is None: + T.add_nodes_from(G) + else: + T.add_node(source) + T.add_edges_from(dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors)) + return T + + +@nx._dispatchable +def dfs_predecessors(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Returns dictionary of predecessors in depth-first-search from source. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search. + Note that you will get predecessors for all nodes in the + component containing `source`. This input only specifies + where the DFS starts. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + pred: dict + A dictionary with nodes as keys and predecessor nodes as values. 
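+        `source` itself has no predecessor, so it never appears as a key.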
+ + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.dfs_predecessors(G, source=0) + {1: 0, 2: 1, 3: 2} + >>> nx.dfs_predecessors(G, source=0, depth_limit=2) + {1: 0, 2: 1} + + Notes + ----- + If a source is not specified then a source is chosen arbitrarily and + repeatedly until all components in the graph are searched. + + The implementation of this function is adapted from David Eppstein's + depth-first search function in `PADS`_, with modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS + .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` + """ + return { + t: s + for s, t in dfs_edges(G, source, depth_limit, sort_neighbors=sort_neighbors) + } + + +@nx._dispatchable +def dfs_successors(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Returns dictionary of successors in depth-first-search from source. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search. + Note that you will get successors for all nodes in the + component containing `source`. This input only specifies + where the DFS starts. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + succ: dict + A dictionary with nodes as keys and list of successor nodes as values. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.dfs_successors(G, source=0) + {0: [1], 1: [2], 2: [3], 3: [4]} + >>> nx.dfs_successors(G, source=0, depth_limit=2) + {0: [1], 1: [2]} + + Notes + ----- + If a source is not specified then a source is chosen arbitrarily and + repeatedly until all components in the graph are searched. + + The implementation of this function is adapted from David Eppstein's + depth-first search function in `PADS`_, with modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS + .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_preorder_nodes + dfs_postorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` + """ + d = defaultdict(list) + for s, t in dfs_edges( + G, + source=source, + depth_limit=depth_limit, + sort_neighbors=sort_neighbors, + ): + d[s].append(t) + return dict(d) + + +@nx._dispatchable +def dfs_postorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Generate nodes in a depth-first-search post-ordering starting at source. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. 
+ For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + nodes: generator + A generator of nodes in a depth-first-search post-ordering. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> list(nx.dfs_postorder_nodes(G, source=0)) + [4, 3, 2, 1, 0] + >>> list(nx.dfs_postorder_nodes(G, source=0, depth_limit=2)) + [1, 0] + + Notes + ----- + If a source is not specified then a source is chosen arbitrarily and + repeatedly until all components in the graph are searched. + + The implementation of this function is adapted from David Eppstein's + depth-first search function in `PADS`_, with modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS + .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_edges + dfs_preorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.edgedfs.edge_dfs` + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_tree` + """ + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) + return (v for u, v, d in edges if d == "reverse") + + +@nx._dispatchable +def dfs_preorder_nodes(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Generate nodes in a depth-first-search pre-ordering starting at source. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search and return nodes in + the component reachable from source. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. + + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + nodes: generator + A generator of nodes in a depth-first-search pre-ordering. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> list(nx.dfs_preorder_nodes(G, source=0)) + [0, 1, 2, 3, 4] + >>> list(nx.dfs_preorder_nodes(G, source=0, depth_limit=2)) + [0, 1, 2] + + Notes + ----- + If a source is not specified then a source is chosen arbitrarily and + repeatedly until all components in the graph are searched. + + The implementation of this function is adapted from David Eppstein's + depth-first search function in `PADS`_, with modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS + .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_edges + dfs_postorder_nodes + dfs_labeled_edges + :func:`~networkx.algorithms.traversal.breadth_first_search.bfs_edges` + """ + edges = nx.dfs_labeled_edges( + G, source=source, depth_limit=depth_limit, sort_neighbors=sort_neighbors + ) + return (v for u, v, d in edges if d == "forward") + + +@nx._dispatchable +def dfs_labeled_edges(G, source=None, depth_limit=None, *, sort_neighbors=None): + """Iterate over edges in a depth-first-search (DFS) labeled by type. + + Parameters + ---------- + G : NetworkX graph + + source : node, optional + Specify starting node for depth-first search and return edges in + the component reachable from source. + + depth_limit : int, optional (default=len(G)) + Specify the maximum search depth. 
+ + sort_neighbors : function (default=None) + A function that takes an iterator over nodes as the input, and + returns an iterable of the same nodes with a custom ordering. + For example, `sorted` will sort the nodes in increasing order. + + Returns + ------- + edges: generator + A generator of triples of the form (*u*, *v*, *d*), where (*u*, + *v*) is the edge being explored in the depth-first search and *d* + is one of the strings 'forward', 'nontree', 'reverse', or 'reverse-depth_limit'. + A 'forward' edge is one in which *u* has been visited but *v* has + not. A 'nontree' edge is one in which both *u* and *v* have been + visited but the edge is not in the DFS tree. A 'reverse' edge is + one in which both *u* and *v* have been visited and the edge is in + the DFS tree. When the `depth_limit` is reached via a 'forward' edge, + a 'reverse' edge is immediately generated rather than the subtree + being explored. To indicate this flavor of 'reverse' edge, the string + yielded is 'reverse-depth_limit'. + + Examples + -------- + + The labels reveal the complete transcript of the depth-first search + algorithm in more detail than, for example, :func:`dfs_edges`:: + + >>> from pprint import pprint + >>> + >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 1)]) + >>> pprint(list(nx.dfs_labeled_edges(G, source=0))) + [(0, 0, 'forward'), + (0, 1, 'forward'), + (1, 2, 'forward'), + (2, 1, 'nontree'), + (1, 2, 'reverse'), + (0, 1, 'reverse'), + (0, 0, 'reverse')] + + Notes + ----- + If a source is not specified then a source is chosen arbitrarily and + repeatedly until all components in the graph are searched. + + The implementation of this function is adapted from David Eppstein's + depth-first search function in `PADS`_, with modifications + to allow depth limits based on the Wikipedia article + "`Depth-limited search`_". + + .. _PADS: http://www.ics.uci.edu/~eppstein/PADS + .. _Depth-limited search: https://en.wikipedia.org/wiki/Depth-limited_search + + See Also + -------- + dfs_edges + dfs_preorder_nodes + dfs_postorder_nodes + """ + # Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py + # by D. Eppstein, July 2004. 
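+    #
+    # A quick sketch of the bookkeeping below: the stack holds
+    # (node, child-iterator) pairs, depth_now tracks the length of the
+    # current tree path, and a child discovered while depth_now equals
+    # depth_limit is closed out immediately with a 'reverse-depth_limit'
+    # label instead of being pushed onto the stack.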
+ if source is None: + # edges for all components + nodes = G + else: + # edges for components with source + nodes = [source] + if depth_limit is None: + depth_limit = len(G) + + get_children = ( + G.neighbors + if sort_neighbors is None + else lambda n: iter(sort_neighbors(G.neighbors(n))) + ) + + visited = set() + for start in nodes: + if start in visited: + continue + yield start, start, "forward" + visited.add(start) + stack = [(start, get_children(start))] + depth_now = 1 + while stack: + parent, children = stack[-1] + for child in children: + if child in visited: + yield parent, child, "nontree" + else: + yield parent, child, "forward" + visited.add(child) + if depth_now < depth_limit: + stack.append((child, iter(get_children(child)))) + depth_now += 1 + break + else: + yield parent, child, "reverse-depth_limit" + else: + stack.pop() + depth_now -= 1 + if stack: + yield stack[-1][0], parent, "reverse" + yield start, start, "reverse" diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py new file mode 100644 index 0000000..2f468e1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgebfs.py @@ -0,0 +1,185 @@ +""" +============================= +Breadth First Search on Edges +============================= + +Algorithms for a breadth-first traversal of edges in a graph. + +""" + +from collections import deque + +import networkx as nx + +FORWARD = "forward" +REVERSE = "reverse" + +__all__ = ["edge_bfs"] + + +@nx._dispatchable +def edge_bfs(G, source=None, orientation=None): + """A directed, breadth-first-search of edges in `G`, beginning at `source`. + + Yield the edges of G in a breadth-first-search order continuing until + all edges are generated. + + Parameters + ---------- + G : graph + A directed/undirected graph/multigraph. + + source : node, list of nodes + The node from which the traversal begins. If None, then a source + is chosen arbitrarily and repeatedly until all edges from each node in + the graph are searched. + + orientation : None | 'original' | 'reverse' | 'ignore' (default: None) + For directed graphs and directed multigraphs, edge traversals need not + respect the original orientation of the edges. + When set to 'reverse' every edge is traversed in the reverse direction. + When set to 'ignore', every edge is treated as undirected. + When set to 'original', every edge is treated as directed. + In all three cases, the yielded edge tuples add a last entry to + indicate the direction in which that edge was traversed. + If orientation is None, the yielded edge has no direction indicated. + The direction is respected, but not reported. + + Yields + ------ + edge : directed edge + A directed edge indicating the path taken by the breadth-first-search. + For graphs, `edge` is of the form `(u, v)` where `u` and `v` + are the tail and head of the edge as determined by the traversal. + For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is + the key of the edge. When the graph is directed, then `u` and `v` + are always in the order of the actual directed edge. + If orientation is not None then the edge tuple is extended to include + the direction of traversal ('forward' or 'reverse') on that edge. 
+
+    Examples
+    --------
+    >>> from pprint import pprint
+    >>> nodes = [0, 1, 2, 3]
+    >>> edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_bfs(nx.Graph(edges), nodes))
+    [(0, 1), (0, 2), (1, 2), (1, 3)]
+
+    >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes))
+    [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)]
+
+    >>> list(nx.edge_bfs(nx.MultiGraph(edges), nodes))
+    [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)]
+
+    >>> list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes))
+    [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)]
+
+    >>> list(nx.edge_bfs(nx.DiGraph(edges), nodes, orientation="ignore"))
+    [(0, 1, 'forward'), (1, 0, 'reverse'), (2, 0, 'reverse'), (2, 1, 'reverse'), (3, 1, 'reverse')]
+
+    >>> elist = list(nx.edge_bfs(nx.MultiDiGraph(edges), nodes, orientation="ignore"))
+    >>> pprint(elist)
+    [(0, 1, 0, 'forward'),
+     (1, 0, 0, 'reverse'),
+     (1, 0, 1, 'reverse'),
+     (2, 0, 0, 'reverse'),
+     (2, 1, 0, 'reverse'),
+     (3, 1, 0, 'reverse')]
+
+    Notes
+    -----
+    The goal of this function is to visit edges. It differs from the more
+    familiar breadth-first-search of nodes, as provided by
+    :func:`networkx.algorithms.traversal.breadth_first_search.bfs_edges`, in
+    that it does not stop once every node has been visited. In a directed graph
+    with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited
+    if not for the functionality provided by this function.
+
+    The naming of this function is very similar to bfs_edges. The difference
+    is that 'edge_bfs' yields edges even if they extend back to an already
+    explored node while 'bfs_edges' yields the edges of the tree that results
+    from a breadth-first-search (BFS) so no edges are reported if they extend
+    to already explored nodes. That means 'edge_bfs' reports all edges while
+    'bfs_edges' only reports those traversed by a node-based BFS. Yet another
+    description is that 'bfs_edges' reports the edges traversed during BFS
+    while 'edge_bfs' reports all edges in the order they are explored.
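+
+    A compact way to see the difference on that directed graph (a small
+    illustration; the extra edge (2, 1) is reported only by edge_bfs)::
+
+        >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
+        >>> list(nx.bfs_edges(G, 0))
+        [(0, 1), (1, 2)]
+        >>> list(nx.edge_bfs(G, 0))
+        [(0, 1), (1, 2), (2, 1)]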
+ + See Also + -------- + bfs_edges + bfs_tree + edge_dfs + + """ + nodes = list(G.nbunch_iter(source)) + if not nodes: + return + + directed = G.is_directed() + kwds = {"data": False} + if G.is_multigraph() is True: + kwds["keys"] = True + + # set up edge lookup + if orientation is None: + + def edges_from(node): + return iter(G.edges(node, **kwds)) + + elif not directed or orientation == "original": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + + elif orientation == "reverse": + + def edges_from(node): + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + elif orientation == "ignore": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + else: + raise nx.NetworkXError("invalid orientation argument.") + + if directed: + neighbors = G.successors + + def edge_id(edge): + # remove direction indicator + return edge[:-1] if orientation is not None else edge + + else: + neighbors = G.neighbors + + def edge_id(edge): + return (frozenset(edge[:2]),) + edge[2:] + + check_reverse = directed and orientation in ("reverse", "ignore") + + # start BFS + visited_nodes = set(nodes) + visited_edges = set() + queue = deque([(n, edges_from(n)) for n in nodes]) + while queue: + parent, children_edges = queue.popleft() + for edge in children_edges: + if check_reverse and edge[-1] == REVERSE: + child = edge[0] + else: + child = edge[1] + if child not in visited_nodes: + visited_nodes.add(child) + queue.append((child, edges_from(child))) + edgeid = edge_id(edge) + if edgeid not in visited_edges: + visited_edges.add(edgeid) + yield edge diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py new file mode 100644 index 0000000..f817310 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/edgedfs.py @@ -0,0 +1,182 @@ +""" +=========================== +Depth First Search on Edges +=========================== + +Algorithms for a depth-first traversal of edges in a graph. + +""" + +import networkx as nx + +FORWARD = "forward" +REVERSE = "reverse" + +__all__ = ["edge_dfs"] + + +@nx._dispatchable +def edge_dfs(G, source=None, orientation=None): + """A directed, depth-first-search of edges in `G`, beginning at `source`. + + Yield the edges of G in a depth-first-search order continuing until + all edges are generated. + + Parameters + ---------- + G : graph + A directed/undirected graph/multigraph. + + source : node, list of nodes + The node from which the traversal begins. If None, then a source + is chosen arbitrarily and repeatedly until all edges from each node in + the graph are searched. + + orientation : None | 'original' | 'reverse' | 'ignore' (default: None) + For directed graphs and directed multigraphs, edge traversals need not + respect the original orientation of the edges. + When set to 'reverse' every edge is traversed in the reverse direction. + When set to 'ignore', every edge is treated as undirected. + When set to 'original', every edge is treated as directed. + In all three cases, the yielded edge tuples add a last entry to + indicate the direction in which that edge was traversed. + If orientation is None, the yielded edge has no direction indicated. + The direction is respected, but not reported. + + Yields + ------ + edge : directed edge + A directed edge indicating the path taken by the depth-first traversal. 
+ For graphs, `edge` is of the form `(u, v)` where `u` and `v` + are the tail and head of the edge as determined by the traversal. + For multigraphs, `edge` is of the form `(u, v, key)`, where `key` is + the key of the edge. When the graph is directed, then `u` and `v` + are always in the order of the actual directed edge. + If orientation is not None then the edge tuple is extended to include + the direction of traversal ('forward' or 'reverse') on that edge. + + Examples + -------- + >>> from pprint import pprint + >>> nodes = [0, 1, 2, 3] + >>> edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] + + >>> list(nx.edge_dfs(nx.Graph(edges), nodes)) + [(0, 1), (1, 2), (1, 3)] + + >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes)) + [(0, 1), (1, 0), (2, 1), (3, 1)] + + >>> list(nx.edge_dfs(nx.MultiGraph(edges), nodes)) + [(0, 1, 0), (1, 0, 1), (0, 1, 2), (1, 2, 0), (1, 3, 0)] + + >>> list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes)) + [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)] + + >>> list(nx.edge_dfs(nx.DiGraph(edges), nodes, orientation="ignore")) + [(0, 1, 'forward'), (1, 0, 'forward'), (2, 1, 'reverse'), (3, 1, 'reverse')] + + >>> elist = list(nx.edge_dfs(nx.MultiDiGraph(edges), nodes, orientation="ignore")) + >>> pprint(elist) + [(0, 1, 0, 'forward'), + (1, 0, 0, 'forward'), + (1, 0, 1, 'reverse'), + (2, 1, 0, 'reverse'), + (3, 1, 0, 'reverse')] + + Notes + ----- + The goal of this function is to visit edges. It differs from the more + familiar depth-first traversal of nodes, as provided by + :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges`, in + that it does not stop once every node has been visited. In a directed graph + with edges [(0, 1), (1, 2), (2, 1)], the edge (2, 1) would not be visited + if not for the functionality provided by this function. + + See Also + -------- + :func:`~networkx.algorithms.traversal.depth_first_search.dfs_edges` + + """ + nodes = list(G.nbunch_iter(source)) + if not nodes: + return + + directed = G.is_directed() + kwds = {"data": False} + if G.is_multigraph() is True: + kwds["keys"] = True + + # set up edge lookup + if orientation is None: + + def edges_from(node): + return iter(G.edges(node, **kwds)) + + elif not directed or orientation == "original": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + + elif orientation == "reverse": + + def edges_from(node): + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + elif orientation == "ignore": + + def edges_from(node): + for e in G.edges(node, **kwds): + yield e + (FORWARD,) + for e in G.in_edges(node, **kwds): + yield e + (REVERSE,) + + else: + raise nx.NetworkXError("invalid orientation argument.") + + # set up formation of edge_id to easily look up if edge already returned + if directed: + + def edge_id(edge): + # remove direction indicator + return edge[:-1] if orientation is not None else edge + + else: + + def edge_id(edge): + # single id for undirected requires frozenset on nodes + return (frozenset(edge[:2]),) + edge[2:] + + # Basic setup + check_reverse = directed and orientation in ("reverse", "ignore") + + visited_edges = set() + visited_nodes = set() + edges = {} + + # start DFS + for start_node in nodes: + stack = [start_node] + while stack: + current_node = stack[-1] + if current_node not in visited_nodes: + edges[current_node] = edges_from(current_node) + visited_nodes.add(current_node) + + try: + edge = next(edges[current_node]) + except StopIteration: + # No more edges from the current node. 
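+                # Backtrack: this node's edge iterator is exhausted, so
+                # return to the previous node on the stack.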
+ stack.pop() + else: + edgeid = edge_id(edge) + if edgeid not in visited_edges: + visited_edges.add(edgeid) + # Mark the traversed "to" node as to-be-explored. + if check_reverse and edge[-1] == REVERSE: + stack.append(edge[0]) + else: + stack.append(edge[1]) + yield edge diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ee29522 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_beamsearch.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_beamsearch.cpython-312.pyc new file mode 100644 index 0000000..de7f873 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_beamsearch.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_bfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_bfs.cpython-312.pyc new file mode 100644 index 0000000..68526a5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_bfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_dfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_dfs.cpython-312.pyc new file mode 100644 index 0000000..5d07410 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_dfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgebfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgebfs.cpython-312.pyc new file mode 100644 index 0000000..17edd15 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgebfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgedfs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgedfs.cpython-312.pyc new file mode 100644 index 0000000..db28cc5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/__pycache__/test_edgedfs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py new file mode 100644 index 0000000..049f116 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_beamsearch.py @@ -0,0 +1,25 @@ +"""Unit tests for the beam search functions.""" + +import pytest + +import networkx as nx + + +def test_narrow(): + """Tests that 
a narrow beam width may cause an incomplete search.""" + # In this search, we enqueue only the neighbor 3 at the first + # step, then only the neighbor 2 at the second step. Once at + # node 2, the search chooses node 3, since it has a higher value + # than node 1, but node 3 has already been visited, so the + # search terminates. + G = nx.cycle_graph(4) + edges = nx.bfs_beam_edges(G, source=0, value=lambda n: n, width=1) + assert list(edges) == [(0, 3), (3, 2)] + + +@pytest.mark.parametrize("width", (2, None)) +def test_wide(width): + """All nodes are searched when `width` is None or >= max degree""" + G = nx.cycle_graph(4) + edges = nx.bfs_beam_edges(G, source=0, value=lambda n: n, width=width) + assert list(edges) == [(0, 3), (0, 1), (3, 2)] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py new file mode 100644 index 0000000..7f4f37a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_bfs.py @@ -0,0 +1,203 @@ +from functools import partial + +import pytest + +import networkx as nx + + +class TestBFS: + @classmethod + def setup_class(cls): + # simple graph + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]) + cls.G = G + + def test_successor(self): + assert dict(nx.bfs_successors(self.G, source=0)) == {0: [1], 1: [2, 3], 2: [4]} + + def test_predecessor(self): + assert dict(nx.bfs_predecessors(self.G, source=0)) == {1: 0, 2: 1, 3: 1, 4: 2} + + def test_bfs_tree(self): + T = nx.bfs_tree(self.G, source=0) + assert sorted(T.nodes()) == sorted(self.G.nodes()) + assert sorted(T.edges()) == [(0, 1), (1, 2), (1, 3), (2, 4)] + + def test_bfs_edges(self): + edges = nx.bfs_edges(self.G, source=0) + assert list(edges) == [(0, 1), (1, 2), (1, 3), (2, 4)] + + def test_bfs_edges_reverse(self): + D = nx.DiGraph() + D.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]) + edges = nx.bfs_edges(D, source=4, reverse=True) + assert list(edges) == [(4, 2), (4, 3), (2, 1), (1, 0)] + + def test_bfs_edges_sorting(self): + D = nx.DiGraph() + D.add_edges_from([(0, 1), (0, 2), (1, 4), (1, 3), (2, 5)]) + sort_desc = partial(sorted, reverse=True) + edges_asc = nx.bfs_edges(D, source=0, sort_neighbors=sorted) + edges_desc = nx.bfs_edges(D, source=0, sort_neighbors=sort_desc) + assert list(edges_asc) == [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5)] + assert list(edges_desc) == [(0, 2), (0, 1), (2, 5), (1, 4), (1, 3)] + + def test_bfs_tree_isolates(self): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + T = nx.bfs_tree(G, source=1) + assert sorted(T.nodes()) == [1] + assert sorted(T.edges()) == [] + + def test_bfs_layers(self): + expected = { + 0: [0], + 1: [1], + 2: [2, 3], + 3: [4], + } + for sources in [0, [0], (i for i in [0]), [0, 0]]: + assert dict(enumerate(nx.bfs_layers(self.G, sources))) == expected + + def test_bfs_layers_missing_source(self): + with pytest.raises(nx.NetworkXError): + next(nx.bfs_layers(self.G, sources="abc")) + with pytest.raises(nx.NetworkXError): + next(nx.bfs_layers(self.G, sources=["abc"])) + + def test_descendants_at_distance(self): + for distance, descendants in enumerate([{0}, {1}, {2, 3}, {4}]): + assert nx.descendants_at_distance(self.G, 0, distance) == descendants + + def test_descendants_at_distance_missing_source(self): + with pytest.raises(nx.NetworkXError): + nx.descendants_at_distance(self.G, "abc", 0) + + def test_bfs_labeled_edges_directed(self): + D = nx.cycle_graph(5, 
create_using=nx.DiGraph) + expected = [ + (0, 1, "tree"), + (1, 2, "tree"), + (2, 3, "tree"), + (3, 4, "tree"), + (4, 0, "reverse"), + ] + answer = list(nx.bfs_labeled_edges(D, 0)) + assert expected == answer + + D.add_edge(4, 4) + expected.append((4, 4, "level")) + answer = list(nx.bfs_labeled_edges(D, 0)) + assert expected == answer + + D.add_edge(0, 2) + D.add_edge(1, 5) + D.add_edge(2, 5) + D.remove_edge(4, 4) + expected = [ + (0, 1, "tree"), + (0, 2, "tree"), + (1, 2, "level"), + (1, 5, "tree"), + (2, 3, "tree"), + (2, 5, "forward"), + (3, 4, "tree"), + (4, 0, "reverse"), + ] + answer = list(nx.bfs_labeled_edges(D, 0)) + assert expected == answer + + G = D.to_undirected() + G.add_edge(4, 4) + expected = [ + (0, 1, "tree"), + (0, 2, "tree"), + (0, 4, "tree"), + (1, 2, "level"), + (1, 5, "tree"), + (2, 3, "tree"), + (2, 5, "forward"), + (4, 3, "forward"), + (4, 4, "level"), + ] + answer = list(nx.bfs_labeled_edges(G, 0)) + assert expected == answer + + +class TestBreadthLimitedSearch: + @classmethod + def setup_class(cls): + # a tree + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3, 4, 5, 6]) + nx.add_path(G, [2, 7, 8, 9, 10]) + cls.G = G + # a disconnected graph + D = nx.Graph() + D.add_edges_from([(0, 1), (2, 3)]) + nx.add_path(D, [2, 7, 8, 9, 10]) + cls.D = D + + def test_limited_bfs_successor(self): + assert dict(nx.bfs_successors(self.G, source=1, depth_limit=3)) == { + 1: [0, 2], + 2: [3, 7], + 3: [4], + 7: [8], + } + result = { + n: sorted(s) for n, s in nx.bfs_successors(self.D, source=7, depth_limit=2) + } + assert result == {8: [9], 2: [3], 7: [2, 8]} + + def test_limited_bfs_predecessor(self): + assert dict(nx.bfs_predecessors(self.G, source=1, depth_limit=3)) == { + 0: 1, + 2: 1, + 3: 2, + 4: 3, + 7: 2, + 8: 7, + } + assert dict(nx.bfs_predecessors(self.D, source=7, depth_limit=2)) == { + 2: 7, + 3: 2, + 8: 7, + 9: 8, + } + + def test_limited_bfs_tree(self): + T = nx.bfs_tree(self.G, source=3, depth_limit=1) + assert sorted(T.edges()) == [(3, 2), (3, 4)] + + def test_limited_bfs_edges(self): + edges = nx.bfs_edges(self.G, source=9, depth_limit=4) + assert list(edges) == [(9, 8), (9, 10), (8, 7), (7, 2), (2, 1), (2, 3)] + + def test_limited_bfs_layers(self): + assert dict(enumerate(nx.bfs_layers(self.G, sources=[0]))) == { + 0: [0], + 1: [1], + 2: [2], + 3: [3, 7], + 4: [4, 8], + 5: [5, 9], + 6: [6, 10], + } + assert dict(enumerate(nx.bfs_layers(self.D, sources=2))) == { + 0: [2], + 1: [3, 7], + 2: [8], + 3: [9], + 4: [10], + } + + def test_limited_descendants_at_distance(self): + for distance, descendants in enumerate( + [{0}, {1}, {2}, {3, 7}, {4, 8}, {5, 9}, {6, 10}] + ): + assert nx.descendants_at_distance(self.G, 0, distance) == descendants + for distance, descendants in enumerate([{2}, {3, 7}, {8}, {9}, {10}]): + assert nx.descendants_at_distance(self.D, 2, distance) == descendants diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py new file mode 100644 index 0000000..292de05 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_dfs.py @@ -0,0 +1,307 @@ +import networkx as nx + + +class TestDFS: + @classmethod + def setup_class(cls): + # simple graph + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + cls.G = G + # simple graph, disconnected + D = nx.Graph() + D.add_edges_from([(0, 1), (2, 3)]) + cls.D = D + + def test_preorder_nodes(self): + assert 
list(nx.dfs_preorder_nodes(self.G, source=0)) == [0, 1, 2, 4, 3] + assert list(nx.dfs_preorder_nodes(self.D)) == [0, 1, 2, 3] + assert list(nx.dfs_preorder_nodes(self.D, source=2)) == [2, 3] + + def test_postorder_nodes(self): + assert list(nx.dfs_postorder_nodes(self.G, source=0)) == [4, 2, 3, 1, 0] + assert list(nx.dfs_postorder_nodes(self.D)) == [1, 0, 3, 2] + assert list(nx.dfs_postorder_nodes(self.D, source=0)) == [1, 0] + + def test_successor(self): + assert nx.dfs_successors(self.G, source=0) == {0: [1], 1: [2, 3], 2: [4]} + assert nx.dfs_successors(self.G, source=1) == {0: [3, 4], 1: [0], 4: [2]} + assert nx.dfs_successors(self.D) == {0: [1], 2: [3]} + assert nx.dfs_successors(self.D, source=1) == {1: [0]} + + def test_predecessor(self): + assert nx.dfs_predecessors(self.G, source=0) == {1: 0, 2: 1, 3: 1, 4: 2} + assert nx.dfs_predecessors(self.D) == {1: 0, 3: 2} + + def test_dfs_tree(self): + exp_nodes = sorted(self.G.nodes()) + exp_edges = [(0, 1), (1, 2), (1, 3), (2, 4)] + # Search from first node + T = nx.dfs_tree(self.G, source=0) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges + # Check source=None + T = nx.dfs_tree(self.G, source=None) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges + # Check source=None is the default + T = nx.dfs_tree(self.G) + assert sorted(T.nodes()) == exp_nodes + assert sorted(T.edges()) == exp_edges + + def test_dfs_edges(self): + edges = nx.dfs_edges(self.G, source=0) + assert list(edges) == [(0, 1), (1, 2), (2, 4), (1, 3)] + edges = nx.dfs_edges(self.D) + assert list(edges) == [(0, 1), (2, 3)] + + def test_dfs_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_edges(G, source=0, sort_neighbors=sorted) + edges_desc = nx.dfs_edges( + G, source=0, sort_neighbors=lambda x: sorted(x, reverse=True) + ) + assert list(edges_asc) == [(0, 1), (1, 2), (2, 4), (1, 3)] + assert list(edges_desc) == [(0, 4), (4, 2), (2, 1), (1, 3)] + + def test_dfs_labeled_edges(self): + edges = list(nx.dfs_labeled_edges(self.G, source=0)) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(0, 0), (0, 1), (1, 2), (2, 4), (1, 3)] + assert edges == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (1, 2, "forward"), + (2, 1, "nontree"), + (2, 4, "forward"), + (4, 2, "nontree"), + (4, 0, "nontree"), + (2, 4, "reverse"), + (1, 2, "reverse"), + (1, 3, "forward"), + (3, 1, "nontree"), + (3, 0, "nontree"), + (1, 3, "reverse"), + (0, 1, "reverse"), + (0, 3, "nontree"), + (0, 4, "nontree"), + (0, 0, "reverse"), + ] + + def test_dfs_labeled_edges_sorting(self): + G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 0), (0, 4)]) + edges_asc = nx.dfs_labeled_edges(G, source=0, sort_neighbors=sorted) + edges_desc = nx.dfs_labeled_edges( + G, source=0, sort_neighbors=lambda x: sorted(x, reverse=True) + ) + assert list(edges_asc) == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (1, 2, "forward"), + (2, 1, "nontree"), + (2, 4, "forward"), + (4, 0, "nontree"), + (4, 2, "nontree"), + (2, 4, "reverse"), + (1, 2, "reverse"), + (1, 3, "forward"), + (3, 0, "nontree"), + (3, 1, "nontree"), + (1, 3, "reverse"), + (0, 1, "reverse"), + (0, 3, "nontree"), + (0, 4, "nontree"), + (0, 0, "reverse"), + ] + assert list(edges_desc) == [ + (0, 0, "forward"), + (0, 4, "forward"), + (4, 2, "forward"), + (2, 4, "nontree"), + (2, 1, "forward"), + (1, 3, "forward"), + (3, 1, "nontree"), + (3, 0, "nontree"), + (1, 3, "reverse"), + (1, 
2, "nontree"), + (1, 0, "nontree"), + (2, 1, "reverse"), + (4, 2, "reverse"), + (4, 0, "nontree"), + (0, 4, "reverse"), + (0, 3, "nontree"), + (0, 1, "nontree"), + (0, 0, "reverse"), + ] + + def test_dfs_labeled_disconnected_edges(self): + edges = list(nx.dfs_labeled_edges(self.D)) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(0, 0), (0, 1), (2, 2), (2, 3)] + assert edges == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (0, 1, "reverse"), + (0, 0, "reverse"), + (2, 2, "forward"), + (2, 3, "forward"), + (3, 2, "nontree"), + (2, 3, "reverse"), + (2, 2, "reverse"), + ] + + def test_dfs_tree_isolates(self): + G = nx.Graph() + G.add_node(1) + G.add_node(2) + T = nx.dfs_tree(G, source=1) + assert sorted(T.nodes()) == [1] + assert sorted(T.edges()) == [] + T = nx.dfs_tree(G, source=None) + assert sorted(T.nodes()) == [1, 2] + assert sorted(T.edges()) == [] + + +class TestDepthLimitedSearch: + @classmethod + def setup_class(cls): + # a tree + G = nx.Graph() + nx.add_path(G, [0, 1, 2, 3, 4, 5, 6]) + nx.add_path(G, [2, 7, 8, 9, 10]) + cls.G = G + # a disconnected graph + D = nx.Graph() + D.add_edges_from([(0, 1), (2, 3)]) + nx.add_path(D, [2, 7, 8, 9, 10]) + cls.D = D + + def test_dls_preorder_nodes(self): + assert list(nx.dfs_preorder_nodes(self.G, source=0, depth_limit=2)) == [0, 1, 2] + assert list(nx.dfs_preorder_nodes(self.D, source=1, depth_limit=2)) == ([1, 0]) + + def test_dls_postorder_nodes(self): + assert list(nx.dfs_postorder_nodes(self.G, source=3, depth_limit=3)) == [ + 1, + 7, + 2, + 5, + 4, + 3, + ] + assert list(nx.dfs_postorder_nodes(self.D, source=2, depth_limit=2)) == ( + [3, 7, 2] + ) + + def test_dls_successor(self): + result = nx.dfs_successors(self.G, source=4, depth_limit=3) + assert {n: set(v) for n, v in result.items()} == { + 2: {1, 7}, + 3: {2}, + 4: {3, 5}, + 5: {6}, + } + result = nx.dfs_successors(self.D, source=7, depth_limit=2) + assert {n: set(v) for n, v in result.items()} == {8: {9}, 2: {3}, 7: {8, 2}} + + def test_dls_predecessor(self): + assert nx.dfs_predecessors(self.G, source=0, depth_limit=3) == { + 1: 0, + 2: 1, + 3: 2, + 7: 2, + } + assert nx.dfs_predecessors(self.D, source=2, depth_limit=3) == { + 8: 7, + 9: 8, + 3: 2, + 7: 2, + } + + def test_dls_tree(self): + T = nx.dfs_tree(self.G, source=3, depth_limit=1) + assert sorted(T.edges()) == [(3, 2), (3, 4)] + + def test_dls_edges(self): + edges = nx.dfs_edges(self.G, source=9, depth_limit=4) + assert list(edges) == [(9, 8), (8, 7), (7, 2), (2, 1), (2, 3), (9, 10)] + + def test_dls_labeled_edges_depth_1(self): + edges = list(nx.dfs_labeled_edges(self.G, source=5, depth_limit=1)) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(5, 5), (5, 4), (5, 6)] + # Note: reverse-depth_limit edge types were not reported before gh-6240 + assert edges == [ + (5, 5, "forward"), + (5, 4, "forward"), + (5, 4, "reverse-depth_limit"), + (5, 6, "forward"), + (5, 6, "reverse-depth_limit"), + (5, 5, "reverse"), + ] + + def test_dls_labeled_edges_depth_2(self): + edges = list(nx.dfs_labeled_edges(self.G, source=6, depth_limit=2)) + forward = [(u, v) for (u, v, d) in edges if d == "forward"] + assert forward == [(6, 6), (6, 5), (5, 4)] + assert edges == [ + (6, 6, "forward"), + (6, 5, "forward"), + (5, 4, "forward"), + (5, 4, "reverse-depth_limit"), + (5, 6, "nontree"), + (6, 5, "reverse"), + (6, 6, "reverse"), + ] + + def test_dls_labeled_disconnected_edges(self): + edges = list(nx.dfs_labeled_edges(self.D, depth_limit=1)) + assert edges 
== [ + (0, 0, "forward"), + (0, 1, "forward"), + (0, 1, "reverse-depth_limit"), + (0, 0, "reverse"), + (2, 2, "forward"), + (2, 3, "forward"), + (2, 3, "reverse-depth_limit"), + (2, 7, "forward"), + (2, 7, "reverse-depth_limit"), + (2, 2, "reverse"), + (8, 8, "forward"), + (8, 7, "nontree"), + (8, 9, "forward"), + (8, 9, "reverse-depth_limit"), + (8, 8, "reverse"), + (10, 10, "forward"), + (10, 9, "nontree"), + (10, 10, "reverse"), + ] + # large depth_limit has no impact + edges = list(nx.dfs_labeled_edges(self.D, depth_limit=19)) + assert edges == [ + (0, 0, "forward"), + (0, 1, "forward"), + (1, 0, "nontree"), + (0, 1, "reverse"), + (0, 0, "reverse"), + (2, 2, "forward"), + (2, 3, "forward"), + (3, 2, "nontree"), + (2, 3, "reverse"), + (2, 7, "forward"), + (7, 2, "nontree"), + (7, 8, "forward"), + (8, 7, "nontree"), + (8, 9, "forward"), + (9, 8, "nontree"), + (9, 10, "forward"), + (10, 9, "nontree"), + (9, 10, "reverse"), + (8, 9, "reverse"), + (7, 8, "reverse"), + (2, 7, "reverse"), + (2, 2, "reverse"), + ] diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py new file mode 100644 index 0000000..1bf3fae --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgebfs.py @@ -0,0 +1,147 @@ +import pytest + +import networkx as nx +from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE + + +class TestEdgeBFS: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(0, 1), (1, 0), (1, 0), (2, 0), (2, 1), (3, 1)] + + def test_empty(self): + G = nx.Graph() + edges = list(nx.edge_bfs(G)) + assert edges == [] + + def test_graph_single_source(self): + G = nx.Graph(self.edges) + G.add_edge(4, 5) + x = list(nx.edge_bfs(G, [0])) + x_ = [(0, 1), (0, 2), (1, 2), (1, 3)] + assert x == x_ + + def test_graph(self): + G = nx.Graph(self.edges) + x = list(nx.edge_bfs(G, self.nodes)) + x_ = [(0, 1), (0, 2), (1, 2), (1, 3)] + assert x == x_ + + def test_digraph(self): + G = nx.DiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes)) + x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_invalid(self): + G = nx.DiGraph(self.edges) + edge_iterator = nx.edge_bfs(G, self.nodes, orientation="hello") + pytest.raises(nx.NetworkXError, list, edge_iterator) + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0), (2, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation="original")) + x_ = [ + (0, 1, FORWARD), + (1, 0, FORWARD), + (2, 0, FORWARD), + (2, 1, FORWARD), + (3, 1, FORWARD), + ] + assert x == x_ + + def test_digraph2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(nx.edge_bfs(G, [0])) + x_ = [(0, 1), (1, 2), (2, 3)] + assert x == x_ + + def test_digraph_rev(self): + G = nx.DiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, REVERSE), + (2, 0, REVERSE), + (0, 1, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] + assert x == x_ + + def test_digraph_rev2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(nx.edge_bfs(G, [3], orientation="reverse")) + x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] + assert x == x_ + + def test_multigraph(self): + G = 
nx.MultiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes)) + x_ = [(0, 1, 0), (0, 1, 1), (0, 1, 2), (0, 2, 0), (1, 2, 0), (1, 3, 0)] + # This is an example of where hash randomization can break. + # There are 3! * 2 alternative outputs, such as: + # [(0, 1, 1), (1, 0, 0), (0, 1, 2), (1, 3, 0), (1, 2, 0)] + # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k) + # edges. So the algorithm only guarantees a partial order. A total + # order is guaranteed only if the graph data structures are ordered. + assert x == x_ + + def test_multidigraph(self): + G = nx.MultiDiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 0, 0), (2, 1, 0), (3, 1, 0)] + assert x == x_ + + def test_multidigraph_rev(self): + G = nx.MultiDiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ + + def test_digraph_ignore(self): + G = nx.DiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, FORWARD), + (1, 0, REVERSE), + (2, 0, REVERSE), + (2, 1, REVERSE), + (3, 1, REVERSE), + ] + assert x == x_ + + def test_digraph_ignore2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(nx.edge_bfs(G, [0], orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] + assert x == x_ + + def test_multidigraph_ignore(self): + G = nx.MultiDiGraph(self.edges) + x = list(nx.edge_bfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 0, 0, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py new file mode 100644 index 0000000..7c1967c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/traversal/tests/test_edgedfs.py @@ -0,0 +1,131 @@ +import pytest + +import networkx as nx +from networkx.algorithms import edge_dfs +from networkx.algorithms.traversal.edgedfs import FORWARD, REVERSE + +# These tests can fail with hash randomization. The easiest and clearest way +# to write these unit tests is for the edges to be output in an expected total +# order, but we cannot guarantee the order amongst outgoing edges from a node, +# unless each class uses an ordered data structure for neighbors. This is +# painful to do with the current API. The alternative is that the tests are +# written (IMO confusingly) so that there is not a total order over the edges, +# but only a partial order. Due to the small size of the graphs, hopefully +# failures due to hash randomization will not occur. For an example of how +# this can fail, see TestEdgeDFS.test_multigraph. 
+ + +class TestEdgeDFS: + @classmethod + def setup_class(cls): + cls.nodes = [0, 1, 2, 3] + cls.edges = [(0, 1), (1, 0), (1, 0), (2, 1), (3, 1)] + + def test_empty(self): + G = nx.Graph() + edges = list(edge_dfs(G)) + assert edges == [] + + def test_graph(self): + G = nx.Graph(self.edges) + x = list(edge_dfs(G, self.nodes)) + x_ = [(0, 1), (1, 2), (1, 3)] + assert x == x_ + + def test_digraph(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes)) + x_ = [(0, 1), (1, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_invalid(self): + G = nx.DiGraph(self.edges) + edge_iterator = edge_dfs(G, self.nodes, orientation="hello") + pytest.raises(nx.NetworkXError, list, edge_iterator) + + def test_digraph_orientation_none(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation=None)) + x_ = [(0, 1), (1, 0), (2, 1), (3, 1)] + assert x == x_ + + def test_digraph_orientation_original(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="original")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, FORWARD), (3, 1, FORWARD)] + assert x == x_ + + def test_digraph2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_dfs(G, [0])) + x_ = [(0, 1), (1, 2), (2, 3)] + assert x == x_ + + def test_digraph_rev(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [(1, 0, REVERSE), (0, 1, REVERSE), (2, 1, REVERSE), (3, 1, REVERSE)] + assert x == x_ + + def test_digraph_rev2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_dfs(G, [3], orientation="reverse")) + x_ = [(2, 3, REVERSE), (1, 2, REVERSE), (0, 1, REVERSE)] + assert x == x_ + + def test_multigraph(self): + G = nx.MultiGraph(self.edges) + x = list(edge_dfs(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 1), (0, 1, 2), (1, 2, 0), (1, 3, 0)] + # This is an example of where hash randomization can break. + # There are 3! * 2 alternative outputs, such as: + # [(0, 1, 1), (1, 0, 0), (0, 1, 2), (1, 3, 0), (1, 2, 0)] + # But note, the edges (1,2,0) and (1,3,0) always follow the (0,1,k) + # edges. So the algorithm only guarantees a partial order. A total + # order is guaranteed only if the graph data structures are ordered. 
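+        # Sketch (our addition, not upstream): a hash-seed-proof variant of
+        # this kind of check compares key- and direction-insensitive
+        # structure rather than one arbitrary total order.
+        y = list(edge_dfs(nx.MultiGraph([(0, 1), (1, 0), (1, 0)]), [0, 1]))
+        assert len(y) == 3  # every parallel edge is visited exactly once
+        assert {frozenset(e[:2]) for e in y} == {frozenset({0, 1})}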
+ assert x == x_ + + def test_multidigraph(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_dfs(G, self.nodes)) + x_ = [(0, 1, 0), (1, 0, 0), (1, 0, 1), (2, 1, 0), (3, 1, 0)] + assert x == x_ + + def test_multidigraph_rev(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="reverse")) + x_ = [ + (1, 0, 0, REVERSE), + (0, 1, 0, REVERSE), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ + + def test_digraph_ignore(self): + G = nx.DiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 0, FORWARD), (2, 1, REVERSE), (3, 1, REVERSE)] + assert x == x_ + + def test_digraph_ignore2(self): + G = nx.DiGraph() + nx.add_path(G, range(4)) + x = list(edge_dfs(G, [0], orientation="ignore")) + x_ = [(0, 1, FORWARD), (1, 2, FORWARD), (2, 3, FORWARD)] + assert x == x_ + + def test_multidigraph_ignore(self): + G = nx.MultiDiGraph(self.edges) + x = list(edge_dfs(G, self.nodes, orientation="ignore")) + x_ = [ + (0, 1, 0, FORWARD), + (1, 0, 0, FORWARD), + (1, 0, 1, REVERSE), + (2, 1, 0, REVERSE), + (3, 1, 0, REVERSE), + ] + assert x == x_ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py new file mode 100644 index 0000000..7120d4b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__init__.py @@ -0,0 +1,6 @@ +from .branchings import * +from .coding import * +from .mst import * +from .recognition import * +from .operations import * +from .decomposition import * diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6957f27 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-312.pyc new file mode 100644 index 0000000..b4ed4f9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/branchings.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-312.pyc new file mode 100644 index 0000000..c76ddc7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/coding.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-312.pyc new file mode 100644 index 0000000..182778d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/decomposition.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-312.pyc new file mode 100644 index 0000000..c78601c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/mst.cpython-312.pyc 
differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-312.pyc new file mode 100644 index 0000000..2768e39 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/operations.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-312.pyc new file mode 100644 index 0000000..a552a46 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/__pycache__/recognition.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py new file mode 100644 index 0000000..cc9c7cf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/branchings.py @@ -0,0 +1,1042 @@ +""" +Algorithms for finding optimum branchings and spanning arborescences. + +This implementation is based on: + + J. Edmonds, Optimum branchings, J. Res. Natl. Bur. Standards 71B (1967), + 233–240. URL: http://archive.org/details/jresv71Bn4p233 + +""" + +# TODO: Implement method from Gabow, Galil, Spence and Tarjan: +# +# @article{ +# year={1986}, +# issn={0209-9683}, +# journal={Combinatorica}, +# volume={6}, +# number={2}, +# doi={10.1007/BF02579168}, +# title={Efficient algorithms for finding minimum spanning trees in +# undirected and directed graphs}, +# url={https://doi.org/10.1007/BF02579168}, +# publisher={Springer-Verlag}, +# keywords={68 B 15; 68 C 05}, +# author={Gabow, Harold N. and Galil, Zvi and Spencer, Thomas and Tarjan, +# Robert E.}, +# pages={109-122}, +# language={English} +# } +import string +from dataclasses import dataclass, field +from operator import itemgetter +from queue import PriorityQueue + +import networkx as nx +from networkx.utils import py_random_state + +from .recognition import is_arborescence, is_branching + +__all__ = [ + "branching_weight", + "greedy_branching", + "maximum_branching", + "minimum_branching", + "minimal_branching", + "maximum_spanning_arborescence", + "minimum_spanning_arborescence", + "ArborescenceIterator", +] + +KINDS = {"max", "min"} + +STYLES = { + "branching": "branching", + "arborescence": "arborescence", + "spanning arborescence": "arborescence", +} + +INF = float("inf") + + +@py_random_state(1) +def random_string(L=15, seed=None): + return "".join([seed.choice(string.ascii_letters) for n in range(L)]) + + +def _min_weight(weight): + return -weight + + +def _max_weight(weight): + return weight + + +@nx._dispatchable(edge_attrs={"attr": "default"}) +def branching_weight(G, attr="weight", default=1): + """ + Returns the total weight of a branching. + + You must access this function through the networkx.algorithms.tree module. + + Parameters + ---------- + G : DiGraph + The directed graph. + attr : str + The attribute to use as weights. If None, then each edge will be + treated equally with a weight of 1. + default : float + When `attr` is not None, then if an edge does not have that attribute, + `default` specifies what value it should take. + + Returns + ------- + weight: int or float + The total weight of the branching. 
+ + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_weighted_edges_from([(0, 1, 2), (1, 2, 4), (2, 3, 3), (3, 4, 2)]) + >>> nx.tree.branching_weight(G) + 11 + + """ + return sum(edge[2].get(attr, default) for edge in G.edges(data=True)) + + +@py_random_state(4) +@nx._dispatchable(edge_attrs={"attr": "default"}, returns_graph=True) +def greedy_branching(G, attr="weight", default=1, kind="max", seed=None): + """ + Returns a branching obtained through a greedy algorithm. + + This algorithm is wrong, and cannot give a proper optimal branching. + However, we include it for pedagogical reasons, as it can be helpful to + see what its outputs are. + + The output is a branching, and possibly, a spanning arborescence. However, + it is not guaranteed to be optimal in either case. + + Parameters + ---------- + G : DiGraph + The directed graph to scan. + attr : str + The attribute to use as weights. If None, then each edge will be + treated equally with a weight of 1. + default : float + When `attr` is not None, then if an edge does not have that attribute, + `default` specifies what value it should take. + kind : str + The type of optimum to search for: 'min' or 'max' greedy branching. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + B : directed graph + The greedily obtained branching. + + """ + if kind not in KINDS: + raise nx.NetworkXException("Unknown value for `kind`.") + + if kind == "min": + reverse = False + else: + reverse = True + + if attr is None: + # Generate a random string the graph probably won't have. + attr = random_string(seed=seed) + + edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] + + # We sort by weight, but also by nodes to normalize behavior across runs. + try: + edges.sort(key=itemgetter(2, 0, 1), reverse=reverse) + except TypeError: + # This will fail in Python 3.x if the nodes are of varying types. + # In that case, we use the arbitrary order. + edges.sort(key=itemgetter(2), reverse=reverse) + + # The branching begins with a forest of no edges. + B = nx.DiGraph() + B.add_nodes_from(G) + + # Now we add edges greedily so long we maintain the branching. + uf = nx.utils.UnionFind() + for i, (u, v, w) in enumerate(edges): + if uf[u] == uf[v]: + # Adding this edge would form a directed cycle. + continue + elif B.in_degree(v) == 1: + # The edge would increase the degree to be greater than one. + continue + else: + # If attr was None, then don't insert weights... + data = {} + if attr is not None: + data[attr] = w + B.add_edge(u, v, **data) + uf.union(u, v) + + return B + + +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) +def maximum_branching( + G, + attr="weight", + default=1, + preserve_attrs=False, + partition=None, +): + ####################################### + ### Data Structure Helper Functions ### + ####################################### + + def edmonds_add_edge(G, edge_index, u, v, key, **d): + """ + Adds an edge to `G` while also updating the edge index. + + This algorithm requires the use of an external dictionary to track + the edge keys since it is possible that the source or destination + node of an edge will be changed and the default key-handling + capabilities of the MultiDiGraph class do not account for this. + + Parameters + ---------- + G : MultiDiGraph + The graph to insert an edge into. + edge_index : dict + A mapping from integers to the edges of the graph. + u : node + The source node of the new edge. 
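Because `greedy_branching` can be arbitrarily far from optimal, a small sketch (a toy graph of our choosing) makes the caveat concrete: the greedy pass grabs the single heaviest edge, which blocks two compatible edges worth more in total.

import networkx as nx
from networkx.algorithms.tree.branchings import greedy_branching, maximum_branching

G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 10), (1, 0, 9), (2, 1, 9)])
print(sorted(greedy_branching(G, kind="max").edges(data="weight")))
# [(0, 1, 10)] -- total weight 10
print(sorted(maximum_branching(G).edges(data="weight")))
# [(1, 0, 9), (2, 1, 9)] -- total weight 18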
+        v : node
+            The destination node of the new edge.
+        key : int
+            The key to use from `edge_index`.
+        d : keyword arguments, optional
+            Other attributes to store on the new edge.
+        """
+
+        if key in edge_index:
+            uu, vv, _ = edge_index[key]
+            if (u != uu) or (v != vv):
+                raise Exception(f"Key {key!r} is already in use.")
+
+        G.add_edge(u, v, key, **d)
+        edge_index[key] = (u, v, G.succ[u][v][key])
+
+    def edmonds_remove_node(G, edge_index, n):
+        """
+        Remove a node from the graph, updating the edge index to match.
+
+        Parameters
+        ----------
+        G : MultiDiGraph
+            The graph to remove an edge from.
+        edge_index : dict
+            A mapping from integers to the edges of the graph.
+        n : node
+            The node to remove from `G`.
+        """
+        keys = set()
+        for keydict in G.pred[n].values():
+            keys.update(keydict)
+        for keydict in G.succ[n].values():
+            keys.update(keydict)
+
+        for key in keys:
+            del edge_index[key]
+
+        G.remove_node(n)
+
+    #######################
+    ### Algorithm Setup ###
+    #######################
+
+    # Pick an attribute name that the original graph is unlikely to have
+    candidate_attr = "edmonds' secret candidate attribute"
+    new_node_base_name = "edmonds new node base name "
+
+    G_original = G
+    G = nx.MultiDiGraph()
+    G.__networkx_cache__ = None  # Disable caching
+
+    # A dict to reliably track mutations to the edges using the key of the edge.
+    G_edge_index = {}
+    # Each edge is given an arbitrary numerical key
+    for key, (u, v, data) in enumerate(G_original.edges(data=True)):
+        d = {attr: data.get(attr, default)}
+
+        if data.get(partition) is not None:
+            d[partition] = data.get(partition)
+
+        if preserve_attrs:
+            for d_k, d_v in data.items():
+                if d_k != attr:
+                    d[d_k] = d_v
+
+        edmonds_add_edge(G, G_edge_index, u, v, key, **d)
+
+    level = 0  # Stores the number of contracted nodes
+
+    # These are the buckets from the paper.
+    #
+    # In the paper, G^i are modified versions of the original graph.
+    # D^i and E^i are the nodes and edges of the maximal edges that are
+    # consistent with G^i. In this implementation, D^i and E^i are stored
+    # together as the graph B^i. We will have strictly more B^i than the
+    # paper will have.
+    #
+    # Note that the data in graphs and branchings are tuples with the graph as
+    # the first element and the edge index as the second.
+    B = nx.MultiDiGraph()
+    B_edge_index = {}
+    graphs = []  # G^i list
+    branchings = []  # B^i list
+    selected_nodes = set()  # D^i bucket
+    uf = nx.utils.UnionFind()
+
+    # A list of lists of edge indices. Each list is a circuit for graph G^i.
+    # Note the edge list is not required to be a circuit in G^0.
+    circuits = []
+
+    # Stores the index of the minimum edge in the circuit found in G^i and B^i.
+    # The ordering of the edges seems to preserve the weight ordering from
+    # G^0, so even if an edge list is not a circuit in G^0, its minimum edge
+    # still corresponds to the minimum-weight edge of that circuit in G^0
+    # (despite the weights being different).
+    minedge_circuit = []
+
+    ###########################
+    ### Algorithm Structure ###
+    ###########################
+
+    # Each step listed in the algorithm is an inner function. Thus, the overall
+    # loop structure is:
+    #
+    # while True:
+    #     step_I1()
+    #     if cycle detected:
+    #         step_I2()
+    #     elif every node of G is in D and E is a branching:
+    #         break
+
+    ##################################
+    ### Algorithm Helper Functions ###
+    ##################################
+
+    def edmonds_find_desired_edge(v):
+        """
+        Find the edge directed towards v with maximal weight.
+ + If an edge partition exists in this graph, return the included + edge if it exists and never return any excluded edge. + + Note: There can only be one included edge for each vertex otherwise + the edge partition is empty. + + Parameters + ---------- + v : node + The node to search for the maximal weight incoming edge. + """ + edge = None + max_weight = -INF + for u, _, key, data in G.in_edges(v, data=True, keys=True): + # Skip excluded edges + if data.get(partition) == nx.EdgePartition.EXCLUDED: + continue + + new_weight = data[attr] + + # Return the included edge + if data.get(partition) == nx.EdgePartition.INCLUDED: + max_weight = new_weight + edge = (u, v, key, new_weight, data) + break + + # Find the best open edge + if new_weight > max_weight: + max_weight = new_weight + edge = (u, v, key, new_weight, data) + + return edge, max_weight + + def edmonds_step_I2(v, desired_edge, level): + """ + Perform step I2 from Edmonds' paper + + First, check if the last step I1 created a cycle. If it did not, do nothing. + If it did, store the cycle for later reference and contract it. + + Parameters + ---------- + v : node + The current node to consider + desired_edge : edge + The minimum desired edge to remove from the cycle. + level : int + The current level, i.e. the number of cycles that have already been removed. + """ + u = desired_edge[0] + + Q_nodes = nx.shortest_path(B, v, u) + Q_edges = [ + list(B[Q_nodes[i]][vv].keys())[0] for i, vv in enumerate(Q_nodes[1:]) + ] + Q_edges.append(desired_edge[2]) # Add the new edge key to complete the circuit + + # Get the edge in the circuit with the minimum weight. + # Also, save the incoming weights for each node. + minweight = INF + minedge = None + Q_incoming_weight = {} + for edge_key in Q_edges: + u, v, data = B_edge_index[edge_key] + w = data[attr] + # We cannot remove an included edge, even if it is the + # minimum edge in the circuit + Q_incoming_weight[v] = w + if data.get(partition) == nx.EdgePartition.INCLUDED: + continue + if w < minweight: + minweight = w + minedge = edge_key + + circuits.append(Q_edges) + minedge_circuit.append(minedge) + graphs.append((G.copy(), G_edge_index.copy())) + branchings.append((B.copy(), B_edge_index.copy())) + + # Mutate the graph to contract the circuit + new_node = new_node_base_name + str(level) + G.add_node(new_node) + new_edges = [] + for u, v, key, data in G.edges(data=True, keys=True): + if u in Q_incoming_weight: + if v in Q_incoming_weight: + # Circuit edge. For the moment do nothing, + # eventually it will be removed. + continue + else: + # Outgoing edge from a node in the circuit. + # Make it come from the new node instead + dd = data.copy() + new_edges.append((new_node, v, key, dd)) + else: + if v in Q_incoming_weight: + # Incoming edge to the circuit. + # Update it's weight + w = data[attr] + w += minweight - Q_incoming_weight[v] + dd = data.copy() + dd[attr] = w + new_edges.append((u, new_node, key, dd)) + else: + # Outside edge. No modification needed + continue + + for node in Q_nodes: + edmonds_remove_node(G, G_edge_index, node) + edmonds_remove_node(B, B_edge_index, node) + + selected_nodes.difference_update(set(Q_nodes)) + + for u, v, key, data in new_edges: + edmonds_add_edge(G, G_edge_index, u, v, key, **data) + if candidate_attr in data: + del data[candidate_attr] + edmonds_add_edge(B, B_edge_index, u, v, key, **data) + uf.union(u, v) + + def is_root(G, u, edgekeys): + """ + Returns True if `u` is a root node in G. 
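The contraction bookkeeping above is easiest to check with hypothetical numbers. Say the circuit's internal incoming weights are Q_incoming_weight = {a: 5, b: 2}, so minweight is 2 (assuming neither circuit edge is INCLUDED). An external edge of weight 7 entering b is stored on the contracted node with weight 7 + 2 - 2 = 7, while the same edge entering a becomes 7 + 2 - 5 = 4: choosing it would force the circuit to give up its weight-5 edge into a instead of the default minimum-weight edge, and `w += minweight - Q_incoming_weight[v]` prices in exactly that difference.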
+ + Node `u` is a root node if its in-degree over the specified edges is zero. + + Parameters + ---------- + G : Graph + The current graph. + u : node + The node in `G` to check if it is a root. + edgekeys : iterable of edges + The edges for which to check if `u` is a root of. + """ + if u not in G: + raise Exception(f"{u!r} not in G") + + for v in G.pred[u]: + for edgekey in G.pred[u][v]: + if edgekey in edgekeys: + return False, edgekey + else: + return True, None + + nodes = iter(list(G.nodes)) + while True: + try: + v = next(nodes) + except StopIteration: + # If there are no more new nodes to consider, then we should + # meet stopping condition (b) from the paper: + # (b) every node of G^i is in D^i and E^i is a branching + assert len(G) == len(B) + if len(B): + assert is_branching(B) + + graphs.append((G.copy(), G_edge_index.copy())) + branchings.append((B.copy(), B_edge_index.copy())) + circuits.append([]) + minedge_circuit.append(None) + + break + else: + ##################### + ### BEGIN STEP I1 ### + ##################### + + # This is a very simple step, so I don't think it needs a method of it's own + if v in selected_nodes: + continue + + selected_nodes.add(v) + B.add_node(v) + desired_edge, desired_edge_weight = edmonds_find_desired_edge(v) + + # There might be no desired edge if all edges are excluded or + # v is the last node to be added to B, the ultimate root of the branching + if desired_edge is not None and desired_edge_weight > 0: + u = desired_edge[0] + # Flag adding the edge will create a circuit before merging the two + # connected components of u and v in B + circuit = uf[u] == uf[v] + dd = {attr: desired_edge_weight} + if desired_edge[4].get(partition) is not None: + dd[partition] = desired_edge[4].get(partition) + + edmonds_add_edge(B, B_edge_index, u, v, desired_edge[2], **dd) + G[u][v][desired_edge[2]][candidate_attr] = True + uf.union(u, v) + + ################### + ### END STEP I1 ### + ################### + + ##################### + ### BEGIN STEP I2 ### + ##################### + + if circuit: + edmonds_step_I2(v, desired_edge, level) + nodes = iter(list(G.nodes())) + level += 1 + + ################### + ### END STEP I2 ### + ################### + + ##################### + ### BEGIN STEP I3 ### + ##################### + + # Create a new graph of the same class as the input graph + H = G_original.__class__() + + # Start with the branching edges in the last level. + edges = set(branchings[level][1]) + while level > 0: + level -= 1 + + # The current level is i, and we start counting from 0. + # + # We need the node at level i+1 that results from merging a circuit + # at level i. basename_0 is the first merged node and this happens + # at level 1. That is basename_0 is a node at level 1 that results + # from merging a circuit at level 0. + + merged_node = new_node_base_name + str(level) + circuit = circuits[level] + isroot, edgekey = is_root(graphs[level + 1][0], merged_node, edges) + edges.update(circuit) + + if isroot: + minedge = minedge_circuit[level] + if minedge is None: + raise Exception + + # Remove the edge in the cycle with minimum weight + edges.remove(minedge) + else: + # We have identified an edge at the next higher level that + # transitions into the merged node at this level. That edge + # transitions to some corresponding node at the current level. + # + # We want to remove an edge from the cycle that transitions + # into the corresponding node, otherwise the result would not + # be a branching. 
+
+            G, G_edge_index = graphs[level]
+            target = G_edge_index[edgekey][1]
+            for edgekey in circuit:
+                u, v, data = G_edge_index[edgekey]
+                if v == target:
+                    break
+            else:
+                raise Exception("Couldn't find edge incoming to merged node.")
+
+            edges.remove(edgekey)
+
+    H.add_nodes_from(G_original)
+    for edgekey in edges:
+        u, v, d = graphs[0][1][edgekey]
+        dd = {attr: d[attr]}
+
+        if preserve_attrs:
+            for key, value in d.items():
+                if key not in [attr, candidate_attr]:
+                    dd[key] = value
+
+        H.add_edge(u, v, **dd)
+
+    ###################
+    ### END STEP I3 ###
+    ###################
+
+    return H
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def minimum_branching(
+    G, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    for _, _, d in G.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(G)
+
+    B = maximum_branching(G, attr, default, preserve_attrs, partition)
+
+    for _, _, d in G.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(G)
+
+    for _, _, d in B.edges(data=True):
+        d[attr] = -d.get(attr, default)
+    nx._clear_cache(B)
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def minimal_branching(
+    G, /, *, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    """
+    Returns a minimal branching from `G`.
+
+    A minimal branching is a branching similar to a minimal arborescence but
+    without the requirement that the result is actually a spanning arborescence.
+    This allows minimal branchings to be computed over graphs which may not
+    have an arborescence (such as graphs with multiple components).
+
+    Parameters
+    ----------
+    G : (multi)digraph-like
+        The graph to be searched.
+    attr : str
+        The edge attribute used in determining optimality.
+    default : float
+        The value of the edge attribute used if an edge does not have
+        the attribute `attr`.
+    preserve_attrs : bool
+        If True, preserve the other attributes of the original graph (that are not
+        passed to `attr`)
+    partition : str
+        The key for the edge attribute containing the partition
+        data on the graph. Edges can be included, excluded or open using the
+        `EdgePartition` enum.
+
+    Returns
+    -------
+    B : (multi)digraph-like
+        A minimal branching.
+    """
+    max_weight = -INF
+    min_weight = INF
+    for _, _, w in G.edges(data=attr, default=default):
+        if w > max_weight:
+            max_weight = w
+        if w < min_weight:
+            min_weight = w
+
+    for _, _, d in G.edges(data=True):
+        # Transform the weights so that the minimum weight is larger than
+        # the difference between the max and min weights. This is important
+        # in order to prevent the edge weights from becoming negative during
+        # computation. For example, with weights {2, 3, 5} we get min=2 and
+        # max=5, so w -> 5 + 1 + (5 - 2) - w maps {2, 3, 5} to {7, 6, 4}:
+        # every transformed weight is strictly positive and the order is
+        # reversed.
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(G)
+
+    B = maximum_branching(G, attr, default, preserve_attrs, partition)
+
+    # Reverse the weight transformations
+    for _, _, d in G.edges(data=True):
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(G)
+
+    for _, _, d in B.edges(data=True):
+        d[attr] = max_weight + 1 + (max_weight - min_weight) - d.get(attr, default)
+    nx._clear_cache(B)
+
+    return B
+
+
+@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True)
+def maximum_spanning_arborescence(
+    G, attr="weight", default=1, preserve_attrs=False, partition=None
+):
+    # In order to use the same algorithm as the maximum branching, we need to adjust
+    # the weights of the graph.
The branching algorithm can choose to not include an + # edge if it doesn't help find a branching, mainly triggered by edges with negative + # weights. + # + # To prevent this from happening while trying to find a spanning arborescence, we + # just have to tweak the edge weights so that they are all positive and cannot + # become negative during the branching algorithm, find the maximum branching and + # then return them to their original values. + + min_weight = INF + max_weight = -INF + for _, _, w in G.edges(data=attr, default=default): + if w < min_weight: + min_weight = w + if w > max_weight: + max_weight = w + + for _, _, d in G.edges(data=True): + d[attr] = d.get(attr, default) - min_weight + 1 - (min_weight - max_weight) + nx._clear_cache(G) + + B = maximum_branching(G, attr, default, preserve_attrs, partition) + + for _, _, d in G.edges(data=True): + d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight) + nx._clear_cache(G) + + for _, _, d in B.edges(data=True): + d[attr] = d.get(attr, default) + min_weight - 1 + (min_weight - max_weight) + nx._clear_cache(B) + + if not is_arborescence(B): + raise nx.exception.NetworkXException("No maximum spanning arborescence in G.") + + return B + + +@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True, returns_graph=True) +def minimum_spanning_arborescence( + G, attr="weight", default=1, preserve_attrs=False, partition=None +): + B = minimal_branching( + G, + attr=attr, + default=default, + preserve_attrs=preserve_attrs, + partition=partition, + ) + + if not is_arborescence(B): + raise nx.exception.NetworkXException("No minimum spanning arborescence in G.") + + return B + + +docstring_branching = """ +Returns a {kind} {style} from G. + +Parameters +---------- +G : (multi)digraph-like + The graph to be searched. +attr : str + The edge attribute used to in determining optimality. +default : float + The value of the edge attribute used if an edge does not have + the attribute `attr`. +preserve_attrs : bool + If True, preserve the other attributes of the original graph (that are not + passed to `attr`) +partition : str + The key for the edge attribute containing the partition + data on the graph. Edges can be included, excluded or open using the + `EdgePartition` enum. + +Returns +------- +B : (multi)digraph-like + A {kind} {style}. +""" + +docstring_arborescence = ( + docstring_branching + + """ +Raises +------ +NetworkXException + If the graph does not contain a {kind} {style}. + +""" +) + +maximum_branching.__doc__ = docstring_branching.format( + kind="maximum", style="branching" +) + +minimum_branching.__doc__ = ( + docstring_branching.format(kind="minimum", style="branching") + + """ +See Also +-------- + minimal_branching +""" +) + +maximum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="maximum", style="spanning arborescence" +) + +minimum_spanning_arborescence.__doc__ = docstring_arborescence.format( + kind="minimum", style="spanning arborescence" +) + + +class ArborescenceIterator: + """ + Iterate over all spanning arborescences of a graph in either increasing or + decreasing cost. + + Notes + ----- + This iterator uses the partition scheme from [1]_ (included edges, + excluded edges and open edges). It generates minimum spanning + arborescences using a modified Edmonds' Algorithm which respects the + partition of edges. For arborescences with the same weight, ties are + broken arbitrarily. + + References + ---------- + .. [1] G.K. Janssens, K. 
Sörensen, An algorithm to generate all spanning + trees in order of increasing cost, Pesquisa Operacional, 2005-08, + Vol. 25 (2), p. 219-229, + https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en + """ + + @dataclass(order=True) + class Partition: + """ + This dataclass represents a partition and stores a dict with the edge + data and the weight of the minimum spanning arborescence of the + partition dict. + """ + + mst_weight: float + partition_dict: dict = field(compare=False) + + def __copy__(self): + return ArborescenceIterator.Partition( + self.mst_weight, self.partition_dict.copy() + ) + + def __init__(self, G, weight="weight", minimum=True, init_partition=None): + """ + Initialize the iterator + + Parameters + ---------- + G : nx.DiGraph + The directed graph which we need to iterate trees over + + weight : String, default = "weight" + The edge attribute used to store the weight of the edge + + minimum : bool, default = True + Return the trees in increasing order while true and decreasing order + while false. + + init_partition : tuple, default = None + In the case that certain edges have to be included or excluded from + the arborescences, `init_partition` should be in the form + `(included_edges, excluded_edges)` where each edges is a + `(u, v)`-tuple inside an iterable such as a list or set. + + """ + self.G = G.copy() + self.weight = weight + self.minimum = minimum + self.method = ( + minimum_spanning_arborescence if minimum else maximum_spanning_arborescence + ) + # Randomly create a key for an edge attribute to hold the partition data + self.partition_key = ( + "ArborescenceIterators super secret partition attribute name" + ) + if init_partition is not None: + partition_dict = {} + for e in init_partition[0]: + partition_dict[e] = nx.EdgePartition.INCLUDED + for e in init_partition[1]: + partition_dict[e] = nx.EdgePartition.EXCLUDED + self.init_partition = ArborescenceIterator.Partition(0, partition_dict) + else: + self.init_partition = None + + def __iter__(self): + """ + Returns + ------- + ArborescenceIterator + The iterator object for this graph + """ + self.partition_queue = PriorityQueue() + self._clear_partition(self.G) + + # Write the initial partition if it exists. + if self.init_partition is not None: + self._write_partition(self.init_partition) + + mst_weight = self.method( + self.G, + self.weight, + partition=self.partition_key, + preserve_attrs=True, + ).size(weight=self.weight) + + self.partition_queue.put( + self.Partition( + mst_weight if self.minimum else -mst_weight, + ( + {} + if self.init_partition is None + else self.init_partition.partition_dict + ), + ) + ) + + return self + + def __next__(self): + """ + Returns + ------- + (multi)Graph + The spanning tree of next greatest weight, which ties broken + arbitrarily. + """ + if self.partition_queue.empty(): + del self.G, self.partition_queue + raise StopIteration + + partition = self.partition_queue.get() + self._write_partition(partition) + next_arborescence = self.method( + self.G, + self.weight, + partition=self.partition_key, + preserve_attrs=True, + ) + self._partition(partition, next_arborescence) + + self._clear_partition(next_arborescence) + return next_arborescence + + def _partition(self, partition, partition_arborescence): + """ + Create new partitions based of the minimum spanning tree of the + current minimum partition. + + Parameters + ---------- + partition : Partition + The Partition instance used to generate the current minimum spanning + tree. 
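Neither the class docstring nor the module carries a usage example, so here is a minimal sketch (toy weights of our own choosing); with the default `minimum=True` the arborescences come out in nondecreasing total weight:

import networkx as nx
from networkx.algorithms.tree.branchings import ArborescenceIterator

G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 5), (0, 2, 4), (1, 2, 1), (2, 1, 2)])
for arb in ArborescenceIterator(G):
    print(sorted(arb.edges()), arb.size(weight="weight"))
# Three arborescences rooted at 0, with total weights 6.0, 6.0 and 9.0.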
+ partition_arborescence : nx.Graph + The minimum spanning arborescence of the input partition. + """ + # create two new partitions with the data from the input partition dict + p1 = self.Partition(0, partition.partition_dict.copy()) + p2 = self.Partition(0, partition.partition_dict.copy()) + for e in partition_arborescence.edges: + # determine if the edge was open or included + if e not in partition.partition_dict: + # This is an open edge + p1.partition_dict[e] = nx.EdgePartition.EXCLUDED + p2.partition_dict[e] = nx.EdgePartition.INCLUDED + + self._write_partition(p1) + try: + p1_mst = self.method( + self.G, + self.weight, + partition=self.partition_key, + preserve_attrs=True, + ) + + p1_mst_weight = p1_mst.size(weight=self.weight) + p1.mst_weight = p1_mst_weight if self.minimum else -p1_mst_weight + self.partition_queue.put(p1.__copy__()) + except nx.NetworkXException: + pass + + p1.partition_dict = p2.partition_dict.copy() + + def _write_partition(self, partition): + """ + Writes the desired partition into the graph to calculate the minimum + spanning tree. Also, if one incoming edge is included, mark all others + as excluded so that if that vertex is merged during Edmonds' algorithm + we cannot still pick another of that vertex's included edges. + + Parameters + ---------- + partition : Partition + A Partition dataclass describing a partition on the edges of the + graph. + """ + for u, v, d in self.G.edges(data=True): + if (u, v) in partition.partition_dict: + d[self.partition_key] = partition.partition_dict[(u, v)] + else: + d[self.partition_key] = nx.EdgePartition.OPEN + nx._clear_cache(self.G) + + for n in self.G: + included_count = 0 + excluded_count = 0 + for u, v, d in self.G.in_edges(nbunch=n, data=True): + if d.get(self.partition_key) == nx.EdgePartition.INCLUDED: + included_count += 1 + elif d.get(self.partition_key) == nx.EdgePartition.EXCLUDED: + excluded_count += 1 + # Check that if there is an included edges, all other incoming ones + # are excluded. If not fix it! + if included_count == 1 and excluded_count != self.G.in_degree(n) - 1: + for u, v, d in self.G.in_edges(nbunch=n, data=True): + if d.get(self.partition_key) != nx.EdgePartition.INCLUDED: + d[self.partition_key] = nx.EdgePartition.EXCLUDED + + def _clear_partition(self, G): + """ + Removes partition data from the graph + """ + for u, v, d in G.edges(data=True): + if self.partition_key in d: + del d[self.partition_key] + nx._clear_cache(self.G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py new file mode 100644 index 0000000..276c87d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/coding.py @@ -0,0 +1,413 @@ +"""Functions for encoding and decoding trees. + +Since a tree is a highly restricted form of graph, it can be represented +concisely in several ways. This module includes functions for encoding +and decoding trees in the form of nested tuples and Prüfer +sequences. The former requires a rooted tree, whereas the latter can be +applied to unrooted trees. Furthermore, there is a bijection from Prüfer +sequences to labeled trees. 
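Rounding off the iterator: the partition plumbing above (where `_write_partition` marks the siblings of an INCLUDED incoming edge as EXCLUDED) is what makes `init_partition` constraints stick. A sketch, reusing the hypothetical graph from the previous example, that forces edge (0, 1) into every generated arborescence:

import networkx as nx
from networkx.algorithms.tree.branchings import ArborescenceIterator

G = nx.DiGraph()
G.add_weighted_edges_from([(0, 1, 5), (0, 2, 4), (1, 2, 1), (2, 1, 2)])
for arb in ArborescenceIterator(G, init_partition=([(0, 1)], [])):
    print(sorted(arb.edges()))
# Only the two arborescences containing (0, 1) are generated.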
+ +""" + +from collections import Counter +from itertools import chain + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "from_nested_tuple", + "from_prufer_sequence", + "NotATree", + "to_nested_tuple", + "to_prufer_sequence", +] + + +class NotATree(nx.NetworkXException): + """Raised when a function expects a tree (that is, a connected + undirected graph with no cycles) but gets a non-tree graph as input + instead. + + """ + + +@not_implemented_for("directed") +@nx._dispatchable(graphs="T") +def to_nested_tuple(T, root, canonical_form=False): + """Returns a nested tuple representation of the given tree. + + The nested tuple representation of a tree is defined + recursively. The tree with one node and no edges is represented by + the empty tuple, ``()``. A tree with ``k`` subtrees is represented + by a tuple of length ``k`` in which each element is the nested tuple + representation of a subtree. + + Parameters + ---------- + T : NetworkX graph + An undirected graph object representing a tree. + + root : node + The node in ``T`` to interpret as the root of the tree. + + canonical_form : bool + If ``True``, each tuple is sorted so that the function returns + a canonical form for rooted trees. This means "lighter" subtrees + will appear as nested tuples before "heavier" subtrees. In this + way, each isomorphic rooted tree has the same nested tuple + representation. + + Returns + ------- + tuple + A nested tuple representation of the tree. + + Notes + ----- + This function is *not* the inverse of :func:`from_nested_tuple`; the + only guarantee is that the rooted trees are isomorphic. + + See also + -------- + from_nested_tuple + to_prufer_sequence + + Examples + -------- + The tree need not be a balanced binary tree:: + + >>> T = nx.Graph() + >>> T.add_edges_from([(0, 1), (0, 2), (0, 3)]) + >>> T.add_edges_from([(1, 4), (1, 5)]) + >>> T.add_edges_from([(3, 6), (3, 7)]) + >>> root = 0 + >>> nx.to_nested_tuple(T, root) + (((), ()), (), ((), ())) + + Continuing the above example, if ``canonical_form`` is ``True``, the + nested tuples will be sorted:: + + >>> nx.to_nested_tuple(T, root, canonical_form=True) + ((), ((), ()), ((), ())) + + Even the path graph can be interpreted as a tree:: + + >>> T = nx.path_graph(4) + >>> root = 0 + >>> nx.to_nested_tuple(T, root) + ((((),),),) + + """ + + def _make_tuple(T, root, _parent): + """Recursively compute the nested tuple representation of the + given rooted tree. + + ``_parent`` is the parent node of ``root`` in the supertree in + which ``T`` is a subtree, or ``None`` if ``root`` is the root of + the supertree. This argument is used to determine which + neighbors of ``root`` are children and which is the parent. + + """ + # Get the neighbors of `root` that are not the parent node. We + # are guaranteed that `root` is always in `T` by construction. + children = set(T[root]) - {_parent} + if len(children) == 0: + return () + nested = (_make_tuple(T, v, root) for v in children) + if canonical_form: + nested = sorted(nested) + return tuple(nested) + + # Do some sanity checks on the input. + if not nx.is_tree(T): + raise nx.NotATree("provided graph is not a tree") + if root not in T: + raise nx.NodeNotFound(f"Graph {T} contains no node {root}") + + return _make_tuple(T, root, None) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_nested_tuple(sequence, sensible_relabeling=False): + """Returns the rooted tree corresponding to the given nested tuple. 
+ + The nested tuple representation of a tree is defined + recursively. The tree with one node and no edges is represented by + the empty tuple, ``()``. A tree with ``k`` subtrees is represented + by a tuple of length ``k`` in which each element is the nested tuple + representation of a subtree. + + Parameters + ---------- + sequence : tuple + A nested tuple representing a rooted tree. + + sensible_relabeling : bool + Whether to relabel the nodes of the tree so that nodes are + labeled in increasing order according to their breadth-first + search order from the root node. + + Returns + ------- + NetworkX graph + The tree corresponding to the given nested tuple, whose root + node is node 0. If ``sensible_labeling`` is ``True``, nodes will + be labeled in breadth-first search order starting from the root + node. + + Notes + ----- + This function is *not* the inverse of :func:`to_nested_tuple`; the + only guarantee is that the rooted trees are isomorphic. + + See also + -------- + to_nested_tuple + from_prufer_sequence + + Examples + -------- + Sensible relabeling ensures that the nodes are labeled from the root + starting at 0:: + + >>> balanced = (((), ()), ((), ())) + >>> T = nx.from_nested_tuple(balanced, sensible_relabeling=True) + >>> edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)] + >>> all((u, v) in T.edges() or (v, u) in T.edges() for (u, v) in edges) + True + + """ + + def _make_tree(sequence): + """Recursively creates a tree from the given sequence of nested + tuples. + + This function employs the :func:`~networkx.tree.join` function + to recursively join subtrees into a larger tree. + + """ + # The empty sequence represents the empty tree, which is the + # (unique) graph with a single node. We mark the single node + # with an attribute that indicates that it is the root of the + # graph. + if len(sequence) == 0: + return nx.empty_graph(1) + # For a nonempty sequence, get the subtrees for each child + # sequence and join all the subtrees at their roots. After + # joining the subtrees, the root is node 0. + return nx.tree.join_trees([(_make_tree(child), 0) for child in sequence]) + + # Make the tree and remove the `is_root` node attribute added by the + # helper function. + T = _make_tree(sequence) + if sensible_relabeling: + # Relabel the nodes according to their breadth-first search + # order, starting from the root node (that is, the node 0). + bfs_nodes = chain([0], (v for u, v in nx.bfs_edges(T, 0))) + labels = {v: i for i, v in enumerate(bfs_nodes)} + # We would like to use `copy=False`, but `relabel_nodes` doesn't + # allow a relabel mapping that can't be topologically sorted. + T = nx.relabel_nodes(T, labels) + return T + + +@not_implemented_for("directed") +@nx._dispatchable(graphs="T") +def to_prufer_sequence(T): + r"""Returns the Prüfer sequence of the given tree. + + A *Prüfer sequence* is a list of *n* - 2 numbers between 0 and + *n* - 1, inclusive. The tree corresponding to a given Prüfer + sequence can be recovered by repeatedly joining a node in the + sequence with a node with the smallest potential degree according to + the sequence. + + Parameters + ---------- + T : NetworkX graph + An undirected graph object representing a tree. + + Returns + ------- + list + The Prüfer sequence of the given tree. + + Raises + ------ + NetworkXPointlessConcept + If the number of nodes in `T` is less than two. + + NotATree + If `T` is not a tree. + + KeyError + If the set of nodes in `T` is not {0, …, *n* - 1}. 
+ + Notes + ----- + There is a bijection from labeled trees to Prüfer sequences. This + function is the inverse of the :func:`from_prufer_sequence` + function. + + Sometimes Prüfer sequences use nodes labeled from 1 to *n* instead + of from 0 to *n* - 1. This function requires nodes to be labeled in + the latter form. You can use :func:`~networkx.relabel_nodes` to + relabel the nodes of your tree to the appropriate format. + + This implementation is from [1]_ and has a running time of + $O(n)$. + + See also + -------- + to_nested_tuple + from_prufer_sequence + + References + ---------- + .. [1] Wang, Xiaodong, Lei Wang, and Yingjie Wu. + "An optimal algorithm for Prufer codes." + *Journal of Software Engineering and Applications* 2.02 (2009): 111. + + + Examples + -------- + There is a bijection between Prüfer sequences and labeled trees, so + this function is the inverse of the :func:`from_prufer_sequence` + function: + + >>> edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)] + >>> tree = nx.Graph(edges) + >>> sequence = nx.to_prufer_sequence(tree) + >>> sequence + [3, 3, 3, 4] + >>> tree2 = nx.from_prufer_sequence(sequence) + >>> list(tree2.edges()) == edges + True + + """ + # Perform some sanity checks on the input. + n = len(T) + if n < 2: + msg = "Prüfer sequence undefined for trees with fewer than two nodes" + raise nx.NetworkXPointlessConcept(msg) + if not nx.is_tree(T): + raise nx.NotATree("provided graph is not a tree") + if set(T) != set(range(n)): + raise KeyError("tree must have node labels {0, ..., n - 1}") + + degree = dict(T.degree()) + + def parents(u): + return next(v for v in T[u] if degree[v] > 1) + + index = u = next(k for k in range(n) if degree[k] == 1) + result = [] + for i in range(n - 2): + v = parents(u) + result.append(v) + degree[v] -= 1 + if v < index and degree[v] == 1: + u = v + else: + index = u = next(k for k in range(index + 1, n) if degree[k] == 1) + return result + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_prufer_sequence(sequence): + r"""Returns the tree corresponding to the given Prüfer sequence. + + A *Prüfer sequence* is a list of *n* - 2 numbers between 0 and + *n* - 1, inclusive. The tree corresponding to a given Prüfer + sequence can be recovered by repeatedly joining a node in the + sequence with a node with the smallest potential degree according to + the sequence. + + Parameters + ---------- + sequence : list + A Prüfer sequence, which is a list of *n* - 2 integers between + zero and *n* - 1, inclusive. + + Returns + ------- + NetworkX graph + The tree corresponding to the given Prüfer sequence. + + Raises + ------ + NetworkXError + If the Prüfer sequence is not valid. + + Notes + ----- + There is a bijection from labeled trees to Prüfer sequences. This + function is the inverse of the :func:`from_prufer_sequence` function. + + Sometimes Prüfer sequences use nodes labeled from 1 to *n* instead + of from 0 to *n* - 1. This function requires nodes to be labeled in + the latter form. You can use :func:`networkx.relabel_nodes` to + relabel the nodes of your tree to the appropriate format. + + This implementation is from [1]_ and has a running time of + $O(n)$. + + References + ---------- + .. [1] Wang, Xiaodong, Lei Wang, and Yingjie Wu. + "An optimal algorithm for Prufer codes." + *Journal of Software Engineering and Applications* 2.02 (2009): 111. 
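The bijection stated in these docstrings has a classic corollary worth a quick check: decoding every possible sequence recovers Cayley's count of n^(n-2) labeled trees. A sketch (our verification, not part of the module):

from itertools import product

import networkx as nx

n = 4
trees = {
    frozenset(frozenset(e) for e in nx.from_prufer_sequence(list(seq)).edges())
    for seq in product(range(n), repeat=n - 2)
}
print(len(trees))  # 16 == n ** (n - 2)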
+ + + See also + -------- + from_nested_tuple + to_prufer_sequence + + Examples + -------- + There is a bijection between Prüfer sequences and labeled trees, so + this function is the inverse of the :func:`to_prufer_sequence` + function: + + >>> edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)] + >>> tree = nx.Graph(edges) + >>> sequence = nx.to_prufer_sequence(tree) + >>> sequence + [3, 3, 3, 4] + >>> tree2 = nx.from_prufer_sequence(sequence) + >>> list(tree2.edges()) == edges + True + + """ + n = len(sequence) + 2 + # `degree` stores the remaining degree (plus one) for each node. The + # degree of a node in the decoded tree is one more than the number + # of times it appears in the code. + degree = Counter(chain(sequence, range(n))) + T = nx.empty_graph(n) + # `not_orphaned` is the set of nodes that have a parent in the + # tree. After the loop, there should be exactly two nodes that are + # not in this set. + not_orphaned = set() + index = u = next(k for k in range(n) if degree[k] == 1) + for v in sequence: + # check the validity of the prufer sequence + if v < 0 or v > n - 1: + raise nx.NetworkXError( + f"Invalid Prufer sequence: Values must be between 0 and {n - 1}, got {v}" + ) + T.add_edge(u, v) + not_orphaned.add(u) + degree[v] -= 1 + if v < index and degree[v] == 1: + u = v + else: + index = u = next(k for k in range(index + 1, n) if degree[k] == 1) + # At this point, there must be exactly two orphaned nodes; join them. + orphans = set(T) - not_orphaned + u, v = orphans + T.add_edge(u, v) + return T diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py new file mode 100644 index 0000000..c8b8f24 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/decomposition.py @@ -0,0 +1,88 @@ +r"""Function for computing a junction tree of a graph.""" + +from itertools import combinations + +import networkx as nx +from networkx.algorithms import chordal_graph_cliques, complete_to_chordal_graph, moral +from networkx.utils import not_implemented_for + +__all__ = ["junction_tree"] + + +@not_implemented_for("multigraph") +@nx._dispatchable(returns_graph=True) +def junction_tree(G): + r"""Returns a junction tree of a given graph. + + A junction tree (or clique tree) is constructed from a (un)directed graph G. + The tree is constructed based on a moralized and triangulated version of G. + The tree's nodes consist of maximal cliques and sepsets of the revised graph. + The sepset of two cliques is the intersection of the nodes of these cliques, + e.g. the sepset of (A,B,C) and (A,C,E,F) is (A,C). These nodes are often called + "variables" in this literature. The tree is bipartite with each sepset + connected to its two cliques. + + Junction Trees are not unique as the order of clique consideration determines + which sepsets are included. + + The junction tree algorithm consists of five steps [1]_: + + 1. Moralize the graph + 2. Triangulate the graph + 3. Find maximal cliques + 4. Build the tree from cliques, connecting cliques with shared + nodes, set edge-weight to number of shared variables + 5. Find maximum spanning tree + + + Parameters + ---------- + G : networkx.Graph + Directed or undirected graph. + + Returns + ------- + junction_tree : networkx.Graph + The corresponding junction tree of `G`. + + Raises + ------ + NetworkXNotImplemented + Raised if `G` is an instance of `MultiGraph` or `MultiDiGraph`. + + References + ---------- + .. 
[1] Junction tree algorithm: + https://en.wikipedia.org/wiki/Junction_tree_algorithm + + .. [2] Finn V. Jensen and Frank Jensen. 1994. Optimal + junction trees. In Proceedings of the Tenth international + conference on Uncertainty in artificial intelligence (UAI’94). + Morgan Kaufmann Publishers Inc., San Francisco, CA, USA, 360–366. + """ + + clique_graph = nx.Graph() + + if G.is_directed(): + G = moral.moral_graph(G) + chordal_graph, _ = complete_to_chordal_graph(G) + + cliques = [tuple(sorted(i)) for i in chordal_graph_cliques(chordal_graph)] + clique_graph.add_nodes_from(cliques, type="clique") + + for edge in combinations(cliques, 2): + set_edge_0 = set(edge[0]) + set_edge_1 = set(edge[1]) + if not set_edge_0.isdisjoint(set_edge_1): + sepset = tuple(sorted(set_edge_0.intersection(set_edge_1))) + clique_graph.add_edge(edge[0], edge[1], weight=len(sepset), sepset=sepset) + + junction_tree = nx.maximum_spanning_tree(clique_graph) + + for edge in list(junction_tree.edges(data=True)): + junction_tree.add_node(edge[2]["sepset"], type="sepset") + junction_tree.add_edge(edge[0], edge[2]["sepset"]) + junction_tree.add_edge(edge[1], edge[2]["sepset"]) + junction_tree.remove_edge(edge[0], edge[1]) + + return junction_tree diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py new file mode 100644 index 0000000..83402ec --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/mst.py @@ -0,0 +1,1283 @@ +""" +Algorithms for calculating min/max spanning trees/forests. + +""" + +from dataclasses import dataclass, field +from enum import Enum +from heapq import heappop, heappush +from itertools import count +from math import isnan +from operator import itemgetter +from queue import PriorityQueue + +import networkx as nx +from networkx.utils import UnionFind, not_implemented_for, py_random_state + +__all__ = [ + "minimum_spanning_edges", + "maximum_spanning_edges", + "minimum_spanning_tree", + "maximum_spanning_tree", + "number_of_spanning_trees", + "random_spanning_tree", + "partition_spanning_tree", + "EdgePartition", + "SpanningTreeIterator", +] + + +class EdgePartition(Enum): + """ + An enum to store the state of an edge partition. The enum is written to the + edges of a graph before being pasted to `kruskal_mst_edges`. Options are: + + - EdgePartition.OPEN + - EdgePartition.INCLUDED + - EdgePartition.EXCLUDED + """ + + OPEN = 0 + INCLUDED = 1 + EXCLUDED = 2 + + +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") +def boruvka_mst_edges( + G, minimum=True, weight="weight", keys=False, data=True, ignore_nan=False +): + """Iterate over edges of a Borůvka's algorithm min/max spanning tree. + + Parameters + ---------- + G : NetworkX Graph + The edges of `G` must have distinct weights, + otherwise the edges may not form a tree. + + minimum : bool (default: True) + Find the minimum (True) or maximum (False) spanning tree. + + weight : string (default: 'weight') + The name of the edge attribute holding the edge weights. + + keys : bool (default: True) + This argument is ignored since this function is not + implemented for multigraphs; it exists only for consistency + with the other minimum spanning tree functions. + + data : bool (default: True) + Flag for whether to yield edge attribute dicts. + If True, yield edges `(u, v, d)`, where `d` is the attribute dict. + If False, yield edges `(u, v)`. 
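`junction_tree` ships without a doctest, so a minimal sketch (a path graph of our choosing, which is already chordal) shows the bipartite clique/sepset structure described above:

import networkx as nx

G = nx.Graph([("A", "B"), ("B", "C"), ("C", "D")])
jt = nx.junction_tree(G)
print(sorted(jt.nodes(data="type")))
# [(('A', 'B'), 'clique'), (('B',), 'sepset'), (('B', 'C'), 'clique'),
#  (('C',), 'sepset'), (('C', 'D'), 'clique')]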
+ + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + """ + # Initialize a forest, assuming initially that it is the discrete + # partition of the nodes of the graph. + forest = UnionFind(G) + + def best_edge(component): + """Returns the optimum (minimum or maximum) edge on the edge + boundary of the given set of nodes. + + A return value of ``None`` indicates an empty boundary. + + """ + sign = 1 if minimum else -1 + minwt = float("inf") + boundary = None + for e in nx.edge_boundary(G, component, data=True): + wt = e[-1].get(weight, 1) * sign + if isnan(wt): + if ignore_nan: + continue + msg = f"NaN found as an edge weight. Edge {e}" + raise ValueError(msg) + if wt < minwt: + minwt = wt + boundary = e + return boundary + + # Determine the optimum edge in the edge boundary of each component + # in the forest. + best_edges = (best_edge(component) for component in forest.to_sets()) + best_edges = [edge for edge in best_edges if edge is not None] + # If each entry was ``None``, that means the graph was disconnected, + # so we are done generating the forest. + while best_edges: + # Determine the optimum edge in the edge boundary of each + # component in the forest. + # + # This must be a sequence, not an iterator. In this list, the + # same edge may appear twice, in different orientations (but + # that's okay, since a union operation will be called on the + # endpoints the first time it is seen, but not the second time). + # + # Any ``None`` indicates that the edge boundary for that + # component was empty, so that part of the forest has been + # completed. + # + # TODO This can be parallelized, both in the outer loop over + # each component in the forest and in the computation of the + # minimum. (Same goes for the identical lines outside the loop.) + best_edges = (best_edge(component) for component in forest.to_sets()) + best_edges = [edge for edge in best_edges if edge is not None] + # Join trees in the forest using the best edges, and yield that + # edge, since it is part of the spanning tree. + # + # TODO This loop can be parallelized, to an extent (the union + # operation must be atomic). + for u, v, d in best_edges: + if forest[u] != forest[v]: + if data: + yield u, v, d + else: + yield u, v + forest.union(u, v) + + +@nx._dispatchable( + edge_attrs={"weight": None, "partition": None}, preserve_edge_attrs="data" +) +def kruskal_mst_edges( + G, minimum, weight="weight", keys=True, data=True, ignore_nan=False, partition=None +): + """ + Iterate over edge of a Kruskal's algorithm min/max spanning tree. + + Parameters + ---------- + G : NetworkX Graph + The graph holding the tree of interest. + + minimum : bool (default: True) + Find the minimum (True) or maximum (False) spanning tree. + + weight : string (default: 'weight') + The name of the edge attribute holding the edge weights. + + keys : bool (default: True) + If `G` is a multigraph, `keys` controls whether edge keys ar yielded. + Otherwise `keys` is ignored. + + data : bool (default: True) + Flag for whether to yield edge attribute dicts. + If True, yield edges `(u, v, d)`, where `d` is the attribute dict. + If False, yield edges `(u, v)`. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. 
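Borůvka's variant above leans on the distinct-weights assumption in its docstring; a sketch through the public wrapper (a toy graph of ours with strictly distinct weights):

import networkx as nx

G = nx.Graph()
G.add_weighted_edges_from([(0, 1, 1), (1, 2, 2), (0, 2, 3), (2, 3, 4)])
print(sorted(nx.minimum_spanning_edges(G, algorithm="boruvka", data=False)))
# [(0, 1), (1, 2), (2, 3)]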
+ + partition : string (default: None) + The name of the edge attribute holding the partition data, if it exists. + Partition data is written to the edges using the `EdgePartition` enum. + If a partition exists, all included edges and none of the excluded edges + will appear in the final tree. Open edges may or may not be used. + + Yields + ------ + edge tuple + The edges as discovered by Kruskal's method. Each edge can + take the following forms: `(u, v)`, `(u, v, d)` or `(u, v, k, d)` + depending on the `key` and `data` parameters + """ + subtrees = UnionFind() + if G.is_multigraph(): + edges = G.edges(keys=True, data=True) + else: + edges = G.edges(data=True) + + """ + Sort the edges of the graph with respect to the partition data. + Edges are returned in the following order: + + * Included edges + * Open edges from smallest to largest weight + * Excluded edges + """ + included_edges = [] + open_edges = [] + for e in edges: + d = e[-1] + wt = d.get(weight, 1) + if isnan(wt): + if ignore_nan: + continue + raise ValueError(f"NaN found as an edge weight. Edge {e}") + + edge = (wt,) + e + if d.get(partition) == EdgePartition.INCLUDED: + included_edges.append(edge) + elif d.get(partition) == EdgePartition.EXCLUDED: + continue + else: + open_edges.append(edge) + + if minimum: + sorted_open_edges = sorted(open_edges, key=itemgetter(0)) + else: + sorted_open_edges = sorted(open_edges, key=itemgetter(0), reverse=True) + + # Condense the lists into one + included_edges.extend(sorted_open_edges) + sorted_edges = included_edges + del open_edges, sorted_open_edges, included_edges + + # Multigraphs need to handle edge keys in addition to edge data. + if G.is_multigraph(): + for wt, u, v, k, d in sorted_edges: + if subtrees[u] != subtrees[v]: + if keys: + if data: + yield u, v, k, d + else: + yield u, v, k + else: + if data: + yield u, v, d + else: + yield u, v + subtrees.union(u, v) + else: + for wt, u, v, d in sorted_edges: + if subtrees[u] != subtrees[v]: + if data: + yield u, v, d + else: + yield u, v + subtrees.union(u, v) + + +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") +def prim_mst_edges(G, minimum, weight="weight", keys=True, data=True, ignore_nan=False): + """Iterate over edges of Prim's algorithm min/max spanning tree. + + Parameters + ---------- + G : NetworkX Graph + The graph holding the tree of interest. + + minimum : bool (default: True) + Find the minimum (True) or maximum (False) spanning tree. + + weight : string (default: 'weight') + The name of the edge attribute holding the edge weights. + + keys : bool (default: True) + If `G` is a multigraph, `keys` controls whether edge keys ar yielded. + Otherwise `keys` is ignored. + + data : bool (default: True) + Flag for whether to yield edge attribute dicts. + If True, yield edges `(u, v, d)`, where `d` is the attribute dict. + If False, yield edges `(u, v)`. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + """ + is_multigraph = G.is_multigraph() + + nodes = set(G) + c = count() + + sign = 1 if minimum else -1 + + while nodes: + u = nodes.pop() + frontier = [] + visited = {u} + if is_multigraph: + for v, keydict in G.adj[u].items(): + for k, d in keydict.items(): + wt = d.get(weight, 1) * sign + if isnan(wt): + if ignore_nan: + continue + msg = f"NaN found as an edge weight. 
Edge {(u, v, k, d)}" + raise ValueError(msg) + heappush(frontier, (wt, next(c), u, v, k, d)) + else: + for v, d in G.adj[u].items(): + wt = d.get(weight, 1) * sign + if isnan(wt): + if ignore_nan: + continue + msg = f"NaN found as an edge weight. Edge {(u, v, d)}" + raise ValueError(msg) + heappush(frontier, (wt, next(c), u, v, d)) + while nodes and frontier: + if is_multigraph: + W, _, u, v, k, d = heappop(frontier) + else: + W, _, u, v, d = heappop(frontier) + if v in visited or v not in nodes: + continue + # Multigraphs need to handle edge keys in addition to edge data. + if is_multigraph and keys: + if data: + yield u, v, k, d + else: + yield u, v, k + else: + if data: + yield u, v, d + else: + yield u, v + # update frontier + visited.add(v) + nodes.discard(v) + if is_multigraph: + for w, keydict in G.adj[v].items(): + if w in visited: + continue + for k2, d2 in keydict.items(): + new_weight = d2.get(weight, 1) * sign + if isnan(new_weight): + if ignore_nan: + continue + msg = f"NaN found as an edge weight. Edge {(v, w, k2, d2)}" + raise ValueError(msg) + heappush(frontier, (new_weight, next(c), v, w, k2, d2)) + else: + for w, d2 in G.adj[v].items(): + if w in visited: + continue + new_weight = d2.get(weight, 1) * sign + if isnan(new_weight): + if ignore_nan: + continue + msg = f"NaN found as an edge weight. Edge {(v, w, d2)}" + raise ValueError(msg) + heappush(frontier, (new_weight, next(c), v, w, d2)) + + +ALGORITHMS = { + "boruvka": boruvka_mst_edges, + "borůvka": boruvka_mst_edges, + "kruskal": kruskal_mst_edges, + "prim": prim_mst_edges, +} + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") +def minimum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): + """Generate edges in a minimum spanning forest of an undirected + weighted graph. + + A minimum spanning tree is a subgraph of the graph (a tree) + with the minimum sum of edge weights. A spanning forest is a + union of the spanning trees for each connected component of the graph. + + Parameters + ---------- + G : undirected Graph + An undirected graph. If `G` is connected, then the algorithm finds a + spanning tree. Otherwise, a spanning forest is found. + + algorithm : string + The algorithm to use when finding a minimum spanning tree. Valid + choices are 'kruskal', 'prim', or 'boruvka'. The default is 'kruskal'. + + weight : string + Edge data key to use for weight (default 'weight'). + + keys : bool + Whether to yield edge key in multigraphs in addition to the edge. + If `G` is not a multigraph, this is ignored. + + data : bool, optional + If True yield the edge data along with the edge. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + Returns + ------- + edges : iterator + An iterator over edges in a maximum spanning tree of `G`. + Edges connecting nodes `u` and `v` are represented as tuples: + `(u, v, k, d)` or `(u, v, k)` or `(u, v, d)` or `(u, v)` + + If `G` is a multigraph, `keys` indicates whether the edge key `k` will + be reported in the third position in the edge tuple. `data` indicates + whether the edge datadict `d` will appear at the end of the edge tuple. + + If `G` is not a multigraph, the tuples are `(u, v, d)` if `data` is True + or `(u, v)` if `data` is False. 
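+ + A minimal Borůvka sketch (hypothetical weights, chosen distinct because that + algorithm requires it): + + >>> G = nx.Graph() + >>> G.add_weighted_edges_from([(0, 1, 1), (1, 2, 2), (2, 0, 3)]) + >>> mst = nx.minimum_spanning_edges(G, algorithm="boruvka", data=False) + >>> sorted(sorted(e) for e in mst) + [[0, 1], [1, 2]]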
+ + Examples + -------- + >>> from networkx.algorithms import tree + + Find minimum spanning edges by Kruskal's algorithm + + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) + >>> mst = tree.minimum_spanning_edges(G, algorithm="kruskal", data=False) + >>> edgelist = list(mst) + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [1, 2], [2, 3]] + + Find minimum spanning edges by Prim's algorithm + + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) + >>> mst = tree.minimum_spanning_edges(G, algorithm="prim", data=False) + >>> edgelist = list(mst) + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [1, 2], [2, 3]] + + Notes + ----- + For Borůvka's algorithm, each edge must have a weight attribute, and + each edge weight must be distinct. + + For the other algorithms, if the graph edges do not have a weight + attribute a default weight of 1 will be used. + + Modified code from David Eppstein, April 2006 + http://www.ics.uci.edu/~eppstein/PADS/ + + """ + try: + algo = ALGORITHMS[algorithm] + except KeyError as err: + msg = f"{algorithm} is not a valid choice for an algorithm." + raise ValueError(msg) from err + + return algo( + G, minimum=True, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) + + +@not_implemented_for("directed") +@nx._dispatchable(edge_attrs="weight", preserve_edge_attrs="data") +def maximum_spanning_edges( + G, algorithm="kruskal", weight="weight", keys=True, data=True, ignore_nan=False +): + """Generate edges in a maximum spanning forest of an undirected + weighted graph. + + A maximum spanning tree is a subgraph of the graph (a tree) + with the maximum possible sum of edge weights. A spanning forest is a + union of the spanning trees for each connected component of the graph. + + Parameters + ---------- + G : undirected Graph + An undirected graph. If `G` is connected, then the algorithm finds a + spanning tree. Otherwise, a spanning forest is found. + + algorithm : string + The algorithm to use when finding a maximum spanning tree. Valid + choices are 'kruskal', 'prim', or 'boruvka'. The default is 'kruskal'. + + weight : string + Edge data key to use for weight (default 'weight'). + + keys : bool + Whether to yield edge key in multigraphs in addition to the edge. + If `G` is not a multigraph, this is ignored. + + data : bool, optional + If True yield the edge data along with the edge. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + Returns + ------- + edges : iterator + An iterator over edges in a maximum spanning tree of `G`. + Edges connecting nodes `u` and `v` are represented as tuples: + `(u, v, k, d)` or `(u, v, k)` or `(u, v, d)` or `(u, v)` + + If `G` is a multigraph, `keys` indicates whether the edge key `k` will + be reported in the third position in the edge tuple. `data` indicates + whether the edge datadict `d` will appear at the end of the edge tuple. + + If `G` is not a multigraph, the tuples are `(u, v, d)` if `data` is True + or `(u, v)` if `data` is False. 
+ + Examples + -------- + >>> from networkx.algorithms import tree + + Find maximum spanning edges by Kruskal's algorithm + + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) + >>> mst = tree.maximum_spanning_edges(G, algorithm="kruskal", data=False) + >>> edgelist = list(mst) + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [0, 3], [1, 2]] + + Find maximum spanning edges by Prim's algorithm + + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) # assign weight 2 to edge 0-3 + >>> mst = tree.maximum_spanning_edges(G, algorithm="prim", data=False) + >>> edgelist = list(mst) + >>> sorted(sorted(e) for e in edgelist) + [[0, 1], [0, 3], [2, 3]] + + Notes + ----- + For Borůvka's algorithm, each edge must have a weight attribute, and + each edge weight must be distinct. + + For the other algorithms, if the graph edges do not have a weight + attribute a default weight of 1 will be used. + + Modified code from David Eppstein, April 2006 + http://www.ics.uci.edu/~eppstein/PADS/ + """ + try: + algo = ALGORITHMS[algorithm] + except KeyError as err: + msg = f"{algorithm} is not a valid choice for an algorithm." + raise ValueError(msg) from err + + return algo( + G, minimum=False, weight=weight, keys=keys, data=data, ignore_nan=ignore_nan + ) + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def minimum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): + """Returns a minimum spanning tree or forest on an undirected graph `G`. + + Parameters + ---------- + G : undirected graph + An undirected graph. If `G` is connected, then the algorithm finds a + spanning tree. Otherwise, a spanning forest is found. + + weight : str + Data key to use for edge weights. + + algorithm : string + The algorithm to use when finding a minimum spanning tree. Valid + choices are 'kruskal', 'prim', or 'boruvka'. The default is + 'kruskal'. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + Returns + ------- + G : NetworkX Graph + A minimum spanning tree or forest. + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) + >>> T = nx.minimum_spanning_tree(G) + >>> sorted(T.edges(data=True)) + [(0, 1, {}), (1, 2, {}), (2, 3, {})] + + + Notes + ----- + For Borůvka's algorithm, each edge must have a weight attribute, and + each edge weight must be distinct. + + For the other algorithms, if the graph edges do not have a weight + attribute a default weight of 1 will be used. + + There may be more than one tree with the same minimum or maximum weight. + See :mod:`networkx.tree.recognition` for more detailed definitions. + + Isolated nodes with self-loops are in the tree as edgeless isolated nodes. + + """ + edges = minimum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) + T = G.__class__() # Same graph class as G + T.graph.update(G.graph) + T.add_nodes_from(G.nodes.items()) + T.add_edges_from(edges) + return T + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def partition_spanning_tree( + G, minimum=True, weight="weight", partition="partition", ignore_nan=False +): + """ + Find a spanning tree while respecting a partition of edges. + + Edges can be flagged as either `INCLUDED` which are required to be in the + returned tree, `EXCLUDED`, which cannot be in the returned tree and `OPEN`. 
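+ + A minimal sketch of the partition mechanics (hypothetical flags on a 4-cycle; + the default ``partition`` attribute name is assumed): + + >>> G = nx.cycle_graph(4) + >>> G.edges[0, 1]["partition"] = nx.EdgePartition.INCLUDED + >>> G.edges[2, 3]["partition"] = nx.EdgePartition.EXCLUDED + >>> T = nx.partition_spanning_tree(G) + >>> sorted(T.edges) + [(0, 1), (0, 3), (1, 2)]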
+ + This is used in the SpanningTreeIterator to create new partitions following + the algorithm of Sörensen and Janssens [1]_. + + Parameters + ---------- + G : undirected graph + An undirected graph. + + minimum : bool (default: True) + Determines whether the returned tree is the minimum spanning tree of + the partition or the maximum one. + + weight : str + Data key to use for edge weights. + + partition : str + The key for the edge attribute containing the partition + data on the graph. Edges can be included, excluded or open using the + `EdgePartition` enum. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + + Returns + ------- + G : NetworkX Graph + A minimum (or maximum) spanning tree using all of the included edges in + the graph and none of the excluded edges. + + References + ---------- + .. [1] G.K. Janssens, K. Sörensen, An algorithm to generate all spanning + trees in order of increasing cost, Pesquisa Operacional, 2005-08, + Vol. 25 (2), p. 219-229, + https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en + """ + edges = kruskal_mst_edges( + G, + minimum, + weight, + keys=True, + data=True, + ignore_nan=ignore_nan, + partition=partition, + ) + T = G.__class__() # Same graph class as G + T.graph.update(G.graph) + T.add_nodes_from(G.nodes.items()) + T.add_edges_from(edges) + return T + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def maximum_spanning_tree(G, weight="weight", algorithm="kruskal", ignore_nan=False): + """Returns a maximum spanning tree or forest on an undirected graph `G`. + + Parameters + ---------- + G : undirected graph + An undirected graph. If `G` is connected, then the algorithm finds a + spanning tree. Otherwise, a spanning forest is found. + + weight : str + Data key to use for edge weights. + + algorithm : string + The algorithm to use when finding a maximum spanning tree. Valid + choices are 'kruskal', 'prim', or 'boruvka'. The default is + 'kruskal'. + + ignore_nan : bool (default: False) + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead. + + + Returns + ------- + G : NetworkX Graph + A maximum spanning tree or forest. + + + Examples + -------- + >>> G = nx.cycle_graph(4) + >>> G.add_edge(0, 3, weight=2) + >>> T = nx.maximum_spanning_tree(G) + >>> sorted(T.edges(data=True)) + [(0, 1, {}), (0, 3, {'weight': 2}), (1, 2, {})] + + + Notes + ----- + For Borůvka's algorithm, each edge must have a weight attribute, and + each edge weight must be distinct. + + For the other algorithms, if the graph edges do not have a weight + attribute a default weight of 1 will be used. + + There may be more than one tree with the same minimum or maximum weight. + See :mod:`networkx.tree.recognition` for more detailed definitions. + + Isolated nodes with self-loops are in the tree as edgeless isolated nodes. + + """ + edges = maximum_spanning_edges( + G, algorithm, weight, keys=True, data=True, ignore_nan=ignore_nan + ) + edges = list(edges) + T = G.__class__() # Same graph class as G + T.graph.update(G.graph) + T.add_nodes_from(G.nodes.items()) + T.add_edges_from(edges) + return T + + +@py_random_state(3) +@nx._dispatchable(preserve_edge_attrs=True, returns_graph=True) +def random_spanning_tree(G, weight=None, *, multiplicative=True, seed=None): + """ + Sample a random spanning tree using the edge weights of `G`.
+ + This function supports two different methods for determining the + sampling probability of each spanning tree. If ``multiplicative=True``, + the probability is based on the product of edge weights, and if + ``multiplicative=False`` it is based on the sum of the edge weights. + Because the total weight of all spanning trees is easier to determine + for the multiplicative version, it is significantly faster and should + be used if possible. Additionally, setting `weight` to `None` will cause + a spanning tree to be selected with uniform probability. + + The function uses algorithm A8 in [1]_. + + Parameters + ---------- + G : nx.Graph + An undirected version of the original graph. + + weight : string + The edge key for the edge attribute holding edge weight. + + multiplicative : bool, default=True + If `True`, the probability of each tree is the product of its edge weights + over the sum of these products for all spanning trees in the graph. If + `False`, the probability is the sum of its edge weights over the sum of + these sums for all spanning trees in the graph. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + nx.Graph + A spanning tree using the distribution defined by the weight of the tree. + + References + ---------- + .. [1] V. Kulkarni, Generating random combinatorial objects, Journal of + Algorithms, 11 (1990), pp. 185–207 + """ + + def find_node(merged_nodes, node): + """ + We can think of clusters of contracted nodes as having one + representative in the graph. Each node which is not in merged_nodes + is still its own representative. Since a representative can be later + contracted, we need to recursively search through the dict to find + the final representative, but once we know it we can use path + compression to speed up the access of the representative for next time. + + This cannot be replaced by the standard NetworkX union_find since that + data structure merges the smaller set of nodes into the larger one, + but this function requires that we merge them in the order in which + the edges are contracted. + + Parameters + ---------- + merged_nodes : dict + The dict storing the mapping from node to representative + node + The node whose representative we seek + + Returns + ------- + The representative of the `node` + """ + if node not in merged_nodes: + return node + else: + rep = find_node(merged_nodes, merged_nodes[node]) + merged_nodes[node] = rep + return rep + + def prepare_graph(): + """ + For the graph `G`, remove all edges not in the set `V` and then + contract all edges in the set `U`. + + Returns + ------- + A copy of `G` which has had all edges not in `V` removed and all edges + in `U` contracted. + """ + + # The result is a MultiGraph version of G so that parallel edges are + # allowed during edge contraction + result = nx.MultiGraph(incoming_graph_data=G) + + # Remove all edges not in V + edges_to_remove = set(result.edges()).difference(V) + result.remove_edges_from(edges_to_remove) + + # Contract all edges in U + # + # Imagine that you have two edges to contract and they share an + # endpoint like this: + # [0] ----- [1] ----- [2] + # If we contract (0, 1) first, the contraction function will always + # delete the second node it is passed so the resulting graph would be + # [0] ----- [2] + # and edge (1, 2) no longer exists but (0, 2) would need to be contracted + # in its place now.
That is why I use the below dict as a merge-find + # data structure with path compression to track how the nodes are merged. + merged_nodes = {} + + for u, v in U: + u_rep = find_node(merged_nodes, u) + v_rep = find_node(merged_nodes, v) + # We cannot contract a node with itself + if u_rep == v_rep: + continue + nx.contracted_nodes(result, u_rep, v_rep, self_loops=False, copy=False) + merged_nodes[v_rep] = u_rep + + return merged_nodes, result + + def spanning_tree_total_weight(G, weight): + """ + Find the sum of weights of the spanning trees of `G` using the + appropriate method ('multiplicative' or 'additive'). + + This is easy if the chosen method is 'multiplicative', since we can + use Kirchhoff's Tree Matrix Theorem directly. However, with the + 'additive' method, this process is slightly more complex and less + computationally efficient as we have to find the number of spanning + trees which contain each possible edge in the graph. + + Parameters + ---------- + G : NetworkX Graph + The graph to find the total weight of all spanning trees on. + + weight : string + The key for the weight edge attribute of the graph. + + Returns + ------- + float + The sum of either the multiplicative or additive weight for all + spanning trees in the graph. + """ + if multiplicative: + return number_of_spanning_trees(G, weight=weight) + else: + # There are two cases for the total spanning tree additive weight. + # 1. There is one edge in the graph. Then the only spanning tree is + # that edge itself, whose total weight is the weight of that edge. + if G.number_of_edges() == 1: + return next(iter(G.edges(data=weight)))[2] + # 2. There are zero edges, or two or more edges, in the graph. Then, we + # find the total weight of the spanning trees using the formula in the + # reference paper: take the weight of each edge and multiply it by + # the number of spanning trees which include that edge. This + # can be accomplished by contracting the edge and finding the + # multiplicative total spanning tree weight if the weight of each edge + # is assumed to be 1, which is conveniently built into networkx already, + # by calling number_of_spanning_trees with weight=None. + # Note that with no edges the returned value is just zero. + else: + total = 0 + for u, v, w in G.edges(data=weight): + total += w * nx.number_of_spanning_trees( + nx.contracted_edge(G, edge=(u, v), self_loops=False), + weight=None, + ) + return total + + if G.number_of_nodes() < 2: + # no edges in the spanning tree + return nx.empty_graph(G.nodes) + + U = set() + st_cached_value = 0 + V = set(G.edges()) + shuffled_edges = list(G.edges()) + seed.shuffle(shuffled_edges) + + for u, v in shuffled_edges: + e_weight = G[u][v][weight] if weight is not None else 1 + node_map, prepared_G = prepare_graph() + G_total_tree_weight = spanning_tree_total_weight(prepared_G, weight) + # Contracting the edge below computes the total tree weight + # assuming we include that edge. + # Now, if (u, v) cannot exist in G because it is fully contracted out + # of existence, then it by definition cannot influence G_e's Kirchhoff + # value. But, we also cannot pick it. + rep_edge = (find_node(node_map, u), find_node(node_map, v)) + # Check to see if the 'representative edge' for the current edge is + # in prepared_G. If so, then we can pick it.
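+ # The threshold computed below is the conditional probability that edge + # (u, v) belongs to the sampled tree: the (weighted) number of spanning + # trees containing the edge, divided by the (weighted) number of spanning + # trees of the contracted graph. Keeping the edge when the uniform draw + # z satisfies z <= threshold realizes Kulkarni's A8 sampling step.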
+ if rep_edge in prepared_G.edges: + prepared_G_e = nx.contracted_edge( + prepared_G, edge=rep_edge, self_loops=False + ) + G_e_total_tree_weight = spanning_tree_total_weight(prepared_G_e, weight) + if multiplicative: + threshold = e_weight * G_e_total_tree_weight / G_total_tree_weight + else: + numerator = (st_cached_value + e_weight) * nx.number_of_spanning_trees( + prepared_G_e + ) + G_e_total_tree_weight + denominator = ( + st_cached_value * nx.number_of_spanning_trees(prepared_G) + + G_total_tree_weight + ) + threshold = numerator / denominator + else: + threshold = 0.0 + z = seed.uniform(0.0, 1.0) + if z > threshold: + # Remove the edge from V since we did not pick it. + V.remove((u, v)) + else: + # Add the edge to U since we picked it. + st_cached_value += e_weight + U.add((u, v)) + # If we decide to keep an edge, it may complete the spanning tree. + if len(U) == G.number_of_nodes() - 1: + spanning_tree = nx.Graph() + spanning_tree.add_edges_from(U) + return spanning_tree + raise Exception(f"Something went wrong! Only {len(U)} edges in the spanning tree!") + + +class SpanningTreeIterator: + """ + Iterate over all spanning trees of a graph in either increasing or + decreasing order of cost. + + Notes + ----- + This iterator uses the partition scheme from [1]_ (included edges, + excluded edges and open edges) as well as a modified Kruskal's Algorithm + to generate minimum spanning trees which respect the partition of edges. + For spanning trees with the same weight, ties are broken arbitrarily. + + References + ---------- + .. [1] G.K. Janssens, K. Sörensen, An algorithm to generate all spanning + trees in order of increasing cost, Pesquisa Operacional, 2005-08, + Vol. 25 (2), p. 219-229, + https://www.scielo.br/j/pope/a/XHswBwRwJyrfL88dmMwYNWp/?lang=en + """ + + @dataclass(order=True) + class Partition: + """ + This dataclass represents a partition and stores a dict with the edge + data and the weight of the minimum spanning tree of the partition. + """ + + mst_weight: float + partition_dict: dict = field(compare=False) + + def __copy__(self): + return SpanningTreeIterator.Partition( + self.mst_weight, self.partition_dict.copy() + ) + + def __init__(self, G, weight="weight", minimum=True, ignore_nan=False): + """ + Initialize the iterator + + Parameters + ---------- + G : nx.Graph + The undirected graph whose spanning trees will be iterated over + + weight : string, default = "weight" + The edge attribute used to store the weight of the edge + + minimum : bool, default = True + Return the trees in increasing order of weight if True and in + decreasing order if False. + + ignore_nan : bool, default = False + If a NaN is found as an edge weight normally an exception is raised. + If `ignore_nan is True` then that edge is ignored instead.
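+ + A small usage sketch (illustrative): a triangle has exactly three + spanning trees, so the iterator yields three graphs. + + >>> G = nx.cycle_graph(3) + >>> len(list(nx.SpanningTreeIterator(G))) + 3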
+ """ + self.G = G.copy() + self.G.__networkx_cache__ = None # Disable caching + self.weight = weight + self.minimum = minimum + self.ignore_nan = ignore_nan + # Randomly create a key for an edge attribute to hold the partition data + self.partition_key = ( + "SpanningTreeIterators super secret partition attribute name" + ) + + def __iter__(self): + """ + Returns + ------- + SpanningTreeIterator + The iterator object for this graph + """ + self.partition_queue = PriorityQueue() + self._clear_partition(self.G) + mst_weight = partition_spanning_tree( + self.G, self.minimum, self.weight, self.partition_key, self.ignore_nan + ).size(weight=self.weight) + + self.partition_queue.put( + self.Partition(mst_weight if self.minimum else -mst_weight, {}) + ) + + return self + + def __next__(self): + """ + Returns + ------- + (multi)Graph + The spanning tree of next greatest weight, which ties broken + arbitrarily. + """ + if self.partition_queue.empty(): + del self.G, self.partition_queue + raise StopIteration + + partition = self.partition_queue.get() + self._write_partition(partition) + next_tree = partition_spanning_tree( + self.G, self.minimum, self.weight, self.partition_key, self.ignore_nan + ) + self._partition(partition, next_tree) + + self._clear_partition(next_tree) + return next_tree + + def _partition(self, partition, partition_tree): + """ + Create new partitions based of the minimum spanning tree of the + current minimum partition. + + Parameters + ---------- + partition : Partition + The Partition instance used to generate the current minimum spanning + tree. + partition_tree : nx.Graph + The minimum spanning tree of the input partition. + """ + # create two new partitions with the data from the input partition dict + p1 = self.Partition(0, partition.partition_dict.copy()) + p2 = self.Partition(0, partition.partition_dict.copy()) + for e in partition_tree.edges: + # determine if the edge was open or included + if e not in partition.partition_dict: + # This is an open edge + p1.partition_dict[e] = EdgePartition.EXCLUDED + p2.partition_dict[e] = EdgePartition.INCLUDED + + self._write_partition(p1) + p1_mst = partition_spanning_tree( + self.G, + self.minimum, + self.weight, + self.partition_key, + self.ignore_nan, + ) + p1_mst_weight = p1_mst.size(weight=self.weight) + if nx.is_connected(p1_mst): + p1.mst_weight = p1_mst_weight if self.minimum else -p1_mst_weight + self.partition_queue.put(p1.__copy__()) + p1.partition_dict = p2.partition_dict.copy() + + def _write_partition(self, partition): + """ + Writes the desired partition into the graph to calculate the minimum + spanning tree. + + Parameters + ---------- + partition : Partition + A Partition dataclass describing a partition on the edges of the + graph. + """ + + partition_dict = partition.partition_dict + partition_key = self.partition_key + G = self.G + + edges = ( + G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True) + ) + for *e, d in edges: + d[partition_key] = partition_dict.get(tuple(e), EdgePartition.OPEN) + + def _clear_partition(self, G): + """ + Removes partition data from the graph + """ + partition_key = self.partition_key + edges = ( + G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True) + ) + for *e, d in edges: + if partition_key in d: + del d[partition_key] + + +@nx._dispatchable(edge_attrs="weight") +def number_of_spanning_trees(G, *, root=None, weight=None): + """Returns the number of spanning trees in `G`. 
+ + A spanning tree for an undirected graph is a tree that connects + all nodes in the graph. For a directed graph, the analog of a + spanning tree is called a (spanning) arborescence. The arborescence + includes a unique directed path from the `root` node to each other node. + The graph must be weakly connected, and the root must be a node + that includes all nodes as successors [3]_. Note that to avoid + discussing sink-roots and reverse-arborescences, we have reversed + the edge orientation from [3]_ and use the in-degree laplacian. + + This function (when `weight` is `None`) returns the number of + spanning trees for an undirected graph and the number of + arborescences from a single root node for a directed graph. + When `weight` is the name of an edge attribute which holds the + weight value of each edge, the function returns the sum over + all trees of the multiplicative weight of each tree. That is, + the weight of the tree is the product of its edge weights. + + Kirchoff's Tree Matrix Theorem states that any cofactor of the + Laplacian matrix of a graph is the number of spanning trees in the + graph. (Here we use cofactors for a diagonal entry so that the + cofactor becomes the determinant of the matrix with one row + and its matching column removed.) For a weighted Laplacian matrix, + the cofactor is the sum across all spanning trees of the + multiplicative weight of each tree. That is, the weight of each + tree is the product of its edge weights. The theorem is also + known as Kirchhoff's theorem [1]_ and the Matrix-Tree theorem [2]_. + + For directed graphs, a similar theorem (Tutte's Theorem) holds with + the cofactor chosen to be the one with row and column removed that + correspond to the root. The cofactor is the number of arborescences + with the specified node as root. And the weighted version gives the + sum of the arborescence weights with root `root`. The arborescence + weight is the product of its edge weights. + + Parameters + ---------- + G : NetworkX graph + + root : node + A node in the directed graph `G` that has all nodes as descendants. + (This is ignored for undirected graphs.) + + weight : string or None, optional (default=None) + The name of the edge attribute holding the edge weight. + If `None`, then each edge is assumed to have a weight of 1. + + Returns + ------- + Number + Undirected graphs: + The number of spanning trees of the graph `G`. + Or the sum of all spanning tree weights of the graph `G` + where the weight of a tree is the product of its edge weights. + Directed graphs: + The number of arborescences of `G` rooted at node `root`. + Or the sum of all arborescence weights of the graph `G` with + specified root where the weight of an arborescence is the product + of its edge weights. + + Raises + ------ + NetworkXPointlessConcept + If `G` does not contain any nodes. + + NetworkXError + If the graph `G` is directed and the root node + is not specified or is not in G. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> round(nx.number_of_spanning_trees(G)) + 125 + + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=2) + >>> G.add_edge(1, 3, weight=1) + >>> G.add_edge(2, 3, weight=1) + >>> round(nx.number_of_spanning_trees(G, weight="weight")) + 5 + + Notes + ----- + Self-loops are excluded. Multi-edges are contracted in one edge + equal to the sum of the weights. + + References + ---------- + .. [1] Wikipedia + "Kirchhoff's theorem." + https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem + .. [2] Kirchhoff, G. R. 
+ Über die Auflösung der Gleichungen, auf welche man + bei der Untersuchung der linearen Vertheilung + Galvanischer Ströme geführt wird + Annalen der Physik und Chemie, vol. 72, pp. 497-508, 1847. + .. [3] Margoliash, J. + "Matrix-Tree Theorem for Directed Graphs" + https://www.math.uchicago.edu/~may/VIGRE/VIGRE2010/REUPapers/Margoliash.pdf + """ + import numpy as np + + if len(G) == 0: + raise nx.NetworkXPointlessConcept("Graph G must contain at least one node.") + + # undirected G + if not nx.is_directed(G): + if not nx.is_connected(G): + return 0 + G_laplacian = nx.laplacian_matrix(G, weight=weight).toarray() + return float(np.linalg.det(G_laplacian[1:, 1:])) + + # directed G + if root is None: + raise nx.NetworkXError("Input `root` must be provided when G is directed") + if root not in G: + raise nx.NetworkXError("The node root is not in the graph G.") + if not nx.is_weakly_connected(G): + return 0 + + # Compute directed Laplacian matrix + nodelist = [root] + [n for n in G if n != root] + A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight) + D = np.diag(A.sum(axis=0)) + G_laplacian = D - A + + # Compute number of spanning trees + return float(np.linalg.det(G_laplacian[1:, 1:])) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py new file mode 100644 index 0000000..a75770a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/operations.py @@ -0,0 +1,106 @@ +"""Operations on trees.""" + +from functools import partial +from itertools import accumulate, chain + +import networkx as nx + +__all__ = ["join_trees"] + + +# Argument types don't match dispatching, but allow manual selection of backend +@nx._dispatchable(graphs=None, returns_graph=True) +def join_trees(rooted_trees, *, label_attribute=None, first_label=0): + """Returns a new rooted tree made by joining `rooted_trees` + + Constructs a new tree by joining each tree in `rooted_trees`. + A new root node is added and connected to each of the roots + of the input trees. While copying the nodes from the trees, + relabeling to integers occurs. If the `label_attribute` is provided, + the old node labels will be stored in the new tree under this attribute. + + Parameters + ---------- + rooted_trees : list + A list of pairs in which each left element is a NetworkX graph + object representing a tree and each right element is the root + node of that tree. The nodes of these trees will be relabeled to + integers. + + label_attribute : str + If provided, the old node labels will be stored in the new tree + under this node attribute. If not provided, the original labels + of the nodes in the input trees are not stored. + + first_label : int, optional (default=0) + Specifies the label for the new root node. If provided, the root node of the joined tree + will have this label. If not provided, the root node will default to a label of 0. + + Returns + ------- + NetworkX graph + The rooted tree resulting from joining the provided `rooted_trees`. The new tree has a root node + labeled as specified by `first_label` (defaulting to 0 if not provided). Subtrees from the input + `rooted_trees` are attached to this new root node. Each non-root node, if the `label_attribute` + is provided, has an attribute that indicates the original label of the node in the input tree. + + Notes + ----- + Trees are stored in NetworkX as NetworkX Graphs. There is no specific + enforcement of the fact that these are trees. 
Testing for each tree + can be done using :func:`networkx.is_tree`. + + Graph, edge, and node attributes are propagated from the given + rooted trees to the created tree. If there are any overlapping graph + attributes, those from later trees will overwrite those from earlier + trees in the tuple of positional arguments. + + Examples + -------- + Join two full balanced binary trees of height *h* to get a full + balanced binary tree of depth *h* + 1:: + + >>> h = 4 + >>> left = nx.balanced_tree(2, h) + >>> right = nx.balanced_tree(2, h) + >>> joined_tree = nx.join_trees([(left, 0), (right, 0)]) + >>> nx.is_isomorphic(joined_tree, nx.balanced_tree(2, h + 1)) + True + + """ + if not rooted_trees: + return nx.empty_graph(1) + + # Unzip the zipped list of (tree, root) pairs. + trees, roots = zip(*rooted_trees) + + # The join of the trees has the same type as the type of the first tree. + R = type(trees[0])() + + lengths = (len(tree) for tree in trees[:-1]) + first_labels = list(accumulate(lengths, initial=first_label + 1)) + + new_roots = [] + for tree, root, first_node in zip(trees, roots, first_labels): + new_root = first_node + list(tree.nodes()).index(root) + new_roots.append(new_root) + + # Relabel the nodes so that their union is the integers starting at first_label. + relabel = partial( + nx.convert_node_labels_to_integers, label_attribute=label_attribute + ) + new_trees = [ + relabel(tree, first_label=first_label) + for tree, first_label in zip(trees, first_labels) + ] + + # Add all sets of nodes and edges, attributes + for tree in new_trees: + R.update(tree) + + # Finally, join the subtrees at the root. We know first_label is unused by + # the way we relabeled the subtrees. + R.add_node(first_label) + R.add_edges_from((first_label, root) for root in new_roots) + + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py new file mode 100644 index 0000000..a9eae98 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/recognition.py @@ -0,0 +1,273 @@ +""" +Recognition Tests +================= + +A *forest* is an acyclic, undirected graph, and a *tree* is a connected forest. +Depending on the subfield, there are various conventions for generalizing these +definitions to directed graphs. + +In one convention, directed variants of forest and tree are defined in an +identical manner, except that the direction of the edges is ignored. In effect, +each directed edge is treated as a single undirected edge. Then, additional +restrictions are imposed to define *branchings* and *arborescences*. + +In another convention, directed variants of forest and tree correspond to +the previous convention's branchings and arborescences, respectively. Then two +new terms, *polyforest* and *polytree*, are defined to correspond to the other +convention's forest and tree. + +Summarizing:: + + +-----------------------------+ + | Convention A | Convention B | + +=============================+ + | forest | polyforest | + | tree | polytree | + | branching | forest | + | arborescence | tree | + +-----------------------------+ + +Each convention has its reasons. The first convention emphasizes definitional +similarity in that directed forests and trees are only concerned with +acyclicity and do not have an in-degree constraint, just as their undirected +counterparts do not. 
The second convention emphasizes functional similarity + in the sense that the directed analog of a spanning tree is a spanning + arborescence. That is, take any spanning tree and choose one node as the root. + Then every edge is assigned a direction such that there is a directed path from + the root to every other node. The result is a spanning arborescence. + + NetworkX follows convention "A". Explicitly, these are: + + undirected forest + An undirected graph with no undirected cycles. + + undirected tree + A connected, undirected forest. + + directed forest + A directed graph with no undirected cycles. Equivalently, the underlying + graph structure (which ignores edge orientations) is an undirected forest. + In convention B, this is known as a polyforest. + + directed tree + A weakly connected, directed forest. Equivalently, the underlying graph + structure (which ignores edge orientations) is an undirected tree. In + convention B, this is known as a polytree. + + branching + A directed forest with each node having, at most, one parent. So the maximum + in-degree is equal to 1. In convention B, this is known as a forest. + + arborescence + A directed tree with each node having, at most, one parent. So the maximum + in-degree is equal to 1. In convention B, this is known as a tree. + + For trees and arborescences, the adjective "spanning" may be added to designate + that the graph, when considered as a forest/branching, consists of a single + tree/arborescence that includes all nodes in the graph. It is true, by + definition, that every tree/arborescence is spanning with respect to the nodes + that define the tree/arborescence and so it might seem redundant to introduce + the notion of "spanning". However, the nodes may represent a subset of + nodes from a larger graph, and it is in this context that the term "spanning" + becomes a useful notion. + + """ + + import networkx as nx + + __all__ = ["is_arborescence", "is_branching", "is_forest", "is_tree"] + + + @nx.utils.not_implemented_for("undirected") + @nx._dispatchable + def is_arborescence(G): + """ + Returns True if `G` is an arborescence. + + An arborescence is a directed tree with maximum in-degree equal to 1. + + Parameters + ---------- + G : graph + The graph to test. + + Returns + ------- + b : bool + A boolean that is True if `G` is an arborescence. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (0, 2), (2, 3), (3, 4)]) + >>> nx.is_arborescence(G) + True + >>> G.remove_edge(0, 1) + >>> G.add_edge(1, 2) # maximum in-degree is 2 + >>> nx.is_arborescence(G) + False + + Notes + ----- + In another convention, an arborescence is known as a *tree*. + + See Also + -------- + is_tree + + """ + return is_tree(G) and max(d for n, d in G.in_degree()) <= 1 + + + @nx.utils.not_implemented_for("undirected") + @nx._dispatchable + def is_branching(G): + """ + Returns True if `G` is a branching. + + A branching is a directed forest with maximum in-degree equal to 1. + + Parameters + ---------- + G : directed graph + The directed graph to test. + + Returns + ------- + b : bool + A boolean that is True if `G` is a branching. + + Examples + -------- + >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)]) + >>> nx.is_branching(G) + True + >>> G.remove_edge(2, 3) + >>> G.add_edge(3, 1) # maximum in-degree is 2 + >>> nx.is_branching(G) + False + + Notes + ----- + In another convention, a branching is also known as a *forest*.
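+ + For multigraphs, parallel edges already violate the forest condition; + a small illustrative check: + + >>> nx.is_branching(nx.MultiDiGraph([(0, 1), (0, 1)])) + False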
+ + See Also + -------- + is_forest + + """ + return is_forest(G) and max(d for n, d in G.in_degree()) <= 1 + + +@nx._dispatchable +def is_forest(G): + """ + Returns True if `G` is a forest. + + A forest is a graph with no undirected cycles. + + For directed graphs, `G` is a forest if the underlying graph is a forest. + The underlying graph is obtained by treating each directed edge as a single + undirected edge in a multigraph. + + Parameters + ---------- + G : graph + The graph to test. + + Returns + ------- + b : bool + A boolean that is True if `G` is a forest. + + Raises + ------ + NetworkXPointlessConcept + If `G` is empty. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5)]) + >>> nx.is_forest(G) + True + >>> G.add_edge(4, 1) + >>> nx.is_forest(G) + False + + Notes + ----- + In another convention, a directed forest is known as a *polyforest* and + then *forest* corresponds to a *branching*. + + See Also + -------- + is_branching + + """ + if len(G) == 0: + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") + + if G.is_directed(): + components = (G.subgraph(c) for c in nx.weakly_connected_components(G)) + else: + components = (G.subgraph(c) for c in nx.connected_components(G)) + + return all(len(c) - 1 == c.number_of_edges() for c in components) + + +@nx._dispatchable +def is_tree(G): + """ + Returns True if `G` is a tree. + + A tree is a connected graph with no undirected cycles. + + For directed graphs, `G` is a tree if the underlying graph is a tree. The + underlying graph is obtained by treating each directed edge as a single + undirected edge in a multigraph. + + Parameters + ---------- + G : graph + The graph to test. + + Returns + ------- + b : bool + A boolean that is True if `G` is a tree. + + Raises + ------ + NetworkXPointlessConcept + If `G` is empty. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from([(1, 2), (1, 3), (2, 4), (2, 5)]) + >>> nx.is_tree(G) # n-1 edges + True + >>> G.add_edge(3, 4) + >>> nx.is_tree(G) # n edges + False + + Notes + ----- + In another convention, a directed tree is known as a *polytree* and then + *tree* corresponds to an *arborescence*. + + See Also + -------- + is_arborescence + + """ + if len(G) == 0: + raise nx.exception.NetworkXPointlessConcept("G has no nodes.") + + if G.is_directed(): + is_connected = nx.is_weakly_connected + else: + is_connected = nx.is_connected + + # A connected graph with no cycles has n-1 edges. 
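+ # The edge-count comparison is cheap and short-circuits the `and`, so the + # more expensive connectivity traversal only runs when the count matches.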
+ return len(G) - 1 == G.number_of_edges() and is_connected(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..a8a1996 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_branchings.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_branchings.cpython-312.pyc new file mode 100644 index 0000000..0c9ecdd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_branchings.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_coding.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_coding.cpython-312.pyc new file mode 100644 index 0000000..ca7c9bd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_coding.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_decomposition.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_decomposition.cpython-312.pyc new file mode 100644 index 0000000..fa02870 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_decomposition.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_mst.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_mst.cpython-312.pyc new file mode 100644 index 0000000..93c1fa7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_mst.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_operations.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_operations.cpython-312.pyc new file mode 100644 index 0000000..2923ac5 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_operations.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_recognition.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_recognition.cpython-312.pyc new file mode 100644 index 0000000..dd9c80d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/__pycache__/test_recognition.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py new file mode 100644 index 0000000..9be976a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_branchings.py @@ -0,0 
+1,624 @@ +import math +from operator import itemgetter + +import pytest + +import networkx as nx +from networkx.algorithms.tree import branchings, recognition + +np = pytest.importorskip("numpy") + +# +# Explicitly discussed examples from Edmonds paper. +# + +# Used in Figures A-F. +# +# fmt: off +G_array = np.array([ + # 0 1 2 3 4 5 6 7 8 + [0, 0, 12, 0, 12, 0, 0, 0, 0], # 0 + [4, 0, 0, 0, 0, 13, 0, 0, 0], # 1 + [0, 17, 0, 21, 0, 12, 0, 0, 0], # 2 + [5, 0, 0, 0, 17, 0, 18, 0, 0], # 3 + [0, 0, 0, 0, 0, 0, 0, 12, 0], # 4 + [0, 0, 0, 0, 0, 0, 14, 0, 12], # 5 + [0, 0, 21, 0, 0, 0, 0, 0, 15], # 6 + [0, 0, 0, 19, 0, 0, 15, 0, 0], # 7 + [0, 0, 0, 0, 0, 0, 0, 18, 0], # 8 +], dtype=int) + +# Two copies of the graph from the original paper as disconnected components +G_big_array = np.zeros(np.array(G_array.shape) * 2, dtype=int) +G_big_array[:G_array.shape[0], :G_array.shape[1]] = G_array +G_big_array[G_array.shape[0]:, G_array.shape[1]:] = G_array + +# fmt: on + + +def G1(): + G = nx.from_numpy_array(G_array, create_using=nx.MultiDiGraph) + return G + + +def G2(): + # Now we shift all the weights by -10. + # Should not affect optimal arborescence, but does affect optimal branching. + Garr = G_array.copy() + Garr[np.nonzero(Garr)] -= 10 + G = nx.from_numpy_array(Garr, create_using=nx.MultiDiGraph) + return G + + +# An optimal branching for G1 that is also a spanning arborescence. So it is +# also an optimal spanning arborescence. +# +optimal_arborescence_1 = [ + (0, 2, 12), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 4, 17), + (3, 6, 18), + (6, 8, 15), + (8, 7, 18), +] + +# For G2, the optimal branching of G1 (with shifted weights) is no longer +# an optimal branching, but it is still an optimal spanning arborescence +# (just with shifted weights). An optimal branching for G2 is similar to what +# appears in figure G (this is greedy_subopt_branching_1a below), but with the +# edge (3, 0, 5), which is now (3, 0, -5), removed. Thus, the optimal branching +# is not a spanning arborescence. The code finds optimal_branching_2a. +# An alternative and equivalent branching is optimal_branching_2b. We would +# need to modify the code to iterate through all equivalent optimal branchings. +# +# These are maximal branchings or arborescences. +optimal_branching_2a = [ + (5, 6, 4), + (6, 2, 11), + (6, 8, 5), + (8, 7, 8), + (2, 1, 7), + (2, 3, 11), + (3, 4, 7), +] +optimal_branching_2b = [ + (8, 7, 8), + (7, 3, 9), + (3, 4, 7), + (3, 6, 8), + (6, 2, 11), + (2, 1, 7), + (1, 5, 3), +] +optimal_arborescence_2 = [ + (0, 2, 2), + (2, 1, 7), + (2, 3, 11), + (1, 5, 3), + (3, 4, 7), + (3, 6, 8), + (6, 8, 5), + (8, 7, 8), +] + +# Two suboptimal maximal branchings on G1 obtained from a greedy algorithm. +# 1a matches what is shown in Figure G in Edmonds's paper. 
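+# ("Edmonds's paper" here is J. Edmonds, "Optimum Branchings", Journal of +# Research of the National Bureau of Standards, 71B, 1967.)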
+greedy_subopt_branching_1a = [ + (5, 6, 14), + (6, 2, 21), + (6, 8, 15), + (8, 7, 18), + (2, 1, 17), + (2, 3, 21), + (3, 0, 5), + (3, 4, 17), +] +greedy_subopt_branching_1b = [ + (8, 7, 18), + (7, 6, 15), + (6, 2, 21), + (2, 1, 17), + (2, 3, 21), + (1, 5, 13), + (3, 0, 5), + (3, 4, 17), +] + + +def build_branching(edges, double=False): + G = nx.DiGraph() + for u, v, weight in edges: + G.add_edge(u, v, weight=weight) + if double: + G.add_edge(u + 9, v + 9, weight=weight) + return G + + +def sorted_edges(G, attr="weight", default=1): + edges = [(u, v, data.get(attr, default)) for (u, v, data) in G.edges(data=True)] + edges = sorted(edges, key=lambda x: (x[2], x[1], x[0])) + return edges + + +def assert_equal_branchings(G1, G2, attr="weight", default=1): + edges1 = list(G1.edges(data=True)) + edges2 = list(G2.edges(data=True)) + assert len(edges1) == len(edges2) + + # Grab the weights only. + e1 = sorted_edges(G1, attr, default) + e2 = sorted_edges(G2, attr, default) + + for a, b in zip(e1, e2): + assert a[:2] == b[:2] + np.testing.assert_almost_equal(a[2], b[2]) + + +################ + + +def test_optimal_branching1(): + G = build_branching(optimal_arborescence_1) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 131 + + +def test_optimal_branching2a(): + G = build_branching(optimal_branching_2a) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 53 + + +def test_optimal_branching2b(): + G = build_branching(optimal_branching_2b) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 53 + + +def test_optimal_arborescence2(): + G = build_branching(optimal_arborescence_2) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 51 + + +def test_greedy_suboptimal_branching1a(): + G = build_branching(greedy_subopt_branching_1a) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 128 + + +def test_greedy_suboptimal_branching1b(): + G = build_branching(greedy_subopt_branching_1b) + assert recognition.is_arborescence(G), True + assert branchings.branching_weight(G) == 127 + + +def test_greedy_max1(): + # Standard test. + # + G = G1() + B = branchings.greedy_branching(G) + # There are only two possible greedy branchings. The sorting is such + # that it should equal the second suboptimal branching: 1b. + B_ = build_branching(greedy_subopt_branching_1b) + assert_equal_branchings(B, B_) + + +def test_greedy_branching_kwarg_kind(): + G = G1() + with pytest.raises(nx.NetworkXException, match="Unknown value for `kind`."): + B = branchings.greedy_branching(G, kind="lol") + + +def test_greedy_branching_for_unsortable_nodes(): + G = nx.DiGraph() + G.add_weighted_edges_from([((2, 3), 5, 1), (3, "a", 1), (2, 4, 5)]) + edges = [(u, v, data.get("weight", 1)) for (u, v, data) in G.edges(data=True)] + with pytest.raises(TypeError): + edges.sort(key=itemgetter(2, 0, 1), reverse=True) + B = branchings.greedy_branching(G, kind="max").edges(data=True) + assert list(B) == [ + ((2, 3), 5, {"weight": 1}), + (3, "a", {"weight": 1}), + (2, 4, {"weight": 5}), + ] + + +def test_greedy_max2(): + # Different default weight. + # + G = G1() + del G[1][0][0]["weight"] + B = branchings.greedy_branching(G, default=6) + # Chosen so that edge (3,0,5) is not selected and (1,0,6) is instead. 
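+ # (The weight on edge (1, 0) was deleted above, so `default=6` applies to + # it, beating the weight-5 edge (3, 0) as node 0's incoming edge.)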
+ + edges = [ + (1, 0, 6), + (1, 5, 13), + (7, 6, 15), + (2, 1, 17), + (3, 4, 17), + (8, 7, 18), + (2, 3, 21), + (6, 2, 21), + ] + B_ = build_branching(edges) + assert_equal_branchings(B, B_) + + +def test_greedy_max3(): + # All equal weights. + # + G = G1() + B = branchings.greedy_branching(G, attr=None) + + # This is mostly arbitrary...the output was generated by running the algo. + edges = [ + (2, 1, 1), + (3, 0, 1), + (3, 4, 1), + (5, 8, 1), + (6, 2, 1), + (7, 3, 1), + (7, 6, 1), + (8, 7, 1), + ] + B_ = build_branching(edges) + assert_equal_branchings(B, B_, default=1) + + +def test_greedy_min(): + G = G1() + B = branchings.greedy_branching(G, kind="min") + + edges = [ + (1, 0, 4), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (7, 3, 19), + ] + B_ = build_branching(edges) + assert_equal_branchings(B, B_) + + +def test_edmonds1_maxbranch(): + G = G1() + x = branchings.maximum_branching(G) + x_ = build_branching(optimal_arborescence_1) + assert_equal_branchings(x, x_) + + +def test_edmonds1_maxarbor(): + G = G1() + x = branchings.maximum_spanning_arborescence(G) + x_ = build_branching(optimal_arborescence_1) + assert_equal_branchings(x, x_) + + +def test_edmonds1_minimal_branching(): + # graph will have something like a minimum arborescence but no spanning one + G = nx.from_numpy_array(G_big_array, create_using=nx.DiGraph) + B = branchings.minimal_branching(G) + edges = [ + (3, 0, 5), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (2, 1, 17), + ] + B_ = build_branching(edges, double=True) + assert_equal_branchings(B, B_) + + +def test_edmonds2_maxbranch(): + G = G2() + x = branchings.maximum_branching(G) + x_ = build_branching(optimal_branching_2a) + assert_equal_branchings(x, x_) + + +def test_edmonds2_maxarbor(): + G = G2() + x = branchings.maximum_spanning_arborescence(G) + x_ = build_branching(optimal_arborescence_2) + assert_equal_branchings(x, x_) + + +def test_edmonds2_minarbor(): + G = G1() + x = branchings.minimum_spanning_arborescence(G) + # This was obtained from algorithm. Need to verify it independently. + # Branch weight is: 96 + edges = [ + (3, 0, 5), + (0, 2, 12), + (0, 4, 12), + (2, 5, 12), + (4, 7, 12), + (5, 8, 12), + (5, 6, 14), + (2, 1, 17), + ] + x_ = build_branching(edges) + assert_equal_branchings(x, x_) + + +def test_edmonds3_minbranch1(): + G = G1() + x = branchings.minimum_branching(G) + edges = [] + x_ = build_branching(edges) + assert_equal_branchings(x, x_) + + +def test_edmonds3_minbranch2(): + G = G1() + G.add_edge(8, 9, weight=-10) + x = branchings.minimum_branching(G) + edges = [(8, 9, -10)] + x_ = build_branching(edges) + assert_equal_branchings(x, x_) + + +# Need more tests + + +def test_mst(): + # Make sure we get the same results for undirected graphs. 
+ # Example from: https://en.wikipedia.org/wiki/Kruskal's_algorithm + G = nx.Graph() + edgelist = [ + (0, 3, [("weight", 5)]), + (0, 1, [("weight", 7)]), + (1, 3, [("weight", 9)]), + (1, 2, [("weight", 8)]), + (1, 4, [("weight", 7)]), + (3, 4, [("weight", 15)]), + (3, 5, [("weight", 6)]), + (2, 4, [("weight", 5)]), + (4, 5, [("weight", 8)]), + (4, 6, [("weight", 9)]), + (5, 6, [("weight", 11)]), + ] + G.add_edges_from(edgelist) + G = G.to_directed() + x = branchings.minimum_spanning_arborescence(G) + + edges = [ + ({0, 1}, 7), + ({0, 3}, 5), + ({3, 5}, 6), + ({1, 4}, 7), + ({4, 2}, 5), + ({4, 6}, 9), + ] + + assert x.number_of_edges() == len(edges) + for u, v, d in x.edges(data=True): + assert ({u, v}, d["weight"]) in edges + + +def test_mixed_nodetypes(): + # Smoke test to make sure no TypeError is raised for mixed node types. + G = nx.Graph() + edgelist = [(0, 3, [("weight", 5)]), (0, "1", [("weight", 5)])] + G.add_edges_from(edgelist) + G = G.to_directed() + x = branchings.minimum_spanning_arborescence(G) + + +def test_edmonds1_minbranch(): + # Using -G_array and min should give the same as optimal_arborescence_1, + # but with all edges negative. + edges = [(u, v, -w) for (u, v, w) in optimal_arborescence_1] + + G = nx.from_numpy_array(-G_array, create_using=nx.DiGraph) + + # Quickly make sure max branching is empty. + x = branchings.maximum_branching(G) + x_ = build_branching([]) + assert_equal_branchings(x, x_) + + # Now test the min branching. + x = branchings.minimum_branching(G) + x_ = build_branching(edges) + assert_equal_branchings(x, x_) + + +def test_edge_attribute_preservation_normal_graph(): + # Test that edge attributes are preserved when finding an optimum graph + # using the Edmonds class for normal graphs. + G = nx.Graph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist) + + B = branchings.maximum_branching(G, preserve_attrs=True) + + assert B[0][1]["otherattr"] == 1 + assert B[0][1]["otherattr2"] == 3 + + +def test_edge_attribute_preservation_multigraph(): + # Test that edge attributes are preserved when finding an optimum graph + # using the Edmonds class for multigraphs. + G = nx.MultiGraph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist * 2) # Make sure we have duplicate edge paths + + B = branchings.maximum_branching(G, preserve_attrs=True) + + assert B[0][1][0]["otherattr"] == 1 + assert B[0][1][0]["otherattr2"] == 3 + + +def test_edge_attribute_discard(): + # Test that edge attributes are discarded if we do not specify to keep them + G = nx.Graph() + + edgelist = [ + (0, 1, [("weight", 5), ("otherattr", 1), ("otherattr2", 3)]), + (0, 2, [("weight", 5), ("otherattr", 2), ("otherattr2", 2)]), + (1, 2, [("weight", 6), ("otherattr", 3), ("otherattr2", 1)]), + ] + G.add_edges_from(edgelist) + + B = branchings.maximum_branching(G, preserve_attrs=False) + + edge_dict = B[0][1] + with pytest.raises(KeyError): + _ = edge_dict["otherattr"] + + +def test_partition_spanning_arborescence(): + """ + Test that we can generate minimum spanning arborescences which respect the + given partition. 
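+
+    Edges marked ``EXCLUDED`` must not appear in the returned arborescence,
+    while edges marked ``INCLUDED`` must appear.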
+ """ + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + G[3][0]["partition"] = nx.EdgePartition.EXCLUDED + G[2][3]["partition"] = nx.EdgePartition.INCLUDED + G[7][3]["partition"] = nx.EdgePartition.EXCLUDED + G[0][2]["partition"] = nx.EdgePartition.EXCLUDED + G[6][2]["partition"] = nx.EdgePartition.INCLUDED + + actual_edges = [ + (0, 4, 12), + (1, 0, 4), + (1, 5, 13), + (2, 3, 21), + (4, 7, 12), + (5, 6, 14), + (5, 8, 12), + (6, 2, 21), + ] + + B = branchings.minimum_spanning_arborescence(G, partition="partition") + assert_equal_branchings(build_branching(actual_edges), B) + + +def test_arborescence_iterator_min(): + """ + Tests the arborescence iterator. + + A brute force method found 680 arborescences in this graph. + This test will not verify all of them individually, but will check two + things + + * The iterator returns 680 arborescences + * The weight of the arborescences is non-strictly increasing + + for more information please visit + https://mjschwenne.github.io/2021/06/10/implementing-the-iterators.html + """ + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + + arborescence_count = 0 + arborescence_weight = -math.inf + for B in branchings.ArborescenceIterator(G): + arborescence_count += 1 + new_arborescence_weight = B.size(weight="weight") + assert new_arborescence_weight >= arborescence_weight + arborescence_weight = new_arborescence_weight + + assert arborescence_count == 680 + + +def test_arborescence_iterator_max(): + """ + Tests the arborescence iterator. + + A brute force method found 680 arborescences in this graph. + This test will not verify all of them individually, but will check two + things + + * The iterator returns 680 arborescences + * The weight of the arborescences is non-strictly decreasing + + for more information please visit + https://mjschwenne.github.io/2021/06/10/implementing-the-iterators.html + """ + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + + arborescence_count = 0 + arborescence_weight = math.inf + for B in branchings.ArborescenceIterator(G, minimum=False): + arborescence_count += 1 + new_arborescence_weight = B.size(weight="weight") + assert new_arborescence_weight <= arborescence_weight + arborescence_weight = new_arborescence_weight + + assert arborescence_count == 680 + + +def test_arborescence_iterator_initial_partition(): + """ + Tests the arborescence iterator with three included edges and three excluded + in the initial partition. + + A brute force method similar to the one used in the above tests found that + there are 16 arborescences which contain the included edges and not the + excluded edges. + """ + G = nx.from_numpy_array(G_array, create_using=nx.DiGraph) + included_edges = [(1, 0), (5, 6), (8, 7)] + excluded_edges = [(0, 2), (3, 6), (1, 5)] + + arborescence_count = 0 + arborescence_weight = -math.inf + for B in branchings.ArborescenceIterator( + G, init_partition=(included_edges, excluded_edges) + ): + arborescence_count += 1 + new_arborescence_weight = B.size(weight="weight") + assert new_arborescence_weight >= arborescence_weight + arborescence_weight = new_arborescence_weight + for e in included_edges: + assert e in B.edges + for e in excluded_edges: + assert e not in B.edges + assert arborescence_count == 16 + + +def test_branchings_with_default_weights(): + """ + Tests that various branching algorithms work on graphs without weights. + For more information, see issue #7279. 
+    """
+    graph = nx.erdos_renyi_graph(10, p=0.2, directed=True, seed=123)
+
+    assert all("weight" not in d for (u, v, d) in graph.edges(data=True)), (
+        "test is for graphs without a weight attribute"
+    )
+
+    # These calls may add a weight attribute to the graph they are given,
+    # so pass a copy to keep the original graph unmodified.
+    nx.minimum_spanning_arborescence(graph.copy())
+    nx.maximum_spanning_arborescence(graph.copy())
+    nx.minimum_branching(graph.copy())
+    nx.maximum_branching(graph.copy())
+    nx.algorithms.tree.minimal_branching(graph.copy())
+    nx.algorithms.tree.branching_weight(graph.copy())
+    nx.algorithms.tree.greedy_branching(graph.copy())
+
+    assert all("weight" not in d for (u, v, d) in graph.edges(data=True)), (
+        "The above calls should not modify the initial graph in-place"
+    )
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py
new file mode 100644
index 0000000..26bd408
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_coding.py
@@ -0,0 +1,114 @@
+"""Unit tests for the :mod:`~networkx.algorithms.tree.coding` module."""
+
+from itertools import product
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
+class TestPruferSequence:
+    """Unit tests for the Prüfer sequence encoding and decoding
+    functions.
+
+    """
+
+    def test_nontree(self):
+        with pytest.raises(nx.NotATree):
+            G = nx.cycle_graph(3)
+            nx.to_prufer_sequence(G)
+
+    def test_null_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.to_prufer_sequence(nx.null_graph())
+
+    def test_trivial_graph(self):
+        with pytest.raises(nx.NetworkXPointlessConcept):
+            nx.to_prufer_sequence(nx.trivial_graph())
+
+    def test_bad_integer_labels(self):
+        with pytest.raises(KeyError):
+            T = nx.Graph(nx.utils.pairwise("abc"))
+            nx.to_prufer_sequence(T)
+
+    def test_encoding(self):
+        """Tests for encoding a tree as a Prüfer sequence using the
+        iterative strategy.
+
+        """
+        # Example from Wikipedia.
+        tree = nx.Graph([(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)])
+        sequence = nx.to_prufer_sequence(tree)
+        assert sequence == [3, 3, 3, 4]
+
+    def test_decoding(self):
+        """Tests for decoding a tree from a Prüfer sequence."""
+        # Example from Wikipedia.
+        sequence = [3, 3, 3, 4]
+        tree = nx.from_prufer_sequence(sequence)
+        assert nodes_equal(list(tree), list(range(6)))
+        edges = [(0, 3), (1, 3), (2, 3), (3, 4), (4, 5)]
+        assert edges_equal(list(tree.edges()), edges)
+
+    def test_decoding2(self):
+        # Example from "An Optimal Algorithm for Prufer Codes".
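+        # A Prüfer sequence of length n - 2 decodes to a labeled tree on n
+        # nodes, so this length-6 sequence should yield the 8-node tree below.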
+ sequence = [2, 4, 0, 1, 3, 3] + tree = nx.from_prufer_sequence(sequence) + assert nodes_equal(list(tree), list(range(8))) + edges = [(0, 1), (0, 4), (1, 3), (2, 4), (2, 5), (3, 6), (3, 7)] + assert edges_equal(list(tree.edges()), edges) + + def test_inverse(self): + """Tests that the encoding and decoding functions are inverses.""" + for T in nx.nonisomorphic_trees(4): + T2 = nx.from_prufer_sequence(nx.to_prufer_sequence(T)) + assert nodes_equal(list(T), list(T2)) + assert edges_equal(list(T.edges()), list(T2.edges())) + + for seq in product(range(4), repeat=2): + seq2 = nx.to_prufer_sequence(nx.from_prufer_sequence(seq)) + assert list(seq) == seq2 + + +class TestNestedTuple: + """Unit tests for the nested tuple encoding and decoding functions.""" + + def test_nontree(self): + with pytest.raises(nx.NotATree): + G = nx.cycle_graph(3) + nx.to_nested_tuple(G, 0) + + def test_unknown_root(self): + with pytest.raises(nx.NodeNotFound): + G = nx.path_graph(2) + nx.to_nested_tuple(G, "bogus") + + def test_encoding(self): + T = nx.full_rary_tree(2, 2**3 - 1) + expected = (((), ()), ((), ())) + actual = nx.to_nested_tuple(T, 0) + assert nodes_equal(expected, actual) + + def test_canonical_form(self): + T = nx.Graph() + T.add_edges_from([(0, 1), (0, 2), (0, 3)]) + T.add_edges_from([(1, 4), (1, 5)]) + T.add_edges_from([(3, 6), (3, 7)]) + root = 0 + actual = nx.to_nested_tuple(T, root, canonical_form=True) + expected = ((), ((), ()), ((), ())) + assert actual == expected + + def test_decoding(self): + balanced = (((), ()), ((), ())) + expected = nx.full_rary_tree(2, 2**3 - 1) + actual = nx.from_nested_tuple(balanced) + assert nx.is_isomorphic(expected, actual) + + def test_sensible_relabeling(self): + balanced = (((), ()), ((), ())) + T = nx.from_nested_tuple(balanced, sensible_relabeling=True) + edges = [(0, 1), (0, 2), (1, 3), (1, 4), (2, 5), (2, 6)] + assert nodes_equal(list(T), list(range(2**3 - 1))) + assert edges_equal(list(T.edges()), edges) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py new file mode 100644 index 0000000..8c37605 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_decomposition.py @@ -0,0 +1,79 @@ +import networkx as nx +from networkx.algorithms.tree.decomposition import junction_tree + + +def test_junction_tree_directed_confounders(): + B = nx.DiGraph() + B.add_edges_from([("A", "C"), ("B", "C"), ("C", "D"), ("C", "E")]) + + G = junction_tree(B) + J = nx.Graph() + J.add_edges_from( + [ + (("C", "E"), ("C",)), + (("C",), ("A", "B", "C")), + (("A", "B", "C"), ("C",)), + (("C",), ("C", "D")), + ] + ) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_unconnected_nodes(): + B = nx.DiGraph() + B.add_nodes_from([("A", "B", "C", "D")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_nodes_from([("A", "B", "C", "D")]) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_cascade(): + B = nx.DiGraph() + B.add_edges_from([("A", "B"), ("B", "C"), ("C", "D")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_edges_from( + [ + (("A", "B"), ("B",)), + (("B",), ("B", "C")), + (("B", "C"), ("C",)), + (("C",), ("C", "D")), + ] + ) + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_directed_unconnected_edges(): + B = nx.DiGraph() + B.add_edges_from([("A", "B"), ("C", "D"), ("E", "F")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_nodes_from([("A", "B"), ("C", 
"D"), ("E", "F")]) + + assert nx.is_isomorphic(G, J) + + +def test_junction_tree_undirected(): + B = nx.Graph() + B.add_edges_from([("A", "C"), ("A", "D"), ("B", "C"), ("C", "E")]) + G = junction_tree(B) + + J = nx.Graph() + J.add_edges_from( + [ + (("A", "D"), ("A",)), + (("A",), ("A", "C")), + (("A", "C"), ("C",)), + (("C",), ("B", "C")), + (("B", "C"), ("C",)), + (("C",), ("C", "E")), + ] + ) + + assert nx.is_isomorphic(G, J) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py new file mode 100644 index 0000000..f8945a7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_mst.py @@ -0,0 +1,918 @@ +"""Unit tests for the :mod:`networkx.algorithms.tree.mst` module.""" + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +def test_unknown_algorithm(): + with pytest.raises(ValueError): + nx.minimum_spanning_tree(nx.Graph(), algorithm="random") + with pytest.raises( + ValueError, match="random is not a valid choice for an algorithm." + ): + nx.maximum_spanning_edges(nx.Graph(), algorithm="random") + + +class MinimumSpanningTreeTestBase: + """Base class for test classes for minimum spanning tree algorithms. + This class contains some common tests that will be inherited by + subclasses. Each subclass must have a class attribute + :data:`algorithm` that is a string representing the algorithm to + run, as described under the ``algorithm`` keyword argument for the + :func:`networkx.minimum_spanning_edges` function. Subclasses can + then implement any algorithm-specific tests. + """ + + def setup_method(self, method): + """Creates an example graph and stores the expected minimum and + maximum spanning tree edges. + """ + # This stores the class attribute `algorithm` in an instance attribute. + self.algo = self.algorithm + # This example graph comes from Wikipedia: + # https://en.wikipedia.org/wiki/Kruskal's_algorithm + edges = [ + (0, 1, 7), + (0, 3, 5), + (1, 2, 8), + (1, 3, 9), + (1, 4, 7), + (2, 4, 5), + (3, 4, 15), + (3, 5, 6), + (4, 5, 8), + (4, 6, 9), + (5, 6, 11), + ] + self.G = nx.Graph() + self.G.add_weighted_edges_from(edges) + self.minimum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (0, 3, {"weight": 5}), + (1, 4, {"weight": 7}), + (2, 4, {"weight": 5}), + (3, 5, {"weight": 6}), + (4, 6, {"weight": 9}), + ] + self.maximum_spanning_edgelist = [ + (0, 1, {"weight": 7}), + (1, 2, {"weight": 8}), + (1, 3, {"weight": 9}), + (3, 4, {"weight": 15}), + (4, 6, {"weight": 9}), + (5, 6, {"weight": 11}), + ] + + def test_minimum_edges(self): + edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo) + # Edges from the spanning edges functions don't come in sorted + # orientation, so we need to sort each edge individually. + actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges) + assert edges_equal(actual, self.minimum_spanning_edgelist) + + def test_maximum_edges(self): + edges = nx.maximum_spanning_edges(self.G, algorithm=self.algo) + # Edges from the spanning edges functions don't come in sorted + # orientation, so we need to sort each edge individually. 
+        actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges)
+        assert edges_equal(actual, self.maximum_spanning_edgelist)
+
+    def test_without_data(self):
+        edges = nx.minimum_spanning_edges(self.G, algorithm=self.algo, data=False)
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, expected)
+
+    def test_nan_weights(self):
+        # Edges with NaN weight must never appear in the spanning tree; see #2164.
+        G = self.G
+        G.add_edge(0, 12, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        expected = [(u, v) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, expected)
+        # Now test that an exception is raised when NaN weights are not ignored
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=False
+        )
+        with pytest.raises(ValueError):
+            list(edges)
+        # ignore_nan defaults to False, so this should raise as well
+        edges = nx.minimum_spanning_edges(G, algorithm=self.algo, data=False)
+        with pytest.raises(ValueError):
+            list(edges)
+
+    def test_nan_weights_MultiGraph(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 12, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm="prim", data=False, ignore_nan=False
+        )
+        with pytest.raises(ValueError):
+            list(edges)
+        # ignore_nan defaults to False, so this should raise as well
+        edges = nx.minimum_spanning_edges(G, algorithm="prim", data=False)
+        with pytest.raises(ValueError):
+            list(edges)
+
+    def test_nan_weights_order(self):
+        # Now try again with a NaN edge at the beginning of G.nodes
+        edges = [
+            (0, 1, 7),
+            (0, 3, 5),
+            (1, 2, 8),
+            (1, 3, 9),
+            (1, 4, 7),
+            (2, 4, 5),
+            (3, 4, 15),
+            (3, 5, 6),
+            (4, 5, 8),
+            (4, 6, 9),
+            (5, 6, 11),
+        ]
+        G = nx.Graph()
+        G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges])
+        G.add_edge(0, 7, weight=float("nan"))
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, shift)
+
+    def test_isolated_node(self):
+        # Now try again with an isolated node
+        edges = [
+            (0, 1, 7),
+            (0, 3, 5),
+            (1, 2, 8),
+            (1, 3, 9),
+            (1, 4, 7),
+            (2, 4, 5),
+            (3, 4, 15),
+            (3, 5, 6),
+            (4, 5, 8),
+            (4, 6, 9),
+            (5, 6, 11),
+        ]
+        G = nx.Graph()
+        G.add_weighted_edges_from([(u + 1, v + 1, wt) for u, v, wt in edges])
+        G.add_node(0)
+        edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, data=False, ignore_nan=True
+        )
+        actual = sorted((min(u, v), max(u, v)) for u, v in edges)
+        shift = [(u + 1, v + 1) for u, v, d in self.minimum_spanning_edgelist]
+        assert edges_equal(actual, shift)
+
+    def test_minimum_tree(self):
+        T = nx.minimum_spanning_tree(self.G, algorithm=self.algo)
+        actual = sorted(T.edges(data=True))
+        assert edges_equal(actual, self.minimum_spanning_edgelist)
+
+    def test_maximum_tree(self):
+        T = nx.maximum_spanning_tree(self.G, algorithm=self.algo)
+        actual = sorted(T.edges(data=True))
+        assert edges_equal(actual, self.maximum_spanning_edgelist)
+
+    def test_disconnected(self):
+        G = nx.Graph([(0, 1, {"weight": 1}), (2, 3, {"weight": 2})])
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert nodes_equal(list(T), list(range(4)))
+        assert edges_equal(list(T.edges()), [(0, 1), (2, 3)])
+
+    def test_empty_graph(self):
+        G = nx.empty_graph(3)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert nodes_equal(sorted(T), list(range(3)))
+        assert T.number_of_edges() == 0
+
+    def test_attributes(self):
+        G = nx.Graph()
+        G.add_edge(1, 2, weight=1, color="red", distance=7)
+        G.add_edge(2, 3, weight=1, color="green", distance=2)
+        G.add_edge(1, 3, weight=10, color="blue", distance=1)
+        G.graph["foo"] = "bar"
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert T.graph == G.graph
+        assert nodes_equal(T, G)
+        for u, v in T.edges():
+            assert T.adj[u][v] == G.adj[u][v]
+
+    def test_weight_attribute(self):
+        G = nx.Graph()
+        G.add_edge(0, 1, weight=1, distance=7)
+        G.add_edge(0, 2, weight=30, distance=1)
+        G.add_edge(1, 2, weight=1, distance=1)
+        G.add_node(3)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo, weight="distance")
+        assert nodes_equal(sorted(T), list(range(4)))
+        assert edges_equal(sorted(T.edges()), [(0, 2), (1, 2)])
+        T = nx.maximum_spanning_tree(G, algorithm=self.algo, weight="distance")
+        assert nodes_equal(sorted(T), list(range(4)))
+        assert edges_equal(sorted(T.edges()), [(0, 1), (0, 2)])
+
+
+class TestBoruvka(MinimumSpanningTreeTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Borůvka's algorithm.
+    """
+
+    algorithm = "boruvka"
+
+    def test_unicode_name(self):
+        """Tests that using a Unicode string can correctly indicate
+        Borůvka's algorithm.
+        """
+        edges = nx.minimum_spanning_edges(self.G, algorithm="borůvka")
+        # Edges from the spanning edges functions don't come in sorted
+        # orientation, so we need to sort each edge individually.
+        actual = sorted((min(u, v), max(u, v), d) for u, v, d in edges)
+        assert edges_equal(actual, self.minimum_spanning_edgelist)
+
+
+class MultigraphMSTTestBase(MinimumSpanningTreeTestBase):
+    # Abstract class
+
+    def test_multigraph_keys_min(self):
+        """Tests that the minimum spanning edges of a multigraph
+        preserve edge keys.
+        """
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        min_edges = nx.minimum_spanning_edges
+        mst_edges = min_edges(G, algorithm=self.algo, data=False)
+        assert edges_equal([(0, 1, "b")], list(mst_edges))
+
+    def test_multigraph_keys_max(self):
+        """Tests that the maximum spanning edges of a multigraph
+        preserve edge keys.
+        """
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        max_edges = nx.maximum_spanning_edges
+        mst_edges = max_edges(G, algorithm=self.algo, data=False)
+        assert edges_equal([(0, 1, "a")], list(mst_edges))
+
+
+class TestKruskal(MultigraphMSTTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Kruskal's algorithm.
+    """
+
+    algorithm = "kruskal"
+
+    def test_key_data_bool(self):
+        """Tests that keys and data values are included in MST edges
+        based on whether the ``keys`` and ``data`` parameters are
+        true or false."""
+        G = nx.MultiGraph()
+        G.add_edge(1, 2, key=1, weight=2)
+        G.add_edge(1, 2, key=2, weight=3)
+        G.add_edge(3, 2, key=1, weight=2)
+        G.add_edge(3, 1, key=1, weight=4)
+
+        # keys are included and data is not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=True, data=False
+        )
+        assert edges_equal([(1, 2, 1), (2, 3, 1)], list(mst_edges))
+
+        # keys are not included and data is included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=True
+        )
+        assert edges_equal(
+            [(1, 2, {"weight": 2}), (2, 3, {"weight": 2})], list(mst_edges)
+        )
+
+        # both keys and data are not included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=False, data=False
+        )
+        assert edges_equal([(1, 2), (2, 3)], list(mst_edges))
+
+        # both keys and data are included
+        mst_edges = nx.minimum_spanning_edges(
+            G, algorithm=self.algo, keys=True, data=True
+        )
+        assert edges_equal(
+            [(1, 2, 1, {"weight": 2}), (2, 3, 1, {"weight": 2})], list(mst_edges)
+        )
+
+
+class TestPrim(MultigraphMSTTestBase):
+    """Unit tests for computing a minimum (or maximum) spanning tree
+    using Prim's algorithm.
+    """
+
+    algorithm = "prim"
+
+    def test_prim_mst_edges_simple_graph(self):
+        H = nx.Graph()
+        H.add_edge(1, 2, key=2, weight=3)
+        H.add_edge(3, 2, key=1, weight=2)
+        H.add_edge(3, 1, key=1, weight=4)
+
+        mst_edges = nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=True)
+        assert edges_equal(
+            [(1, 2, {"key": 2, "weight": 3}), (2, 3, {"key": 1, "weight": 2})],
+            list(mst_edges),
+        )
+
+    def test_ignore_nan(self):
+        """Tests that edges with NaN weights are ignored, or raise an
+        error, depending on whether ``ignore_nan`` is true or false."""
+        H = nx.MultiGraph()
+        H.add_edge(1, 2, key=1, weight=float("nan"))
+        H.add_edge(1, 2, key=2, weight=3)
+        H.add_edge(3, 2, key=1, weight=2)
+        H.add_edge(3, 1, key=1, weight=4)
+
+        # NaN weight edges are ignored when ignore_nan=True
+        mst_edges = nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=True)
+        assert edges_equal(
+            [(1, 2, 2, {"weight": 3}), (2, 3, 1, {"weight": 2})], list(mst_edges)
+        )
+
+        # NaN weight edges raise an error when ignore_nan=False
+        with pytest.raises(ValueError):
+            list(nx.minimum_spanning_edges(H, algorithm=self.algo, ignore_nan=False))
+
+    def test_multigraph_keys_tree(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        T = nx.minimum_spanning_tree(G, algorithm=self.algo)
+        assert edges_equal([(0, 1, 1)], list(T.edges(data="weight")))
+
+    def test_multigraph_keys_tree_max(self):
+        G = nx.MultiGraph()
+        G.add_edge(0, 1, key="a", weight=2)
+        G.add_edge(0, 1, key="b", weight=1)
+        T = nx.maximum_spanning_tree(G, algorithm=self.algo)
+        assert edges_equal([(0, 1, 2)], list(T.edges(data="weight")))
+
+
+class TestSpanningTreeIterator:
+    """
+    Tests the spanning tree iterator on the example graph in the 2005 Sörensen
+    and Janssens paper "An Algorithm to Generate all Spanning Trees of a Graph
+    in Order of Increasing Cost".
+    """
+
+    def setup_method(self):
+        # Original Graph
+        edges = [(0, 1, 5), (1, 2, 4), (1, 4, 6), (2, 3, 5), (2, 4, 7), (3, 4, 3)]
+        self.G = nx.Graph()
+        self.G.add_weighted_edges_from(edges)
+        # List of lists of spanning trees in increasing order
+        self.spanning_trees = [
+            # 1, MST, cost = 17
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 3, {"weight": 5}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 2, cost = 18
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (1, 4, {"weight": 6}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 3, cost = 19
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 4, cost = 19
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 4, {"weight": 7}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 5, cost = 20
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+            ],
+            # 6, cost = 21
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 4, {"weight": 7}),
+                (3, 4, {"weight": 3}),
+            ],
+            # 7, cost = 21
+            [
+                (0, 1, {"weight": 5}),
+                (1, 2, {"weight": 4}),
+                (2, 3, {"weight": 5}),
+                (2, 4, {"weight": 7}),
+            ],
+            # 8, cost = 23
+            [
+                (0, 1, {"weight": 5}),
+                (1, 4, {"weight": 6}),
+                (2, 3, {"weight": 5}),
+                (2, 4, {"weight": 7}),
+            ],
+        ]
+
+    def test_minimum_spanning_tree_iterator(self):
+        """
+        Tests that the spanning trees are correctly returned in increasing order
+        """
+        tree_index = 0
+        for tree in nx.SpanningTreeIterator(self.G):
+            actual = sorted(tree.edges(data=True))
+            assert edges_equal(actual, self.spanning_trees[tree_index])
+            tree_index += 1
+
+    def test_maximum_spanning_tree_iterator(self):
+        """
+        Tests that the spanning trees are correctly returned in decreasing order
+        """
+        tree_index = 7
+        for tree in nx.SpanningTreeIterator(self.G, minimum=False):
+            actual = sorted(tree.edges(data=True))
+            assert edges_equal(actual, self.spanning_trees[tree_index])
+            tree_index -= 1
+
+
+class TestSpanningTreeMultiGraphIterator:
+    """
+    Uses the same graph as the above class but with each edge duplicated by a
+    parallel edge of twice the weight.
+    """
+
+    def setup_method(self):
+        # New graph
+        edges = [
+            (0, 1, 5),
+            (0, 1, 10),
+            (1, 2, 4),
+            (1, 2, 8),
+            (1, 4, 6),
+            (1, 4, 12),
+            (2, 3, 5),
+            (2, 3, 10),
+            (2, 4, 7),
+            (2, 4, 14),
+            (3, 4, 3),
+            (3, 4, 6),
+        ]
+        self.G = nx.MultiGraph()
+        self.G.add_weighted_edges_from(edges)
+
+        # There are 128 trees. I'd rather not list all 128 here, and computing them
+        # on such a small graph actually doesn't take that long.
+        from itertools import combinations
+
+        self.spanning_trees = []
+        for e in combinations(self.G.edges, 4):
+            tree = self.G.edge_subgraph(e)
+            if nx.is_tree(tree):
+                self.spanning_trees.append(sorted(tree.edges(keys=True, data=True)))
+
+    def test_minimum_spanning_tree_iterator_multigraph(self):
+        """
+        Tests that the spanning trees are correctly returned in increasing order
+        """
+        tree_index = 0
+        last_weight = 0
+        for tree in nx.SpanningTreeIterator(self.G):
+            actual = sorted(tree.edges(keys=True, data=True))
+            weight = sum(e[3]["weight"] for e in actual)
+            assert actual in self.spanning_trees
+            assert weight >= last_weight
+            last_weight = weight
+            tree_index += 1
+
+    def test_maximum_spanning_tree_iterator_multigraph(self):
+        """
+        Tests that the spanning trees are correctly returned in decreasing order
+        """
+        tree_index = 127
+        # Maximum weight tree is 46
+        last_weight = 50
+        for tree in nx.SpanningTreeIterator(self.G, minimum=False):
+            actual = sorted(tree.edges(keys=True, data=True))
+            weight = sum(e[3]["weight"] for e in actual)
+            assert actual in self.spanning_trees
+            assert weight <= last_weight
+            last_weight = weight
+            tree_index -= 1
+
+
+def test_random_spanning_tree_multiplicative_small():
+    """
+    Using a fixed seed, sample one tree for repeatability.
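+
+    With the default multiplicative method, a tree is sampled with
+    probability proportional to the product of its edge weights, here
+    exp(gamma[e]) for each edge e.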
+ """ + from math import exp + + pytest.importorskip("scipy") + + gamma = { + (0, 1): -0.6383, + (0, 2): -0.6827, + (0, 5): 0, + (1, 2): -1.0781, + (1, 4): 0, + (2, 3): 0, + (5, 3): -0.2820, + (5, 4): -0.3327, + (4, 3): -0.9927, + } + + # The undirected support of gamma + G = nx.Graph() + for u, v in gamma: + G.add_edge(u, v, lambda_key=exp(gamma[(u, v)])) + + solution_edges = [(2, 3), (3, 4), (0, 5), (5, 4), (4, 1)] + solution = nx.Graph() + solution.add_edges_from(solution_edges) + + sampled_tree = nx.random_spanning_tree(G, "lambda_key", seed=42) + + assert nx.utils.edges_equal(solution.edges, sampled_tree.edges) + + +@pytest.mark.slow +def test_random_spanning_tree_multiplicative_large(): + """ + Sample many trees from the distribution created in the last test + """ + from math import exp + from random import Random + + pytest.importorskip("numpy") + stats = pytest.importorskip("scipy.stats") + + gamma = { + (0, 1): -0.6383, + (0, 2): -0.6827, + (0, 5): 0, + (1, 2): -1.0781, + (1, 4): 0, + (2, 3): 0, + (5, 3): -0.2820, + (5, 4): -0.3327, + (4, 3): -0.9927, + } + + # The undirected support of gamma + G = nx.Graph() + for u, v in gamma: + G.add_edge(u, v, lambda_key=exp(gamma[(u, v)])) + + # Find the multiplicative weight for each tree. + total_weight = 0 + tree_expected = {} + for t in nx.SpanningTreeIterator(G): + # Find the multiplicative weight of the spanning tree + weight = 1 + for u, v, d in t.edges(data="lambda_key"): + weight *= d + tree_expected[t] = weight + total_weight += weight + + # Assert that every tree has an entry in the expected distribution + assert len(tree_expected) == 75 + + # Set the sample size and then calculate the expected number of times we + # expect to see each tree. This test uses a near minimum sample size where + # the most unlikely tree has an expected frequency of 5.15. + # (Minimum required is 5) + # + # Here we also initialize the tree_actual dict so that we know the keys + # match between the two. We will later take advantage of the fact that since + # python 3.7 dict order is guaranteed so the expected and actual data will + # have the same order. + sample_size = 1200 + tree_actual = {} + for t in tree_expected: + tree_expected[t] = (tree_expected[t] / total_weight) * sample_size + tree_actual[t] = 0 + + # Sample the spanning trees + # + # Assert that they are actually trees and record which of the 75 trees we + # have sampled. + # + # For repeatability, we want to take advantage of the decorators in NetworkX + # to randomly sample the same sample each time. However, if we pass in a + # constant seed to sample_spanning_tree we will get the same tree each time. + # Instead, we can create our own random number generator with a fixed seed + # and pass those into sample_spanning_tree. + rng = Random(37) + for _ in range(sample_size): + sampled_tree = nx.random_spanning_tree(G, "lambda_key", seed=rng) + assert nx.is_tree(sampled_tree) + + for t in tree_expected: + if nx.utils.edges_equal(t.edges, sampled_tree.edges): + tree_actual[t] += 1 + break + + # Conduct a Chi squared test to see if the actual distribution matches the + # expected one at an alpha = 0.05 significance level. + # + # H_0: The distribution of trees in tree_actual matches the normalized product + # of the edge weights in the tree. + # + # H_a: The distribution of trees in tree_actual follows some other + # distribution of spanning trees. 
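+    #
+    # stats.chisquare returns the test statistic and the p-value; with the
+    # default ddof it uses k - 1 degrees of freedom for the k = 75 trees.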
+    _, p = stats.chisquare(list(tree_actual.values()), list(tree_expected.values()))
+
+    # Assert that p is greater than the significance level so that we do not
+    # reject the null hypothesis
+    assert not p < 0.05
+
+
+def test_random_spanning_tree_additive_small():
+    """
+    Sample a single spanning tree from the additive method.
+    """
+    pytest.importorskip("scipy")
+
+    edges = {
+        (0, 1): 1,
+        (0, 2): 1,
+        (0, 5): 3,
+        (1, 2): 2,
+        (1, 4): 3,
+        (2, 3): 3,
+        (5, 3): 4,
+        (5, 4): 5,
+        (4, 3): 4,
+    }
+
+    # Build the graph
+    G = nx.Graph()
+    for u, v in edges:
+        G.add_edge(u, v, weight=edges[(u, v)])
+
+    solution_edges = [(0, 2), (1, 2), (2, 3), (3, 4), (3, 5)]
+    solution = nx.Graph()
+    solution.add_edges_from(solution_edges)
+
+    sampled_tree = nx.random_spanning_tree(
+        G, weight="weight", multiplicative=False, seed=37
+    )
+
+    assert nx.utils.edges_equal(solution.edges, sampled_tree.edges)
+
+
+@pytest.mark.slow
+def test_random_spanning_tree_additive_large():
+    """
+    Sample many spanning trees from the additive method.
+    """
+    from random import Random
+
+    pytest.importorskip("numpy")
+    stats = pytest.importorskip("scipy.stats")
+
+    edges = {
+        (0, 1): 1,
+        (0, 2): 1,
+        (0, 5): 3,
+        (1, 2): 2,
+        (1, 4): 3,
+        (2, 3): 3,
+        (5, 3): 4,
+        (5, 4): 5,
+        (4, 3): 4,
+    }
+
+    # Build the graph
+    G = nx.Graph()
+    for u, v in edges:
+        G.add_edge(u, v, weight=edges[(u, v)])
+
+    # Find the additive weight for each tree.
+    total_weight = 0
+    tree_expected = {}
+    for t in nx.SpanningTreeIterator(G):
+        # Find the additive weight of the spanning tree
+        weight = 0
+        for u, v, d in t.edges(data="weight"):
+            weight += d
+        tree_expected[t] = weight
+        total_weight += weight
+
+    # Assert that every tree has an entry in the expected distribution
+    assert len(tree_expected) == 75
+
+    # Set the sample size and then calculate the expected number of times we
+    # expect to see each tree. This test uses a near minimum sample size where
+    # the most unlikely tree has an expected frequency of 5.07.
+    # (Minimum required is 5)
+    #
+    # Here we also initialize the tree_actual dict so that we know the keys
+    # match between the two. We will later take advantage of the fact that
+    # dict order is guaranteed since Python 3.7, so the expected and actual
+    # data will have the same order.
+    sample_size = 500
+    tree_actual = {}
+    for t in tree_expected:
+        tree_expected[t] = (tree_expected[t] / total_weight) * sample_size
+        tree_actual[t] = 0
+
+    # Sample the spanning trees
+    #
+    # Assert that they are actually trees and record which of the 75 trees we
+    # have sampled.
+    #
+    # For repeatability, we want to take advantage of the decorators in NetworkX
+    # to randomly sample the same sample each time. However, if we pass in a
+    # constant seed to sample_spanning_tree we will get the same tree each time.
+    # Instead, we can create our own random number generator with a fixed seed
+    # and pass that into sample_spanning_tree.
+    rng = Random(37)
+    for _ in range(sample_size):
+        sampled_tree = nx.random_spanning_tree(
+            G, "weight", multiplicative=False, seed=rng
+        )
+        assert nx.is_tree(sampled_tree)
+
+        for t in tree_expected:
+            if nx.utils.edges_equal(t.edges, sampled_tree.edges):
+                tree_actual[t] += 1
+                break
+
+    # Conduct a Chi squared test to see if the actual distribution matches the
+    # expected one at an alpha = 0.05 significance level.
+    #
+    # H_0: The distribution of trees in tree_actual matches the normalized sum
+    # of the edge weights in the tree.
+ # + # H_a: The distribution of trees in tree_actual follows some other + # distribution of spanning trees. + _, p = stats.chisquare(list(tree_actual.values()), list(tree_expected.values())) + + # Assert that p is greater than the significance level so that we do not + # reject the null hypothesis + assert not p < 0.05 + + +def test_random_spanning_tree_empty_graph(): + G = nx.Graph() + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 0 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_graph(): + G = nx.Graph() + G.add_node(0) + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 + + +def test_random_spanning_tree_single_node_loop(): + G = nx.Graph() + G.add_node(0) + G.add_edge(0, 0) + rst = nx.tree.random_spanning_tree(G) + assert len(rst.nodes) == 1 + assert len(rst.edges) == 0 + + +class TestNumberSpanningTrees: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + + def test_nst_disconnected(self): + G = nx.empty_graph(2) + assert np.isclose(nx.number_of_spanning_trees(G), 0) + + def test_nst_no_nodes(self): + G = nx.Graph() + with pytest.raises(nx.NetworkXPointlessConcept): + nx.number_of_spanning_trees(G) + + def test_nst_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=2) + # weights are ignored + assert np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight + assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), 5) + + def test_nst_negative_weight(self): + G = nx.Graph() + G.add_edge(1, 2, weight=1) + G.add_edge(1, 3, weight=-1) + G.add_edge(2, 3, weight=-2) + # weights are ignored + assert np.isclose(nx.number_of_spanning_trees(G), 3) + # including weight + assert np.isclose(nx.number_of_spanning_trees(G, weight="weight"), -1) + + def test_nst_selfloop(self): + # self-loops are ignored + G = nx.complete_graph(3) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G), 3) + + def test_nst_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2) + G.add_edge(1, 2) + G.add_edge(1, 3) + G.add_edge(2, 3) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_complete_graph(self): + # this is known as Cayley's formula + N = 5 + G = nx.complete_graph(N) + assert np.isclose(nx.number_of_spanning_trees(G), N ** (N - 2)) + + def test_nst_path_graph(self): + G = nx.path_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 1) + + def test_nst_cycle_graph(self): + G = nx.cycle_graph(5) + assert np.isclose(nx.number_of_spanning_trees(G), 5) + + def test_nst_directed_noroot(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G) + + def test_nst_directed_root_not_exist(self): + G = nx.empty_graph(3, create_using=nx.MultiDiGraph) + with pytest.raises(nx.NetworkXError): + nx.number_of_spanning_trees(G, root=42) + + def test_nst_directed_not_weak_connected(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(3, 4) + assert np.isclose(nx.number_of_spanning_trees(G, root=1), 0) + + def test_nst_directed_cycle_graph(self): + G = nx.DiGraph() + G = nx.cycle_graph(7, G) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_complete_graph(self): + G = nx.DiGraph() + G = nx.complete_graph(7, G) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 7**5) + + def test_nst_directed_multi(self): + G = 
nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 2) + + def test_nst_directed_selfloop(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.add_edge(1, 1) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 1) + + def test_nst_directed_weak_connected(self): + G = nx.MultiDiGraph() + G = nx.cycle_graph(3, G) + G.remove_edge(1, 2) + assert np.isclose(nx.number_of_spanning_trees(G, root=0), 0) + + def test_nst_directed_weighted(self): + # from root=1: + # arborescence 1: 1->2, 1->3, weight=2*1 + # arborescence 2: 1->2, 2->3, weight=2*3 + G = nx.DiGraph() + G.add_edge(1, 2, weight=2) + G.add_edge(1, 3, weight=1) + G.add_edge(2, 3, weight=3) + Nst = nx.number_of_spanning_trees(G, root=1, weight="weight") + assert np.isclose(Nst, 8) + Nst = nx.number_of_spanning_trees(G, root=2, weight="weight") + assert np.isclose(Nst, 0) + Nst = nx.number_of_spanning_trees(G, root=3, weight="weight") + assert np.isclose(Nst, 0) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py new file mode 100644 index 0000000..284d94e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_operations.py @@ -0,0 +1,53 @@ +from itertools import chain + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +def _check_custom_label_attribute(input_trees, res_tree, label_attribute): + res_attr_dict = nx.get_node_attributes(res_tree, label_attribute) + res_attr_set = set(res_attr_dict.values()) + input_label = (tree for tree, root in input_trees) + input_label_set = set(chain.from_iterable(input_label)) + return res_attr_set == input_label_set + + +def test_empty_sequence(): + """Joining the empty sequence results in the tree with one node.""" + T = nx.join_trees([]) + assert len(T) == 1 + assert T.number_of_edges() == 0 + + +def test_single(): + """Joining just one tree yields a tree with one more node.""" + T = nx.empty_graph(1) + trees = [(T, 0)] + actual_with_label = nx.join_trees(trees, label_attribute="custom_label") + expected = nx.path_graph(2) + assert nodes_equal(list(expected), list(actual_with_label)) + assert edges_equal(list(expected.edges()), list(actual_with_label.edges())) + + +def test_basic(): + """Joining multiple subtrees at a root node.""" + trees = [(nx.full_rary_tree(2, 2**2 - 1), 0) for i in range(2)] + expected = nx.full_rary_tree(2, 2**3 - 1) + actual = nx.join_trees(trees, label_attribute="old_labels") + assert nx.is_isomorphic(actual, expected) + assert _check_custom_label_attribute(trees, actual, "old_labels") + + actual_without_label = nx.join_trees(trees) + assert nx.is_isomorphic(actual_without_label, expected) + # check that no labels were stored + assert all(not data for _, data in actual_without_label.nodes(data=True)) + + +def test_first_label(): + """Test the functionality of the first_label argument.""" + T1 = nx.path_graph(3) + T2 = nx.path_graph(2) + actual = nx.join_trees([(T1, 0), (T2, 0)], first_label=10) + expected_nodes = set(range(10, 16)) + assert set(actual.nodes()) == expected_nodes + assert set(actual.neighbors(10)) == {11, 14} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py new file mode 100644 index 0000000..105f5a8 --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/networkx/algorithms/tree/tests/test_recognition.py @@ -0,0 +1,174 @@ +import pytest + +import networkx as nx + + +class TestTreeRecognition: + graph = nx.Graph + multigraph = nx.MultiGraph + + @classmethod + def setup_class(cls): + cls.T1 = cls.graph() + + cls.T2 = cls.graph() + cls.T2.add_node(1) + + cls.T3 = cls.graph() + cls.T3.add_nodes_from(range(5)) + edges = [(i, i + 1) for i in range(4)] + cls.T3.add_edges_from(edges) + + cls.T5 = cls.multigraph() + cls.T5.add_nodes_from(range(5)) + edges = [(i, i + 1) for i in range(4)] + cls.T5.add_edges_from(edges) + + cls.T6 = cls.graph() + cls.T6.add_nodes_from([6, 7]) + cls.T6.add_edge(6, 7) + + cls.F1 = nx.compose(cls.T6, cls.T3) + + cls.N4 = cls.graph() + cls.N4.add_node(1) + cls.N4.add_edge(1, 1) + + cls.N5 = cls.graph() + cls.N5.add_nodes_from(range(5)) + + cls.N6 = cls.graph() + cls.N6.add_nodes_from(range(3)) + cls.N6.add_edges_from([(0, 1), (1, 2), (2, 0)]) + + cls.NF1 = nx.compose(cls.T6, cls.N6) + + def test_null_tree(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_tree(self.graph()) + + def test_null_tree2(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_tree(self.multigraph()) + + def test_null_forest(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_forest(self.graph()) + + def test_null_forest2(self): + with pytest.raises(nx.NetworkXPointlessConcept): + nx.is_forest(self.multigraph()) + + def test_is_tree(self): + assert nx.is_tree(self.T2) + assert nx.is_tree(self.T3) + assert nx.is_tree(self.T5) + + def test_is_not_tree(self): + assert not nx.is_tree(self.N4) + assert not nx.is_tree(self.N5) + assert not nx.is_tree(self.N6) + + def test_is_forest(self): + assert nx.is_forest(self.T2) + assert nx.is_forest(self.T3) + assert nx.is_forest(self.T5) + assert nx.is_forest(self.F1) + assert nx.is_forest(self.N5) + + def test_is_not_forest(self): + assert not nx.is_forest(self.N4) + assert not nx.is_forest(self.N6) + assert not nx.is_forest(self.NF1) + + +class TestDirectedTreeRecognition(TestTreeRecognition): + graph = nx.DiGraph + multigraph = nx.MultiDiGraph + + +def test_disconnected_graph(): + # https://github.com/networkx/networkx/issues/1144 + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (2, 0), (3, 4)]) + assert not nx.is_tree(G) + + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2), (2, 0), (3, 4)]) + assert not nx.is_tree(G) + + +def test_dag_nontree(): + G = nx.DiGraph() + G.add_edges_from([(0, 1), (0, 2), (1, 2)]) + assert not nx.is_tree(G) + assert nx.is_directed_acyclic_graph(G) + + +def test_multicycle(): + G = nx.MultiDiGraph() + G.add_edges_from([(0, 1), (0, 1)]) + assert not nx.is_tree(G) + assert nx.is_directed_acyclic_graph(G) + + +def test_emptybranch(): + G = nx.DiGraph() + G.add_nodes_from(range(10)) + assert nx.is_branching(G) + assert not nx.is_arborescence(G) + + +def test_is_branching_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_branching(G) + + +def test_path(): + G = nx.DiGraph() + nx.add_path(G, range(5)) + assert nx.is_branching(G) + assert nx.is_arborescence(G) + + +def test_notbranching1(): + # Acyclic violation. + G = nx.MultiDiGraph() + G.add_nodes_from(range(10)) + G.add_edges_from([(0, 1), (1, 0)]) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) + + +def test_notbranching2(): + # In-degree violation. 
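+    # Node 2 receives edges from both 0 and 3 below, giving it in-degree 2,
+    # which a branching does not allow.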
+ G = nx.MultiDiGraph() + G.add_nodes_from(range(10)) + G.add_edges_from([(0, 1), (0, 2), (3, 2)]) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) + + +def test_notarborescence1(): + # Not an arborescence due to not spanning. + G = nx.MultiDiGraph() + G.add_nodes_from(range(10)) + G.add_edges_from([(0, 1), (0, 2), (1, 3), (5, 6)]) + assert nx.is_branching(G) + assert not nx.is_arborescence(G) + + +def test_notarborescence2(): + # Not an arborescence due to in-degree violation. + G = nx.MultiDiGraph() + nx.add_path(G, range(5)) + G.add_edge(6, 4) + assert not nx.is_branching(G) + assert not nx.is_arborescence(G) + + +def test_is_arborescense_empty_graph_raises(): + G = nx.DiGraph() + with pytest.raises(nx.NetworkXPointlessConcept, match="G has no nodes."): + nx.is_arborescence(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py new file mode 100644 index 0000000..2442d6f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/triads.py @@ -0,0 +1,500 @@ +# See https://github.com/networkx/networkx/pull/1474 +# Copyright 2011 Reya Group +# Copyright 2011 Alex Levenson +# Copyright 2011 Diederik van Liere +"""Functions for analyzing triads of a graph.""" + +from collections import defaultdict +from itertools import combinations, permutations + +import networkx as nx +from networkx.utils import not_implemented_for, py_random_state + +__all__ = [ + "triadic_census", + "is_triad", + "all_triads", + "triads_by_type", + "triad_type", +] + +#: The integer codes representing each type of triad. +#: +#: Triads that are the same up to symmetry have the same code. +TRICODES = ( + 1, + 2, + 2, + 3, + 2, + 4, + 6, + 8, + 2, + 6, + 5, + 7, + 3, + 8, + 7, + 11, + 2, + 6, + 4, + 8, + 5, + 9, + 9, + 13, + 6, + 10, + 9, + 14, + 7, + 14, + 12, + 15, + 2, + 5, + 6, + 7, + 6, + 9, + 10, + 14, + 4, + 9, + 9, + 12, + 8, + 13, + 14, + 15, + 3, + 7, + 8, + 11, + 7, + 12, + 14, + 15, + 8, + 14, + 13, + 15, + 11, + 15, + 15, + 16, +) + +#: The names of each type of triad. The order of the elements is +#: important: it corresponds to the tricodes given in :data:`TRICODES`. +TRIAD_NAMES = ( + "003", + "012", + "102", + "021D", + "021U", + "021C", + "111D", + "111U", + "030T", + "030C", + "201", + "120D", + "120U", + "120C", + "210", + "300", +) + + +#: A dictionary mapping triad code to triad name. +TRICODE_TO_NAME = {i: TRIAD_NAMES[code - 1] for i, code in enumerate(TRICODES)} + + +def _tricode(G, v, u, w): + """Returns the integer code of the given triad. + + This is some fancy magic that comes from Batagelj and Mrvar's paper. It + treats each edge joining a pair of `v`, `u`, and `w` as a bit in + the binary representation of an integer. + + """ + combos = ((v, u, 1), (u, v, 2), (v, w, 4), (w, v, 8), (u, w, 16), (w, u, 32)) + return sum(x for u, v, x in combos if v in G[u]) + + +@not_implemented_for("undirected") +@nx._dispatchable +def triadic_census(G, nodelist=None): + """Determines the triadic census of a directed graph. + + The triadic census is a count of how many of the 16 possible types of + triads are present in a directed graph. If a list of nodes is passed, then + only those triads are taken into account which have elements of nodelist in them. 
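+    Nodes outside `nodelist` may still appear in the counted triads, as long
+    as each counted triad contains at least one node from `nodelist`.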
+ + Parameters + ---------- + G : digraph + A NetworkX DiGraph + nodelist : list + List of nodes for which you want to calculate triadic census + + Returns + ------- + census : dict + Dictionary with triad type as keys and number of occurrences as values. + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)]) + >>> triadic_census = nx.triadic_census(G) + >>> for key, value in triadic_census.items(): + ... print(f"{key}: {value}") + 003: 0 + 012: 0 + 102: 0 + 021D: 0 + 021U: 0 + 021C: 0 + 111D: 0 + 111U: 0 + 030T: 2 + 030C: 2 + 201: 0 + 120D: 0 + 120U: 0 + 120C: 0 + 210: 0 + 300: 0 + + Notes + ----- + This algorithm has complexity $O(m)$ where $m$ is the number of edges in + the graph. + + For undirected graphs, the triadic census can be computed by first converting + the graph into a directed graph using the ``G.to_directed()`` method. + After this conversion, only the triad types 003, 102, 201 and 300 will be + present in the undirected scenario. + + Raises + ------ + ValueError + If `nodelist` contains duplicate nodes or nodes not in `G`. + If you want to ignore this you can preprocess with `set(nodelist) & G.nodes` + + See also + -------- + triad_graph + + References + ---------- + .. [1] Vladimir Batagelj and Andrej Mrvar, A subquadratic triad census + algorithm for large sparse networks with small maximum degree, + University of Ljubljana, + http://vlado.fmf.uni-lj.si/pub/networks/doc/triads/triads.pdf + + """ + nodeset = set(G.nbunch_iter(nodelist)) + if nodelist is not None and len(nodelist) != len(nodeset): + raise ValueError("nodelist includes duplicate nodes or nodes not in G") + + N = len(G) + Nnot = N - len(nodeset) # can signal special counting for subset of nodes + + # create an ordering of nodes with nodeset nodes first + m = {n: i for i, n in enumerate(nodeset)} + if Nnot: + # add non-nodeset nodes later in the ordering + not_nodeset = G.nodes - nodeset + m.update((n, i + N) for i, n in enumerate(not_nodeset)) + + # build all_neighbor dicts for easy counting + # After Python 3.8 can leave off these keys(). Speedup also using G._pred + # nbrs = {n: G._pred[n].keys() | G._succ[n].keys() for n in G} + nbrs = {n: G.pred[n].keys() | G.succ[n].keys() for n in G} + dbl_nbrs = {n: G.pred[n].keys() & G.succ[n].keys() for n in G} + + if Nnot: + sgl_nbrs = {n: G.pred[n].keys() ^ G.succ[n].keys() for n in not_nodeset} + # find number of edges not incident to nodes in nodeset + sgl = sum(1 for n in not_nodeset for nbr in sgl_nbrs[n] if nbr not in nodeset) + sgl_edges_outside = sgl // 2 + dbl = sum(1 for n in not_nodeset for nbr in dbl_nbrs[n] if nbr not in nodeset) + dbl_edges_outside = dbl // 2 + + # Initialize the count for each triad to be zero. + census = dict.fromkeys(TRIAD_NAMES, 0) + # Main loop over nodes + for v in nodeset: + vnbrs = nbrs[v] + dbl_vnbrs = dbl_nbrs[v] + if Nnot: + # set up counts of edges attached to v. + sgl_unbrs_bdy = sgl_unbrs_out = dbl_unbrs_bdy = dbl_unbrs_out = 0 + for u in vnbrs: + if m[u] <= m[v]: + continue + unbrs = nbrs[u] + neighbors = (vnbrs | unbrs) - {u, v} + # Count connected triads. + for w in neighbors: + if m[u] < m[w] or (m[v] < m[w] < m[u] and v not in nbrs[w]): + code = _tricode(G, v, u, w) + census[TRICODE_TO_NAME[code]] += 1 + + # Use a formula for dyadic triads with edge incident to v + if u in dbl_vnbrs: + census["102"] += N - len(neighbors) - 2 + else: + census["012"] += N - len(neighbors) - 2 + + # Count edges attached to v. 
Subtract later to get triads with v isolated + # _out are (u,unbr) for unbrs outside boundary of nodeset + # _bdy are (u,unbr) for unbrs on boundary of nodeset (get double counted) + if Nnot and u not in nodeset: + sgl_unbrs = sgl_nbrs[u] + sgl_unbrs_bdy += len(sgl_unbrs & vnbrs - nodeset) + sgl_unbrs_out += len(sgl_unbrs - vnbrs - nodeset) + dbl_unbrs = dbl_nbrs[u] + dbl_unbrs_bdy += len(dbl_unbrs & vnbrs - nodeset) + dbl_unbrs_out += len(dbl_unbrs - vnbrs - nodeset) + # if nodeset == G.nodes, skip this b/c we will find the edge later. + if Nnot: + # Count edges outside nodeset not connected with v (v isolated triads) + census["012"] += sgl_edges_outside - (sgl_unbrs_out + sgl_unbrs_bdy // 2) + census["102"] += dbl_edges_outside - (dbl_unbrs_out + dbl_unbrs_bdy // 2) + + # calculate null triads: "003" + # null triads = total number of possible triads - all found triads + total_triangles = (N * (N - 1) * (N - 2)) // 6 + triangles_without_nodeset = (Nnot * (Nnot - 1) * (Nnot - 2)) // 6 + total_census = total_triangles - triangles_without_nodeset + census["003"] = total_census - sum(census.values()) + + return census + + +@nx._dispatchable +def is_triad(G): + """Returns True if the graph G is a triad, else False. + + Parameters + ---------- + G : graph + A NetworkX Graph + + Returns + ------- + istriad : boolean + Whether G is a valid triad + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)]) + >>> nx.is_triad(G) + True + >>> G.add_edge(0, 1) + >>> nx.is_triad(G) + False + """ + if isinstance(G, nx.Graph): + if G.order() == 3 and nx.is_directed(G): + if not any((n, n) in G.edges() for n in G.nodes()): + return True + return False + + +@not_implemented_for("undirected") +@nx._dispatchable(returns_graph=True) +def all_triads(G): + """A generator of all possible triads in G. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + all_triads : generator of DiGraphs + Generator of triads (order-3 DiGraphs) + + Examples + -------- + >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1), (3, 4), (4, 1), (4, 2)]) + >>> for triad in nx.all_triads(G): + ... print(triad.edges) + [(1, 2), (2, 3), (3, 1)] + [(1, 2), (4, 1), (4, 2)] + [(3, 1), (3, 4), (4, 1)] + [(2, 3), (3, 4), (4, 2)] + + """ + triplets = combinations(G.nodes(), 3) + for triplet in triplets: + yield G.subgraph(triplet).copy() + + +@not_implemented_for("undirected") +@nx._dispatchable +def triads_by_type(G): + """Returns a list of all triads for each triad type in a directed graph. + There are exactly 16 different types of triads possible. Suppose 1, 2, 3 are three + nodes, they will be classified as a particular triad type if their connections + are as follows: + + - 003: 1, 2, 3 + - 012: 1 -> 2, 3 + - 102: 1 <-> 2, 3 + - 021D: 1 <- 2 -> 3 + - 021U: 1 -> 2 <- 3 + - 021C: 1 -> 2 -> 3 + - 111D: 1 <-> 2 <- 3 + - 111U: 1 <-> 2 -> 3 + - 030T: 1 -> 2 -> 3, 1 -> 3 + - 030C: 1 <- 2 <- 3, 1 -> 3 + - 201: 1 <-> 2 <-> 3 + - 120D: 1 <- 2 -> 3, 1 <-> 3 + - 120U: 1 -> 2 <- 3, 1 <-> 3 + - 120C: 1 -> 2 -> 3, 1 <-> 3 + - 210: 1 -> 2 <-> 3, 1 <-> 3 + - 300: 1 <-> 2 <-> 3, 1 <-> 3 + + Refer to the :doc:`example gallery ` + for visual examples of the triad types. + + Parameters + ---------- + G : digraph + A NetworkX DiGraph + + Returns + ------- + tri_by_type : dict + Dictionary with triad types as keys and lists of triads as values. 
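+        Only triad types that occur in `G` appear as keys in the dictionary.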
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (1, 3), (2, 3), (3, 1), (5, 6), (5, 4), (6, 7)])
+    >>> tri_by_type = nx.triads_by_type(G)
+    >>> tri_by_type["120C"][0].edges()
+    OutEdgeView([(1, 2), (1, 3), (2, 3), (3, 1)])
+    >>> tri_by_type["012"][0].edges()
+    OutEdgeView([(1, 2)])
+
+    References
+    ----------
+    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
+        Oxford.
+        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
+    """
+    # num_triads = o * (o - 1) * (o - 2) // 6
+    # if num_triads > TRIAD_LIMIT: print(WARNING)
+    all_tri = all_triads(G)
+    tri_by_type = defaultdict(list)
+    for triad in all_tri:
+        name = triad_type(triad)
+        tri_by_type[name].append(triad)
+    return tri_by_type
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable
+def triad_type(G):
+    """Returns the sociological triad type for a triad.
+
+    Parameters
+    ----------
+    G : digraph
+        A NetworkX DiGraph with 3 nodes
+
+    Returns
+    -------
+    triad_type : str
+        A string identifying the triad type
+
+    Examples
+    --------
+    >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
+    >>> nx.triad_type(G)
+    '030C'
+    >>> G.add_edge(1, 3)
+    >>> nx.triad_type(G)
+    '120C'
+
+    Notes
+    -----
+    There can be 6 unique edges in a triad (order-3 DiGraph), so there are
+    2^6 = 64 distinct triads on 3 labeled nodes. These 64 triads each display
+    exactly 1 of 16 topologies of triads (topologies can be permuted). These
+    topologies are identified by the following notation:
+
+    {m}{a}{n}{type} (for example: 111D, 210, 102)
+
+    Here:
+
+    {m}    = number of mutual ties (takes 0, 1, 2, 3); a mutual tie is (0,1)
+             AND (1,0)
+    {a}    = number of asymmetric ties (takes 0, 1, 2, 3); an asymmetric tie
+             is (0,1) BUT NOT (1,0) or vice versa
+    {n}    = number of null ties (takes 0, 1, 2, 3); a null tie is NEITHER
+             (0,1) NOR (1,0)
+    {type} = a letter (takes U, D, C, T) corresponding to up, down, cyclical
+             and transitive. This is only used for topologies that can have
+             more than one form (e.g., 021D and 021U).
+
+    References
+    ----------
+    .. [1] Snijders, T. (2012). "Transitivity and triads." University of
+        Oxford.
+        https://web.archive.org/web/20170830032057/http://www.stats.ox.ac.uk/~snijders/Trans_Triads_ha.pdf
+    """
+    if not is_triad(G):
+        raise nx.NetworkXAlgorithmError("G is not a triad (order-3 DiGraph)")
+    num_edges = len(G.edges())
+    if num_edges == 0:
+        return "003"
+    elif num_edges == 1:
+        return "012"
+    elif num_edges == 2:
+        e1, e2 = G.edges()
+        if set(e1) == set(e2):
+            return "102"
+        elif e1[0] == e2[0]:
+            return "021D"
+        elif e1[1] == e2[1]:
+            return "021U"
+        elif e1[1] == e2[0] or e2[1] == e1[0]:
+            return "021C"
+    elif num_edges == 3:
+        for e1, e2, e3 in permutations(G.edges(), 3):
+            if set(e1) == set(e2):
+                if e3[0] in e1:
+                    return "111U"
+                # e3[1] in e1:
+                return "111D"
+            elif set(e1).symmetric_difference(set(e2)) == set(e3):
+                # each node is the source of exactly one edge: a directed 3-cycle
+                if {e1[0], e2[0], e3[0]} == set(G.nodes()):
+                    return "030C"
+                # e3 == (e1[0], e2[1]) and e2 == (e1[1], e3[1]):
+                return "030T"
+    elif num_edges == 4:
+        for e1, e2, e3, e4 in permutations(G.edges(), 4):
+            if set(e1) == set(e2):
+                # identify pair of symmetric edges (which necessarily exists)
+                if set(e3) == set(e4):
+                    return "201"
+                if {e3[0]} == {e4[0]} == set(e3).intersection(set(e4)):
+                    return "120D"
+                if {e3[1]} == {e4[1]} == set(e3).intersection(set(e4)):
+                    return "120U"
+                if e3[1] == e4[0]:
+                    return "120C"
+    elif num_edges == 5:
+        return "210"
+    elif num_edges == 6:
+        return "300"
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py
new file mode 100644
index 0000000..bf4b016
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/vitality.py
@@ -0,0 +1,76 @@
+"""
+Vitality measures.
+"""
+
+from functools import partial
+
+import networkx as nx
+
+__all__ = ["closeness_vitality"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def closeness_vitality(G, node=None, weight=None, wiener_index=None):
+    """Returns the closeness vitality for nodes in the graph.
+
+    The *closeness vitality* of a node, defined in Section 3.6.2 of [1]_,
+    is the change in the sum of distances between all node pairs when
+    excluding that node.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A strongly-connected graph.
+
+    weight : string
+        The name of the edge attribute used as weight. This is passed
+        directly to the :func:`~networkx.wiener_index` function.
+
+    node : object
+        If specified, only the closeness vitality for this node will be
+        returned. Otherwise, a dictionary mapping each node to its
+        closeness vitality will be returned.
+
+    Other Parameters
+    ----------------
+    wiener_index : number
+        If you have already computed the Wiener index of the graph
+        `G`, you can provide that value here. Otherwise, it will be
+        computed for you.
+
+    Returns
+    -------
+    dictionary or float
+        If `node` is None, this function returns a dictionary
+        with nodes as keys and closeness vitality as the
+        value. Otherwise, it returns only the closeness vitality for the
+        specified `node`.
+
+        The closeness vitality of a node may be negative infinity if
+        removing that node would disconnect the graph.
+
+    Examples
+    --------
+    >>> G = nx.cycle_graph(3)
+    >>> nx.closeness_vitality(G)
+    {0: 2.0, 1: 2.0, 2: 2.0}
+
+    See Also
+    --------
+    closeness_centrality
+
+    References
+    ----------
+    .. [1] Ulrik Brandes, Thomas Erlebach (eds.).
+        *Network Analysis: Methodological Foundations*.
+        Springer, 2005.
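+
+    As a quick cross-check of the values above, a single node's vitality
+    can also be requested directly (``node=1`` here is an arbitrary,
+    illustrative choice):
+
+    >>> nx.closeness_vitality(nx.cycle_graph(3), node=1)
+    2.0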
+ + + """ + if wiener_index is None: + wiener_index = nx.wiener_index(G, weight=weight) + if node is not None: + after = nx.wiener_index(G.subgraph(set(G) - {node}), weight=weight) + return wiener_index - after + vitality = partial(closeness_vitality, G, weight=weight, wiener_index=wiener_index) + return {v: vitality(node=v) for v in G} diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py new file mode 100644 index 0000000..609a68d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/voronoi.py @@ -0,0 +1,86 @@ +"""Functions for computing the Voronoi cells of a graph.""" + +import networkx as nx +from networkx.utils import groups + +__all__ = ["voronoi_cells"] + + +@nx._dispatchable(edge_attrs="weight") +def voronoi_cells(G, center_nodes, weight="weight"): + """Returns the Voronoi cells centered at `center_nodes` with respect + to the shortest-path distance metric. + + If $C$ is a set of nodes in the graph and $c$ is an element of $C$, + the *Voronoi cell* centered at a node $c$ is the set of all nodes + $v$ that are closer to $c$ than to any other center node in $C$ with + respect to the shortest-path distance metric. [1]_ + + For directed graphs, this will compute the "outward" Voronoi cells, + as defined in [1]_, in which distance is measured from the center + nodes to the target node. For the "inward" Voronoi cells, use the + :meth:`DiGraph.reverse` method to reverse the orientation of the + edges before invoking this function on the directed graph. + + Parameters + ---------- + G : NetworkX graph + + center_nodes : set + A nonempty set of nodes in the graph `G` that represent the + center of the Voronoi cells. + + weight : string or function + The edge attribute (or an arbitrary function) representing the + weight of an edge. This keyword argument is as described in the + documentation for :func:`~networkx.multi_source_dijkstra_path`, + for example. + + Returns + ------- + dictionary + A mapping from center node to set of all nodes in the graph + closer to that center node than to any other center node. The + keys of the dictionary are the element of `center_nodes`, and + the values of the dictionary form a partition of the nodes of + `G`. + + Examples + -------- + To get only the partition of the graph induced by the Voronoi cells, + take the collection of all values in the returned dictionary:: + + >>> G = nx.path_graph(6) + >>> center_nodes = {0, 3} + >>> cells = nx.voronoi_cells(G, center_nodes) + >>> partition = set(map(frozenset, cells.values())) + >>> sorted(map(sorted, partition)) + [[0, 1], [2, 3, 4, 5]] + + Raises + ------ + ValueError + If `center_nodes` is empty. + + References + ---------- + .. [1] Erwig, Martin. (2000),"The graph Voronoi diagram with applications." + *Networks*, 36: 156--163. + https://doi.org/10.1002/1097-0037(200010)36:3<156::AID-NET2>3.0.CO;2-L + + """ + # Determine the shortest paths from any one of the center nodes to + # every node in the graph. + # + # This raises `ValueError` if `center_nodes` is an empty set. + paths = nx.multi_source_dijkstra_path(G, center_nodes, weight=weight) + # Determine the center node from which the shortest path originates. + nearest = {v: p[0] for v, p in paths.items()} + # Get the mapping from center node to all nodes closer to it than to + # any other center node. + cells = groups(nearest) + # We collect all unreachable nodes under a special key, if there are any. 
+    unreachable = set(G) - set(nearest)
+    if unreachable:
+        cells["unreachable"] = unreachable
+    return cells
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py
new file mode 100644
index 0000000..0ef9dac
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/walks.py
@@ -0,0 +1,79 @@
+"""Function for computing walks in a graph."""
+
+import networkx as nx
+
+__all__ = ["number_of_walks"]
+
+
+@nx._dispatchable
+def number_of_walks(G, walk_length):
+    """Returns the number of walks connecting each pair of nodes in `G`.
+
+    A *walk* is a sequence of nodes in which each adjacent pair of nodes
+    in the sequence is adjacent in the graph. A walk can repeat the same
+    edge and go in the opposite direction, just as people can walk on a
+    set of paths, but standing still is not counted as part of the walk.
+
+    This function only counts the walks with `walk_length` edges. Note that
+    the number of nodes in the walk sequence is one more than `walk_length`.
+    The number of walks can grow very quickly on a larger graph
+    and with a larger walk length.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    walk_length : int
+        A nonnegative integer representing the length of a walk.
+
+    Returns
+    -------
+    dict
+        A dictionary of dictionaries in which outer keys are source
+        nodes, inner keys are target nodes, and inner values are the
+        number of walks of length `walk_length` connecting those nodes.
+
+    Raises
+    ------
+    ValueError
+        If `walk_length` is negative.
+
+    Examples
+    --------
+
+    >>> G = nx.Graph([(0, 1), (1, 2)])
+    >>> walks = nx.number_of_walks(G, 2)
+    >>> walks
+    {0: {0: 1, 1: 0, 2: 1}, 1: {0: 0, 1: 2, 2: 0}, 2: {0: 1, 1: 0, 2: 1}}
+    >>> total_walks = sum(sum(tgts.values()) for _, tgts in walks.items())
+
+    You can also get the number of walks from a specific source node using
+    the returned dictionary. For example, the number of walks of length 1
+    from node 0 can be found as follows:
+
+    >>> walks = nx.number_of_walks(G, 1)
+    >>> walks[0]
+    {0: 0, 1: 1, 2: 0}
+    >>> sum(walks[0].values())  # walks from 0 of length 1
+    1
+
+    Similarly, a target node can also be specified:
+
+    >>> walks[0][1]
+    1
+
+    """
+    import numpy as np
+
+    if walk_length < 0:
+        raise ValueError(f"`walk_length` cannot be negative: {walk_length}")
+
+    A = nx.adjacency_matrix(G, weight=None)
+    # TODO: Use matrix_power from scipy.sparse when available
+    # power = sp.sparse.linalg.matrix_power(A, walk_length)
+    power = np.linalg.matrix_power(A.toarray(), walk_length)
+    result = {
+        u: {v: power.item(u_idx, v_idx) for v_idx, v in enumerate(G)}
+        for u_idx, u in enumerate(G)
+    }
+    return result
diff --git a/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py b/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py
new file mode 100644
index 0000000..ac3abe4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/algorithms/wiener.py
@@ -0,0 +1,226 @@
+"""Functions related to the Wiener Index of a graph.
+
+The Wiener Index is a topological measure of a graph
+related to the distance between nodes and their degree.
+The Schultz Index and Gutman Index are similar measures.
+They are used to categorize molecules via the network of
+atoms connected by chemical bonds. The indices are
+correlated with functional aspects of the molecules.
+
+References
+----------
+.. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
+.. [2] M.V. Diudea and I. Gutman, Wiener-Type Topological Indices,
+       Croatica Chemica Acta, 71 (1998), 21-51.
+       https://hrcak.srce.hr/132323
+"""
+
+import itertools as it
+
+import networkx as nx
+
+__all__ = ["wiener_index", "schultz_index", "gutman_index"]
+
+
+@nx._dispatchable(edge_attrs="weight")
+def wiener_index(G, weight=None):
+    """Returns the Wiener index of the given graph.
+
+    The *Wiener index* of a graph is the sum of the shortest-path
+    (weighted) distances between each pair of reachable nodes.
+    For pairs of nodes in undirected graphs, only one orientation
+    of the pair is counted.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used to compute shortest-path distances.
+
+    Returns
+    -------
+    number
+        The Wiener index of the graph `G`.
+
+    Raises
+    ------
+    NetworkXError
+        If the graph `G` is not connected.
+
+    Notes
+    -----
+    If a pair of nodes is not reachable, the distance is assumed to be
+    infinity. This means that for graphs that are not
+    strongly-connected, this function returns ``inf``.
+
+    The Wiener index is not usually defined for directed graphs; however,
+    this function uses the natural generalization of the Wiener index to
+    directed graphs.
+
+    Examples
+    --------
+    The Wiener index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes, since each pair of
+    nodes is at distance one::
+
+        >>> n = 10
+        >>> G = nx.complete_graph(n)
+        >>> nx.wiener_index(G) == n * (n - 1) / 2
+        True
+
+    Graphs that are not strongly-connected have infinite Wiener index::
+
+        >>> G = nx.empty_graph(2)
+        >>> nx.wiener_index(G)
+        inf
+
+    References
+    ----------
+    .. [1] `Wikipedia: Wiener Index <https://en.wikipedia.org/wiki/Wiener_index>`_
+    """
+    connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G)
+    if not connected:
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    total = sum(it.chain.from_iterable(nbrs.values() for node, nbrs in spl))
+    # Need to account for double counting pairs of nodes in undirected graphs.
+    return total if G.is_directed() else total / 2
+
+
+@nx.utils.not_implemented_for("directed")
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def schultz_index(G, weight=None):
+    r"""Returns the Schultz Index (of the first kind) of `G`.
+
+    The *Schultz Index* [3]_ of a graph is the sum over all node pairs of
+    distances times the sum of degrees. Consider an undirected graph `G`.
+    For each node pair ``(u, v)`` compute ``dist(u, v) * (deg(u) + deg(v))``,
+    where ``dist`` is the shortest path length between two nodes and ``deg``
+    is the degree of a node.
+
+    The Schultz Index is the sum of these quantities over all (unordered)
+    pairs of nodes.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The undirected graph of interest.
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used to compute shortest-path distances.
+
+    Returns
+    -------
+    number
+        The first kind of Schultz Index of the graph `G`.
+
+    Examples
+    --------
+    The Schultz Index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes times ``2 * (n - 1)``,
+    since each pair of nodes is at distance one and the sum of the degrees
+    of two nodes is ``2 * (n - 1)``.
+
+    >>> n = 10
+    >>> G = nx.complete_graph(n)
+    >>> nx.schultz_index(G) == (n * (n - 1) / 2) * (2 * (n - 1))
+    True
+
+    A graph that is disconnected has infinite Schultz Index:
+
+    >>> nx.schultz_index(nx.empty_graph(2))
+    inf
+
+    References
+    ----------
+    .. [1] I. Gutman, Selected properties of the Schultz molecular topological index,
+        J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
+        https://doi.org/10.1021/ci00021a009
+    .. [2] M.V. Diudea and I. Gutman, Wiener-Type Topological Indices,
+        Croatica Chemica Acta, 71 (1998), 21-51.
+        https://hrcak.srce.hr/132323
+    .. [3] H. P. Schultz, Topological organic chemistry. 1.
+        Graph theory and topological indices of alkanes,
+        J. Chem. Inf. Comput. Sci. 29 (1989), 239–257.
+
+    """
+    if not nx.is_connected(G):
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    d = dict(G.degree)  # plain node degrees; `weight` only affects distances
+    return sum(dist * (d[u] + d[v]) for u, info in spl for v, dist in info.items()) / 2
+
+
+@nx.utils.not_implemented_for("directed")
+@nx.utils.not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def gutman_index(G, weight=None):
+    r"""Returns the Gutman Index for the graph `G`.
+
+    The *Gutman Index* measures the topology of networks, especially for molecule
+    networks of atoms connected by bonds [1]_. It is also called the Schultz Index
+    of the second kind [2]_.
+
+    Consider an undirected graph `G` with node set ``V``.
+    The Gutman Index of a graph is the sum, over all (unordered) pairs of
+    nodes ``(u, v)`` with distance ``dist(u, v)`` and degrees ``deg(u)``
+    and ``deg(v)``, of ``dist(u, v) * deg(u) * deg(v)``.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+
+    weight : string or None, optional (default: None)
+        If None, every edge has weight 1.
+        If a string, use this edge attribute as the edge weight.
+        Any edge attribute not present defaults to 1.
+        The edge weights are used to compute shortest-path distances.
+
+    Returns
+    -------
+    number
+        The Gutman Index of the graph `G`.
+
+    Examples
+    --------
+    The Gutman Index of the (unweighted) complete graph on *n* nodes
+    equals the number of pairs of the *n* nodes times ``(n - 1) * (n - 1)``,
+    since each pair of nodes is at distance one and the product of the
+    degrees of two nodes is ``(n - 1) * (n - 1)``.
+
+    >>> n = 10
+    >>> G = nx.complete_graph(n)
+    >>> nx.gutman_index(G) == (n * (n - 1) / 2) * ((n - 1) * (n - 1))
+    True
+
+    Graphs that are disconnected have infinite Gutman Index:
+
+    >>> G = nx.empty_graph(2)
+    >>> nx.gutman_index(G)
+    inf
+
+    References
+    ----------
+    .. [1] M.V. Diudea and I. Gutman, Wiener-Type Topological Indices,
+        Croatica Chemica Acta, 71 (1998), 21-51.
+        https://hrcak.srce.hr/132323
+    .. [2] I. Gutman, Selected properties of the Schultz molecular topological index,
+        J. Chem. Inf. Comput. Sci. 34 (1994), 1087–1089.
+        https://doi.org/10.1021/ci00021a009
+
+    """
+    if not nx.is_connected(G):
+        return float("inf")
+
+    spl = nx.shortest_path_length(G, weight=weight)
+    d = dict(G.degree)  # plain node degrees; `weight` only affects distances
+    return sum(dist * d[u] * d[v] for u, vinfo in spl for v, dist in vinfo.items()) / 2
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__init__.py b/.venv/lib/python3.12/site-packages/networkx/classes/__init__.py
new file mode 100644
index 0000000..721fa8b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/classes/__init__.py
@@ -0,0 +1,13 @@
+from .graph import Graph
+from .digraph import DiGraph
+from .multigraph import MultiGraph
+from .multidigraph import MultiDiGraph
+
+from .function import *
+from .graphviews import subgraph_view, reverse_view
+
+from networkx.classes import filters
+
+from networkx.classes import coreviews
+from networkx.classes import graphviews
+from networkx.classes import reportviews
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..386449c
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/coreviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/coreviews.cpython-312.pyc
new file mode 100644
index 0000000..395c77a
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/coreviews.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/digraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/digraph.cpython-312.pyc
new file mode 100644
index 0000000..cd6e8e2
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/digraph.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/filters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/filters.cpython-312.pyc
new file mode 100644
index 0000000..b25a33b
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/filters.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/function.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/function.cpython-312.pyc
new file mode 100644
index 0000000..2b11ee3
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/function.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graph.cpython-312.pyc
new file mode 100644
index 0000000..41963c1
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graph.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graphviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graphviews.cpython-312.pyc
new file mode 100644
index 0000000..b337cda
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/graphviews.cpython-312.pyc differ
diff --git
a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multidigraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multidigraph.cpython-312.pyc new file mode 100644 index 0000000..42a936f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multidigraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multigraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multigraph.cpython-312.pyc new file mode 100644 index 0000000..72e9aff Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/multigraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/reportviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/reportviews.cpython-312.pyc new file mode 100644 index 0000000..cd19cb3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/__pycache__/reportviews.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/coreviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/coreviews.py new file mode 100644 index 0000000..4769ffa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/coreviews.py @@ -0,0 +1,435 @@ +"""Views of core data structures such as nested Mappings (e.g. dict-of-dicts). +These ``Views`` often restrict element access, with either the entire view or +layers of nested mappings being read-only. +""" + +from collections.abc import Mapping + +__all__ = [ + "AtlasView", + "AdjacencyView", + "MultiAdjacencyView", + "UnionAtlas", + "UnionAdjacency", + "UnionMultiInner", + "UnionMultiAdjacency", + "FilterAtlas", + "FilterAdjacency", + "FilterMultiInner", + "FilterMultiAdjacency", +] + + +class AtlasView(Mapping): + """An AtlasView is a Read-only Mapping of Mappings. + + It is a View into a dict-of-dict data structure. + The inner level of dict is read-write. But the + outer level is read-only. + + See Also + ======== + AdjacencyView: View into dict-of-dict-of-dict + MultiAdjacencyView: View into dict-of-dict-of-dict-of-dict + """ + + __slots__ = ("_atlas",) + + def __getstate__(self): + return {"_atlas": self._atlas} + + def __setstate__(self, state): + self._atlas = state["_atlas"] + + def __init__(self, d): + self._atlas = d + + def __len__(self): + return len(self._atlas) + + def __iter__(self): + return iter(self._atlas) + + def __getitem__(self, key): + return self._atlas[key] + + def copy(self): + return {n: self[n].copy() for n in self._atlas} + + def __str__(self): + return str(self._atlas) # {nbr: self[nbr] for nbr in self}) + + def __repr__(self): + return f"{self.__class__.__name__}({self._atlas!r})" + + +class AdjacencyView(AtlasView): + """An AdjacencyView is a Read-only Map of Maps of Maps. + + It is a View into a dict-of-dict-of-dict data structure. + The inner level of dict is read-write. But the + outer levels are read-only. + + See Also + ======== + AtlasView: View into dict-of-dict + MultiAdjacencyView: View into dict-of-dict-of-dict-of-dict + """ + + __slots__ = () # Still uses AtlasView slots names _atlas + + def __getitem__(self, name): + return AtlasView(self._atlas[name]) + + def copy(self): + return {n: self[n].copy() for n in self._atlas} + + +class MultiAdjacencyView(AdjacencyView): + """An MultiAdjacencyView is a Read-only Map of Maps of Maps of Maps. 
+ + It is a View into a dict-of-dict-of-dict-of-dict data structure. + The inner level of dict is read-write. But the + outer levels are read-only. + + See Also + ======== + AtlasView: View into dict-of-dict + AdjacencyView: View into dict-of-dict-of-dict + """ + + __slots__ = () # Still uses AtlasView slots names _atlas + + def __getitem__(self, name): + return AdjacencyView(self._atlas[name]) + + def copy(self): + return {n: self[n].copy() for n in self._atlas} + + +class UnionAtlas(Mapping): + """A read-only union of two atlases (dict-of-dict). + + The two dict-of-dicts represent the inner dict of + an Adjacency: `G.succ[node]` and `G.pred[node]`. + The inner level of dict of both hold attribute key:value + pairs and is read-write. But the outer level is read-only. + + See Also + ======== + UnionAdjacency: View into dict-of-dict-of-dict + UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict + """ + + __slots__ = ("_succ", "_pred") + + def __getstate__(self): + return {"_succ": self._succ, "_pred": self._pred} + + def __setstate__(self, state): + self._succ = state["_succ"] + self._pred = state["_pred"] + + def __init__(self, succ, pred): + self._succ = succ + self._pred = pred + + def __len__(self): + return len(self._succ.keys() | self._pred.keys()) + + def __iter__(self): + return iter(set(self._succ.keys()) | set(self._pred.keys())) + + def __getitem__(self, key): + try: + return self._succ[key] + except KeyError: + return self._pred[key] + + def copy(self): + result = {nbr: dd.copy() for nbr, dd in self._succ.items()} + for nbr, dd in self._pred.items(): + if nbr in result: + result[nbr].update(dd) + else: + result[nbr] = dd.copy() + return result + + def __str__(self): + return str({nbr: self[nbr] for nbr in self}) + + def __repr__(self): + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" + + +class UnionAdjacency(Mapping): + """A read-only union of dict Adjacencies as a Map of Maps of Maps. + + The two input dict-of-dict-of-dicts represent the union of + `G.succ` and `G.pred`. Return values are UnionAtlas + The inner level of dict is read-write. But the + middle and outer levels are read-only. + + succ : a dict-of-dict-of-dict {node: nbrdict} + pred : a dict-of-dict-of-dict {node: nbrdict} + The keys for the two dicts should be the same + + See Also + ======== + UnionAtlas: View into dict-of-dict + UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict + """ + + __slots__ = ("_succ", "_pred") + + def __getstate__(self): + return {"_succ": self._succ, "_pred": self._pred} + + def __setstate__(self, state): + self._succ = state["_succ"] + self._pred = state["_pred"] + + def __init__(self, succ, pred): + # keys must be the same for two input dicts + assert len(set(succ.keys()) ^ set(pred.keys())) == 0 + self._succ = succ + self._pred = pred + + def __len__(self): + return len(self._succ) # length of each dict should be the same + + def __iter__(self): + return iter(self._succ) + + def __getitem__(self, nbr): + return UnionAtlas(self._succ[nbr], self._pred[nbr]) + + def copy(self): + return {n: self[n].copy() for n in self._succ} + + def __str__(self): + return str({nbr: self[nbr] for nbr in self}) + + def __repr__(self): + return f"{self.__class__.__name__}({self._succ!r}, {self._pred!r})" + + +class UnionMultiInner(UnionAtlas): + """A read-only union of two inner dicts of MultiAdjacencies. + + The two input dict-of-dict-of-dicts represent the union of + `G.succ[node]` and `G.pred[node]` for MultiDiGraphs. + Return values are UnionAtlas. 
+ The inner level of dict is read-write. But the outer levels are read-only. + + See Also + ======== + UnionAtlas: View into dict-of-dict + UnionAdjacency: View into dict-of-dict-of-dict + UnionMultiAdjacency: View into dict-of-dict-of-dict-of-dict + """ + + __slots__ = () # Still uses UnionAtlas slots names _succ, _pred + + def __getitem__(self, node): + in_succ = node in self._succ + in_pred = node in self._pred + if in_succ: + if in_pred: + return UnionAtlas(self._succ[node], self._pred[node]) + return UnionAtlas(self._succ[node], {}) + return UnionAtlas({}, self._pred[node]) + + def copy(self): + nodes = set(self._succ.keys()) | set(self._pred.keys()) + return {n: self[n].copy() for n in nodes} + + +class UnionMultiAdjacency(UnionAdjacency): + """A read-only union of two dict MultiAdjacencies. + + The two input dict-of-dict-of-dict-of-dicts represent the union of + `G.succ` and `G.pred` for MultiDiGraphs. Return values are UnionAdjacency. + The inner level of dict is read-write. But the outer levels are read-only. + + See Also + ======== + UnionAtlas: View into dict-of-dict + UnionMultiInner: View into dict-of-dict-of-dict + """ + + __slots__ = () # Still uses UnionAdjacency slots names _succ, _pred + + def __getitem__(self, node): + return UnionMultiInner(self._succ[node], self._pred[node]) + + +class FilterAtlas(Mapping): # nodedict, nbrdict, keydict + """A read-only Mapping of Mappings with filtering criteria for nodes. + + It is a view into a dict-of-dict data structure, and it selects only + nodes that meet the criteria defined by ``NODE_OK``. + + See Also + ======== + FilterAdjacency + FilterMultiInner + FilterMultiAdjacency + """ + + def __init__(self, d, NODE_OK): + self._atlas = d + self.NODE_OK = NODE_OK + + def __len__(self): + # check whether NODE_OK stores the number of nodes as `length` + # or the nodes themselves as a set `nodes`. If not, count the nodes. + if hasattr(self.NODE_OK, "length"): + return self.NODE_OK.length + if hasattr(self.NODE_OK, "nodes"): + return len(self.NODE_OK.nodes & self._atlas.keys()) + return sum(1 for n in self._atlas if self.NODE_OK(n)) + + def __iter__(self): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + return (n for n in self.NODE_OK.nodes if n in self._atlas) + return (n for n in self._atlas if self.NODE_OK(n)) + + def __getitem__(self, key): + if key in self._atlas and self.NODE_OK(key): + return self._atlas[key] + raise KeyError(f"Key {key} not found") + + def __str__(self): + return str({nbr: self[nbr] for nbr in self}) + + def __repr__(self): + return f"{self.__class__.__name__}({self._atlas!r}, {self.NODE_OK!r})" + + +class FilterAdjacency(Mapping): # edgedict + """A read-only Mapping of Mappings with filtering criteria for nodes and edges. + + It is a view into a dict-of-dict-of-dict data structure, and it selects nodes + and edges that satisfy specific criteria defined by ``NODE_OK`` and ``EDGE_OK``, + respectively. + + See Also + ======== + FilterAtlas + FilterMultiInner + FilterMultiAdjacency + """ + + def __init__(self, d, NODE_OK, EDGE_OK): + self._atlas = d + self.NODE_OK = NODE_OK + self.EDGE_OK = EDGE_OK + + def __len__(self): + # check whether NODE_OK stores the number of nodes as `length` + # or the nodes themselves as a set `nodes`. If not, count the nodes. 
+ if hasattr(self.NODE_OK, "length"): + return self.NODE_OK.length + if hasattr(self.NODE_OK, "nodes"): + return len(self.NODE_OK.nodes & self._atlas.keys()) + return sum(1 for n in self._atlas if self.NODE_OK(n)) + + def __iter__(self): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + return (n for n in self.NODE_OK.nodes if n in self._atlas) + return (n for n in self._atlas if self.NODE_OK(n)) + + def __getitem__(self, node): + if node in self._atlas and self.NODE_OK(node): + + def new_node_ok(nbr): + return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr) + + return FilterAtlas(self._atlas[node], new_node_ok) + raise KeyError(f"Key {node} not found") + + def __str__(self): + return str({nbr: self[nbr] for nbr in self}) + + def __repr__(self): + name = self.__class__.__name__ + return f"{name}({self._atlas!r}, {self.NODE_OK!r}, {self.EDGE_OK!r})" + + +class FilterMultiInner(FilterAdjacency): # muliedge_seconddict + """A read-only Mapping of Mappings with filtering criteria for nodes and edges. + + It is a view into a dict-of-dict-of-dict-of-dict data structure, and it selects nodes + and edges that meet specific criteria defined by ``NODE_OK`` and ``EDGE_OK``. + + See Also + ======== + FilterAtlas + FilterAdjacency + FilterMultiAdjacency + """ + + def __iter__(self): + try: # check that NODE_OK has attr 'nodes' + node_ok_shorter = 2 * len(self.NODE_OK.nodes) < len(self._atlas) + except AttributeError: + node_ok_shorter = False + if node_ok_shorter: + my_nodes = (n for n in self.NODE_OK.nodes if n in self._atlas) + else: + my_nodes = (n for n in self._atlas if self.NODE_OK(n)) + for n in my_nodes: + some_keys_ok = False + for key in self._atlas[n]: + if self.EDGE_OK(n, key): + some_keys_ok = True + break + if some_keys_ok is True: + yield n + + def __getitem__(self, nbr): + if ( + nbr in self._atlas + and self.NODE_OK(nbr) + and any(self.EDGE_OK(nbr, key) for key in self._atlas[nbr]) + ): + + def new_node_ok(key): + return self.EDGE_OK(nbr, key) + + return FilterAtlas(self._atlas[nbr], new_node_ok) + raise KeyError(f"Key {nbr} not found") + + +class FilterMultiAdjacency(FilterAdjacency): # multiedgedict + """A read-only Mapping of Mappings with filtering criteria + for nodes and edges. + + It is a view into a dict-of-dict-of-dict-of-dict data structure, + and it selects nodes and edges that satisfy specific criteria + defined by ``NODE_OK`` and ``EDGE_OK``, respectively. 
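+
+    A short sketch of how these filtered views surface in practice, via
+    :func:`networkx.subgraph_view` (the graph and the node filter here are
+    illustrative choices):
+
+    >>> import networkx as nx
+    >>> G = nx.MultiGraph([(0, 1), (1, 2), (2, 3)])
+    >>> H = nx.subgraph_view(G, filter_node=lambda n: n != 3)
+    >>> list(H.edges)
+    [(0, 1), (1, 2)]
+    >>> list(H.adj[1])
+    [0, 2]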
+ + See Also + ======== + FilterAtlas + FilterAdjacency + FilterMultiInner + """ + + def __getitem__(self, node): + if node in self._atlas and self.NODE_OK(node): + + def edge_ok(nbr, key): + return self.NODE_OK(nbr) and self.EDGE_OK(node, nbr, key) + + return FilterMultiInner(self._atlas[node], self.NODE_OK, edge_ok) + raise KeyError(f"Key {node} not found") diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/digraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/digraph.py new file mode 100644 index 0000000..2ba56de --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/digraph.py @@ -0,0 +1,1352 @@ +"""Base class for directed graphs.""" + +from copy import deepcopy +from functools import cached_property + +import networkx as nx +from networkx import convert +from networkx.classes.coreviews import AdjacencyView +from networkx.classes.graph import Graph +from networkx.classes.reportviews import ( + DiDegreeView, + InDegreeView, + InEdgeView, + OutDegreeView, + OutEdgeView, +) +from networkx.exception import NetworkXError + +__all__ = ["DiGraph"] + + +class _CachedPropertyResetterAdjAndSucc: + """Data Descriptor class that syncs and resets cached properties adj and succ + + The cached properties `adj` and `succ` are reset whenever `_adj` or `_succ` + are set to new objects. In addition, the attributes `_succ` and `_adj` + are synced so these two names point to the same object. + + Warning: most of the time, when ``G._adj`` is set, ``G._pred`` should also + be set to maintain a valid data structure. They share datadicts. + + This object sits on a class and ensures that any instance of that + class clears its cached properties "succ" and "adj" whenever the + underlying instance attributes "_succ" or "_adj" are set to a new object. + It only affects the set process of the obj._adj and obj._succ attribute. + All get/del operations act as they normally would. + + For info on Data Descriptors see: https://docs.python.org/3/howto/descriptor.html + """ + + def __set__(self, obj, value): + od = obj.__dict__ + od["_adj"] = value + od["_succ"] = value + # reset cached properties + props = [ + "adj", + "succ", + "edges", + "out_edges", + "degree", + "out_degree", + "in_degree", + ] + for prop in props: + if prop in od: + del od[prop] + + +class _CachedPropertyResetterPred: + """Data Descriptor class for _pred that resets ``pred`` cached_property when needed + + This assumes that the ``cached_property`` ``G.pred`` should be reset whenever + ``G._pred`` is set to a new value. + + Warning: most of the time, when ``G._pred`` is set, ``G._adj`` should also + be set to maintain a valid data structure. They share datadicts. + + This object sits on a class and ensures that any instance of that + class clears its cached property "pred" whenever the underlying + instance attribute "_pred" is set to a new object. It only affects + the set process of the obj._pred attribute. All get/del operations + act as they normally would. + + For info on Data Descriptors see: https://docs.python.org/3/howto/descriptor.html + """ + + def __set__(self, obj, value): + od = obj.__dict__ + od["_pred"] = value + # reset cached properties + props = ["pred", "in_edges", "degree", "out_degree", "in_degree"] + for prop in props: + if prop in od: + del od[prop] + + +class DiGraph(Graph): + """ + Base class for directed graphs. + + A DiGraph stores nodes and edges with optional data, or attributes. + + DiGraphs hold directed edges. Self loops are allowed but multiple + (parallel) edges are not. 
+ + Nodes can be arbitrary (hashable) Python objects with optional + key/value attributes. By convention `None` is not used as a node. + + Edges are represented as links between nodes with optional + key/value attributes. + + Parameters + ---------- + incoming_graph_data : input graph (optional, default: None) + Data to initialize graph. If None (default) an empty + graph is created. The data can be any format that is supported + by the to_networkx_graph() function, currently including edge list, + dict of dicts, dict of lists, NetworkX graph, 2D NumPy array, SciPy + sparse matrix, or PyGraphviz graph. + + attr : keyword arguments, optional (default= no attributes) + Attributes to add to graph as key=value pairs. + + See Also + -------- + Graph + MultiGraph + MultiDiGraph + + Examples + -------- + Create an empty graph structure (a "null graph") with no nodes and + no edges. + + >>> G = nx.DiGraph() + + G can be grown in several ways. + + **Nodes:** + + Add one node at a time: + + >>> G.add_node(1) + + Add the nodes from any container (a list, dict, set or + even the lines from a file or the nodes from another graph). + + >>> G.add_nodes_from([2, 3]) + >>> G.add_nodes_from(range(100, 110)) + >>> H = nx.path_graph(10) + >>> G.add_nodes_from(H) + + In addition to strings and integers any hashable Python object + (except None) can represent a node, e.g. a customized node object, + or even another Graph. + + >>> G.add_node(H) + + **Edges:** + + G can also be grown by adding edges. + + Add one edge, + + >>> G.add_edge(1, 2) + + a list of edges, + + >>> G.add_edges_from([(1, 2), (1, 3)]) + + or a collection of edges, + + >>> G.add_edges_from(H.edges) + + If some edges connect nodes not yet in the graph, the nodes + are added automatically. There are no errors when adding + nodes or edges that already exist. + + **Attributes:** + + Each graph, node, and edge can hold key/value attribute pairs + in an associated attribute dictionary (the keys must be hashable). + By default these are empty, but can be added or changed using + add_edge, add_node or direct manipulation of the attribute + dictionaries named graph, node and edge respectively. + + >>> G = nx.DiGraph(day="Friday") + >>> G.graph + {'day': 'Friday'} + + Add node attributes using add_node(), add_nodes_from() or G.nodes + + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") + >>> G.nodes[1] + {'time': '5pm'} + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute + >>> list(G.nodes(data=True)) + [(1, {'time': '5pm'}), (3, {'time': '2pm'})] + + Add edge attributes using add_edge(), add_edges_from(), subscript + notation, or G.edges. + + >>> G.add_edge(1, 2, weight=4.7) + >>> G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2]["weight"] = 4.7 + >>> G.edges[1, 2]["weight"] = 4 + + Warning: we protect the graph data structure by making `G.edges[1, 2]` a + read-only dict-like structure. However, you can assign to attributes + in e.g. `G.edges[1, 2]`. Thus, use 2 sets of brackets to add/change + data attributes: `G.edges[1, 2]['weight'] = 4` + (For multigraphs: `MG.edges[u, v, key][name] = value`). + + **Shortcuts:** + + Many common graph features allow python syntax to speed reporting. + + >>> 1 in G # check if node in graph + True + >>> [n for n in G if n < 3] # iterate through nodes + [1, 2] + >>> len(G) # number of nodes in graph + 5 + + Often the best way to traverse all edges of a graph is via the neighbors. 
+ The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()` + + >>> for n, nbrsdict in G.adjacency(): + ... for nbr, eattr in nbrsdict.items(): + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass + + But the edges reporting object is often more convenient: + + >>> for u, v, weight in G.edges(data="weight"): + ... if weight is not None: + ... # Do something useful with the edges + ... pass + + **Reporting:** + + Simple graph information is obtained using object-attributes and methods. + Reporting usually provides views instead of containers to reduce memory + usage. The views update as the graph is updated similarly to dict-views. + The objects `nodes`, `edges` and `adj` provide access to data attributes + via lookup (e.g. `nodes[n]`, `edges[u, v]`, `adj[u][v]`) and iteration + (e.g. `nodes.items()`, `nodes.data('color')`, + `nodes.data('color', default='blue')` and similarly for `edges`) + Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`. + + For details on these and other miscellaneous methods, see below. + + **Subclasses (Advanced):** + + The Graph class uses a dict-of-dict-of-dict data structure. + The outer dict (node_dict) holds adjacency information keyed by node. + The next dict (adjlist_dict) represents the adjacency information and holds + edge data keyed by neighbor. The inner dict (edge_attr_dict) represents + the edge data and holds edge attribute values keyed by attribute names. + + Each of these three dicts can be replaced in a subclass by a user defined + dict-like object. In general, the dict-like features should be + maintained but extra features can be added. To replace one of the + dicts create a new graph class by changing the class(!) variable + holding the factory for that dict-like structure. The variable names are + node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory, + adjlist_outer_dict_factory, edge_attr_dict_factory and graph_attr_dict_factory. + + node_dict_factory : function, (default: dict) + Factory function to be used to create the dict containing node + attributes, keyed by node id. + It should require no arguments and return a dict-like object + + node_attr_dict_factory: function, (default: dict) + Factory function to be used to create the node attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object + + adjlist_outer_dict_factory : function, (default: dict) + Factory function to be used to create the outer-most dict + in the data structure that holds adjacency info keyed by node. + It should require no arguments and return a dict-like object. + + adjlist_inner_dict_factory : function, optional (default: dict) + Factory function to be used to create the adjacency list + dict which holds edge data keyed by neighbor. + It should require no arguments and return a dict-like object + + edge_attr_dict_factory : function, optional (default: dict) + Factory function to be used to create the edge attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. + + graph_attr_dict_factory : function, (default: dict) + Factory function to be used to create the graph attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. 
+
+    Typically, if your extension doesn't impact the data structure, all
+    methods will be inherited without issue except: `to_directed/to_undirected`.
+    By default these methods create a DiGraph/Graph class and you probably
+    want them to create your extension of a DiGraph/Graph. To facilitate
+    this we define two class variables that you can set in your subclass.
+
+    to_directed_class : callable, (default: DiGraph or MultiDiGraph)
+        Class to create a new graph structure in the `to_directed` method.
+        If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
+
+    to_undirected_class : callable, (default: Graph or MultiGraph)
+        Class to create a new graph structure in the `to_undirected` method.
+        If `None`, a NetworkX class (Graph or MultiGraph) is used.
+
+    **Subclassing Example**
+
+    Create a low memory graph class that effectively disallows edge
+    attributes by using a single attribute dict for all edges.
+    This reduces the memory used, but you lose edge attributes.
+
+    >>> class ThinGraph(nx.Graph):
+    ...     all_edge_dict = {"weight": 1}
+    ...
+    ...     def single_edge_dict(self):
+    ...         return self.all_edge_dict
+    ...
+    ...     edge_attr_dict_factory = single_edge_dict
+    >>> G = ThinGraph()
+    >>> G.add_edge(2, 1)
+    >>> G[2][1]
+    {'weight': 1}
+    >>> G.add_edge(2, 2)
+    >>> G[2][1] is G[2][2]
+    True
+    """
+
+    _adj = _CachedPropertyResetterAdjAndSucc()  # type: ignore[assignment]
+    _succ = _adj  # type: ignore[has-type]
+    _pred = _CachedPropertyResetterPred()
+
+    def __init__(self, incoming_graph_data=None, **attr):
+        """Initialize a graph with edges, name, or graph attributes.
+
+        Parameters
+        ----------
+        incoming_graph_data : input graph (optional, default: None)
+            Data to initialize graph. If None (default) an empty
+            graph is created. The data can be an edge list, or any
+            NetworkX graph object. If the corresponding optional Python
+            packages are installed the data can also be a 2D NumPy array, a
+            SciPy sparse array, or a PyGraphviz graph.
+
+        attr : keyword arguments, optional (default= no attributes)
+            Attributes to add to graph as key=value pairs.
+
+        See Also
+        --------
+        convert
+
+        Examples
+        --------
+        >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
+        >>> G = nx.Graph(name="my graph")
+        >>> e = [(1, 2), (2, 3), (3, 4)]  # list of edges
+        >>> G = nx.Graph(e)
+
+        Arbitrary graph attribute pairs (key=value) may be assigned
+
+        >>> G = nx.Graph(e, day="Friday")
+        >>> G.graph
+        {'day': 'Friday'}
+
+        """
+        self.graph = self.graph_attr_dict_factory()  # dictionary for graph attributes
+        self._node = self.node_dict_factory()  # dictionary for node attr
+        # We store two adjacency lists:
+        # the predecessors of node n are stored in the dict self._pred
+        # the successors of node n are stored in the dict self._succ=self._adj
+        self._adj = self.adjlist_outer_dict_factory()  # empty adjacency dict successor
+        self._pred = self.adjlist_outer_dict_factory()  # predecessor
+        # Note: self._succ = self._adj  # successor
+
+        self.__networkx_cache__ = {}
+        # attempt to load graph with data
+        if incoming_graph_data is not None:
+            convert.to_networkx_graph(incoming_graph_data, create_using=self)
+        # load graph attributes (must be after convert)
+        self.graph.update(attr)
+
+    @cached_property
+    def adj(self):
+        """Graph adjacency object holding the neighbors of each node.
+
+        This object is a read-only dict-like structure with node keys
+        and neighbor-dict values. The neighbor-dict is keyed by neighbor
+        to the edge-data-dict.
So `G.adj[3][2]['color'] = 'blue'` sets + the color of the edge `(3, 2)` to `"blue"`. + + Iterating over G.adj behaves like a dict. Useful idioms include + `for nbr, datadict in G.adj[n].items():`. + + The neighbor information is also provided by subscripting the graph. + So `for nbr, foovalue in G[node].data('foo', default=1):` works. + + For directed graphs, `G.adj` holds outgoing (successor) info. + """ + return AdjacencyView(self._succ) + + @cached_property + def succ(self): + """Graph adjacency object holding the successors of each node. + + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edge-data-dict. So `G.succ[3][2]['color'] = 'blue'` sets + the color of the edge `(3, 2)` to `"blue"`. + + Iterating over G.succ behaves like a dict. Useful idioms include + `for nbr, datadict in G.succ[n].items():`. A data-view not provided + by dicts also exists: `for nbr, foovalue in G.succ[node].data('foo'):` + and a default can be set via a `default` argument to the `data` method. + + The neighbor information is also provided by subscripting the graph. + So `for nbr, foovalue in G[node].data('foo', default=1):` works. + + For directed graphs, `G.adj` is identical to `G.succ`. + """ + return AdjacencyView(self._succ) + + @cached_property + def pred(self): + """Graph adjacency object holding the predecessors of each node. + + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edge-data-dict. So `G.pred[2][3]['color'] = 'blue'` sets + the color of the edge `(3, 2)` to `"blue"`. + + Iterating over G.pred behaves like a dict. Useful idioms include + `for nbr, datadict in G.pred[n].items():`. A data-view not provided + by dicts also exists: `for nbr, foovalue in G.pred[node].data('foo'):` + A default can be set via a `default` argument to the `data` method. + """ + return AdjacencyView(self._pred) + + def add_node(self, node_for_adding, **attr): + """Add a single node `node_for_adding` and update node attributes. + + Parameters + ---------- + node_for_adding : node + A node can be any hashable Python object except None. + attr : keyword arguments, optional + Set or change node attributes using key=value. + + See Also + -------- + add_nodes_from + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_node(1) + >>> G.add_node("Hello") + >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) + >>> G.add_node(K3) + >>> G.number_of_nodes() + 3 + + Use keywords set/change node attributes: + + >>> G.add_node(1, size=10) + >>> G.add_node(3, weight=0.4, UTM=("13S", 382871, 3972649)) + + Notes + ----- + A hashable object is one that can be used as a key in a Python + dictionary. This includes strings, numbers, tuples of strings + and numbers, etc. + + On many platforms hashable items also include mutables such as + NetworkX Graphs, though one should be careful that the hash + doesn't change on mutables. 
+ """ + if node_for_adding not in self._succ: + if node_for_adding is None: + raise ValueError("None cannot be a node") + self._succ[node_for_adding] = self.adjlist_inner_dict_factory() + self._pred[node_for_adding] = self.adjlist_inner_dict_factory() + attr_dict = self._node[node_for_adding] = self.node_attr_dict_factory() + attr_dict.update(attr) + else: # update attr even if node already exists + self._node[node_for_adding].update(attr) + nx._clear_cache(self) + + def add_nodes_from(self, nodes_for_adding, **attr): + """Add multiple nodes. + + Parameters + ---------- + nodes_for_adding : iterable container + A container of nodes (list, dict, set, etc.). + OR + A container of (node, attribute dict) tuples. + Node attributes are updated using the attribute dict. + attr : keyword arguments, optional (default= no attributes) + Update attributes for all nodes in nodes. + Node attributes specified in nodes as a tuple take + precedence over attributes specified via keyword arguments. + + See Also + -------- + add_node + + Notes + ----- + When adding nodes from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.add_nodes_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_nodes_from("Hello") + >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) + >>> G.add_nodes_from(K3) + >>> sorted(G.nodes(), key=str) + [0, 1, 2, 'H', 'e', 'l', 'o'] + + Use keywords to update specific node attributes for every node. + + >>> G.add_nodes_from([1, 2], size=10) + >>> G.add_nodes_from([3, 4], weight=0.4) + + Use (node, attrdict) tuples to update attributes for specific nodes. + + >>> G.add_nodes_from([(1, dict(size=11)), (2, {"color": "blue"})]) + >>> G.nodes[1]["size"] + 11 + >>> H = nx.Graph() + >>> H.add_nodes_from(G.nodes(data=True)) + >>> H.nodes[1]["size"] + 11 + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.DiGraph([(0, 1), (1, 2), (3, 4)]) + >>> # wrong way - will raise RuntimeError + >>> # G.add_nodes_from(n + 1 for n in G.nodes) + >>> # correct way + >>> G.add_nodes_from(list(n + 1 for n in G.nodes)) + """ + for n in nodes_for_adding: + try: + newnode = n not in self._node + newdict = attr + except TypeError: + n, ndict = n + newnode = n not in self._node + newdict = attr.copy() + newdict.update(ndict) + if newnode: + if n is None: + raise ValueError("None cannot be a node") + self._succ[n] = self.adjlist_inner_dict_factory() + self._pred[n] = self.adjlist_inner_dict_factory() + self._node[n] = self.node_attr_dict_factory() + self._node[n].update(newdict) + nx._clear_cache(self) + + def remove_node(self, n): + """Remove node n. + + Removes the node n and all adjacent edges. + Attempting to remove a nonexistent node will raise an exception. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. 
+ + See Also + -------- + remove_nodes_from + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> list(G.edges) + [(0, 1), (1, 2)] + >>> G.remove_node(1) + >>> list(G.edges) + [] + + """ + try: + nbrs = self._succ[n] + del self._node[n] + except KeyError as err: # NetworkXError if n not in self + raise NetworkXError(f"The node {n} is not in the digraph.") from err + for u in nbrs: + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # remove node from succ + for u in self._pred[n]: + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # remove node from pred + nx._clear_cache(self) + + def remove_nodes_from(self, nodes): + """Remove multiple nodes. + + Parameters + ---------- + nodes : iterable container + A container of nodes (list, dict, set, etc.). If a node + in the container is not in the graph it is silently ignored. + + See Also + -------- + remove_node + + Notes + ----- + When removing nodes from an iterator over the graph you are changing, + a `RuntimeError` will be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.remove_nodes_from`. + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> e = list(G.nodes) + >>> e + [0, 1, 2] + >>> G.remove_nodes_from(e) + >>> list(G.nodes) + [] + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.DiGraph([(0, 1), (1, 2), (3, 4)]) + >>> # this command will fail, as the graph's dict is modified during iteration + >>> # G.remove_nodes_from(n for n in G.nodes if n < 2) + >>> # this command will work, since the dictionary underlying graph is not modified + >>> G.remove_nodes_from(list(n for n in G.nodes if n < 2)) + """ + for n in nodes: + try: + succs = self._succ[n] + del self._node[n] + for u in succs: + del self._pred[u][n] # remove all edges n-u in digraph + del self._succ[n] # now remove node + for u in self._pred[n]: + del self._succ[u][n] # remove all edges n-u in digraph + del self._pred[n] # now remove node + except KeyError: + pass # silent failure on remove + nx._clear_cache(self) + + def add_edge(self, u_of_edge, v_of_edge, **attr): + """Add an edge between u and v. + + The nodes u and v will be automatically added if they are + not already in the graph. + + Edge attributes can be specified with keywords or by directly + accessing the edge's attribute dictionary. See examples below. + + Parameters + ---------- + u_of_edge, v_of_edge : nodes + Nodes can be, for example, strings or numbers. + Nodes must be hashable (and not None) Python objects. + attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + See Also + -------- + add_edges_from : add a collection of edges + + Notes + ----- + Adding an edge that already exists updates the edge data. + + Many NetworkX algorithms designed for weighted graphs use + an edge attribute (by default `weight`) to hold a numerical value. 
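+
+        For instance (a small illustrative sketch), this is the attribute
+        that :func:`~networkx.dijkstra_path_length` reads by default:
+
+        >>> H = nx.Graph()
+        >>> H.add_edge("a", "b", weight=2)
+        >>> H.add_edge("b", "c", weight=2)
+        >>> nx.dijkstra_path_length(H, "a", "c")
+        4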
+ + Examples + -------- + The following all add the edge e=(1, 2) to graph G: + + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> e = (1, 2) + >>> G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container + + Associate data to edges using keywords: + + >>> G.add_edge(1, 2, weight=3) + >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) + + For non-string attribute keys, use subscript notation. + + >>> G.add_edge(1, 2) + >>> G[1][2].update({0: 5}) + >>> G.edges[1, 2].update({0: 5}) + """ + u, v = u_of_edge, v_of_edge + # add nodes + if u not in self._succ: + if u is None: + raise ValueError("None cannot be a node") + self._succ[u] = self.adjlist_inner_dict_factory() + self._pred[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._succ: + if v is None: + raise ValueError("None cannot be a node") + self._succ[v] = self.adjlist_inner_dict_factory() + self._pred[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + # add the edge + datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) + datadict.update(attr) + self._succ[u][v] = datadict + self._pred[v][u] = datadict + nx._clear_cache(self) + + def add_edges_from(self, ebunch_to_add, **attr): + """Add all the edges in ebunch_to_add. + + Parameters + ---------- + ebunch_to_add : container of edges + Each edge given in the container will be added to the + graph. The edges must be given as 2-tuples (u, v) or + 3-tuples (u, v, d) where d is a dictionary containing edge data. + attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + See Also + -------- + add_edge : add a single edge + add_weighted_edges_from : convenient way to add weighted edges + + Notes + ----- + Adding the same edge twice has no effect but any edge data + will be updated when each duplicate edge is added. + + Edge attributes specified in an ebunch take precedence over + attributes specified via keyword arguments. + + When adding edges from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_edges)`, and pass this + object to `G.add_edges_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> e = zip(range(0, 3), range(1, 4)) + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + + Associate data to edges + + >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.DiGraph([(1, 2), (2, 3), (3, 4)]) + >>> # Grow graph by one new node, adding edges to all existing nodes. 
+ >>> # wrong way - will raise RuntimeError + >>> # G.add_edges_from(((5, n) for n in G.nodes)) + >>> # right way - note that there will be no self-edge for node 5 + >>> G.add_edges_from(list((5, n) for n in G.nodes)) + """ + for e in ebunch_to_add: + ne = len(e) + if ne == 3: + u, v, dd = e + elif ne == 2: + u, v = e + dd = {} + else: + raise NetworkXError(f"Edge tuple {e} must be a 2-tuple or 3-tuple.") + if u not in self._succ: + if u is None: + raise ValueError("None cannot be a node") + self._succ[u] = self.adjlist_inner_dict_factory() + self._pred[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._succ: + if v is None: + raise ValueError("None cannot be a node") + self._succ[v] = self.adjlist_inner_dict_factory() + self._pred[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) + datadict.update(attr) + datadict.update(dd) + self._succ[u][v] = datadict + self._pred[v][u] = datadict + nx._clear_cache(self) + + def remove_edge(self, u, v): + """Remove the edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove the edge between nodes u and v. + + Raises + ------ + NetworkXError + If there is not an edge between u and v. + + See Also + -------- + remove_edges_from : remove a collection of edges + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, etc + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.remove_edge(0, 1) + >>> e = (1, 2) + >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> e = (2, 3, {"weight": 7}) # an edge with attribute data + >>> G.remove_edge(*e[:2]) # select first part of edge tuple + """ + try: + del self._succ[u][v] + del self._pred[v][u] + except KeyError as err: + raise NetworkXError(f"The edge {u}-{v} not in graph.") from err + nx._clear_cache(self) + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each edge given in the list or container will be removed + from the graph. The edges can be: + + - 2-tuples (u, v) edge between u and v. + - 3-tuples (u, v, k) where k is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + """ + for e in ebunch: + u, v = e[:2] # ignore edge data + if u in self._succ and v in self._succ[u]: + del self._succ[u][v] + del self._pred[v][u] + nx._clear_cache(self) + + def has_successor(self, u, v): + """Returns True if node u has successor v. + + This is true if graph has the edge u->v. + """ + return u in self._succ and v in self._succ[u] + + def has_predecessor(self, u, v): + """Returns True if node u has predecessor v. + + This is true if graph has the edge u<-v. + """ + return u in self._pred and v in self._pred[u] + + def successors(self, n): + """Returns an iterator over successor nodes of n. + + A successor of n is a node m such that there exists a directed + edge from n to m. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. + + See Also + -------- + predecessors + + Notes + ----- + neighbors() and successors() are the same. 
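+ + Examples + -------- + A short illustrative doctest (the small digraph below is arbitrary): + + >>> G = nx.DiGraph([(0, 1), (0, 2)]) + >>> sorted(G.successors(0)) + [1, 2]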
+ """ + try: + return iter(self._succ[n]) + except KeyError as err: + raise NetworkXError(f"The node {n} is not in the digraph.") from err + + # digraph definitions + neighbors = successors + + def predecessors(self, n): + """Returns an iterator over predecessor nodes of n. + + A predecessor of n is a node m such that there exists a directed + edge from m to n. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. + + See Also + -------- + successors + """ + try: + return iter(self._pred[n]) + except KeyError as err: + raise NetworkXError(f"The node {n} is not in the digraph.") from err + + @cached_property + def edges(self): + """An OutEdgeView of the DiGraph as G.edges or G.edges(). + + edges(self, nbunch=None, data=False, default=None) + + The OutEdgeView provides set-like operations on the edge-tuples + as well as edge attribute lookup. When called, it also provides + an EdgeDataView object which allows control of access to edge + attributes (but does not provide set-like operations). + Hence, `G.edges[u, v]['color']` provides the value of the color + attribute for edge `(u, v)` while + `for (u, v, c) in G.edges.data('color', default='red'):` + iterates through all the edges yielding the color attribute + with default `'red'` if no color attribute exists. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges from these nodes. + data : string or bool, optional (default=False) + The edge attribute returned in 3-tuple (u, v, ddict[data]). + If True, return edge attribute dict in 3-tuple (u, v, ddict). + If False, return 2-tuple (u, v). + default : value, optional (default=None) + Value used for edges that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + edges : OutEdgeView + A view of edge attributes, usually it iterates over (u, v) + or (u, v, d) tuples of edges, but can also be used for + attribute lookup as `edges[u, v]['foo']`. + + See Also + -------- + in_edges, out_edges + + Notes + ----- + Nodes in nbunch that are not in the graph will be (quietly) ignored. + For directed graphs this returns the out-edges. + + Examples + -------- + >>> G = nx.DiGraph() # or MultiDiGraph, etc + >>> nx.add_path(G, [0, 1, 2]) + >>> G.add_edge(2, 3, weight=5) + >>> [e for e in G.edges] + [(0, 1), (1, 2), (2, 3)] + >>> G.edges.data() # default data is {} (empty dict) + OutEdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]) + >>> G.edges.data("weight", default=1) + OutEdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)]) + >>> G.edges([0, 2]) # only edges originating from these nodes + OutEdgeDataView([(0, 1), (2, 3)]) + >>> G.edges(0) # only edges from node 0 + OutEdgeDataView([(0, 1)]) + + """ + return OutEdgeView(self) + + # alias out_edges to edges + @cached_property + def out_edges(self): + return OutEdgeView(self) + + out_edges.__doc__ = edges.__doc__ + + @cached_property + def in_edges(self): + """A view of the in edges of the graph as G.in_edges or G.in_edges(). + + in_edges(self, nbunch=None, data=False, default=None): + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + data : string or bool, optional (default=False) + The edge attribute returned in 3-tuple (u, v, ddict[data]). + If True, return edge attribute dict in 3-tuple (u, v, ddict). + If False, return 2-tuple (u, v). 
+ default : value, optional (default=None) + Value used for edges that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + in_edges : InEdgeView or InEdgeDataView + A view of edge attributes, usually it iterates over (u, v) + or (u, v, d) tuples of edges, but can also be used for + attribute lookup as `edges[u, v]['foo']`. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2, color="blue") + >>> G.in_edges() + InEdgeView([(1, 2)]) + >>> G.in_edges(nbunch=2) + InEdgeDataView([(1, 2)]) + + See Also + -------- + edges + """ + return InEdgeView(self) + + @cached_property + def degree(self): + """A DegreeView for the Graph as G.degree or G.degree(). + + The node degree is the number of edges adjacent to the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iterator for (node, degree) as well as + lookup for the degree for a single node. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + DiDegreeView or int + If multiple nodes are requested (the default), returns a `DiDegreeView` + mapping nodes to their degree. + If a single node is requested, returns the degree of the node as an integer. + + See Also + -------- + in_degree, out_degree + + Examples + -------- + >>> G = nx.DiGraph() # or MultiDiGraph + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.degree(0) # node 0 with degree 1 + 1 + >>> list(G.degree([0, 1, 2])) + [(0, 1), (1, 2), (2, 2)] + + """ + return DiDegreeView(self) + + @cached_property + def in_degree(self): + """An InDegreeView for (node, in_degree) or in_degree for single node. + + The node in_degree is the number of edges pointing to the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iteration over (node, in_degree) as well as + lookup for the degree for a single node. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + If a single node is requested + deg : int + In-degree of the node + + OR if multiple nodes are requested + nd_iter : iterator + The iterator returns two-tuples of (node, in-degree). + + See Also + -------- + degree, out_degree + + Examples + -------- + >>> G = nx.DiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.in_degree(0) # node 0 with degree 0 + 0 + >>> list(G.in_degree([0, 1, 2])) + [(0, 0), (1, 1), (2, 1)] + + """ + return InDegreeView(self) + + @cached_property + def out_degree(self): + """An OutDegreeView for (node, out_degree) + + The node out_degree is the number of edges pointing out of the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iterator over (node, out_degree) as well as + lookup for the degree for a single node. 
+ + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + If a single node is requested + deg : int + Out-degree of the node + + OR if multiple nodes are requested + nd_iter : iterator + The iterator returns two-tuples of (node, out-degree). + + See Also + -------- + degree, in_degree + + Examples + -------- + >>> G = nx.DiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.out_degree(0) # node 0 with degree 1 + 1 + >>> list(G.out_degree([0, 1, 2])) + [(0, 1), (1, 1), (2, 1)] + + """ + return OutDegreeView(self) + + def clear(self): + """Remove all nodes and edges from the graph. + + This also removes the name, and all graph, node, and edge attributes. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.clear() + >>> list(G.nodes) + [] + >>> list(G.edges) + [] + + """ + self._succ.clear() + self._pred.clear() + self._node.clear() + self.graph.clear() + nx._clear_cache(self) + + def clear_edges(self): + """Remove all edges from the graph without altering nodes. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.clear_edges() + >>> list(G.nodes) + [0, 1, 2, 3] + >>> list(G.edges) + [] + + """ + for predecessor_dict in self._pred.values(): + predecessor_dict.clear() + for successor_dict in self._succ.values(): + successor_dict.clear() + nx._clear_cache(self) + + def is_multigraph(self): + """Returns True if graph is a multigraph, False otherwise.""" + return False + + def is_directed(self): + """Returns True if graph is directed, False otherwise.""" + return True + + def to_undirected(self, reciprocal=False, as_view=False): + """Returns an undirected representation of the digraph. + + Parameters + ---------- + reciprocal : bool (optional) + If True only keep edges that appear in both directions + in the original digraph. + as_view : bool (optional, default=False) + If True return an undirected view of the original directed graph. + + Returns + ------- + G : Graph + An undirected graph with the same name and nodes and + with edge (u, v, data) if either (u, v, data) or (v, u, data) + is in the digraph. If both edges exist in digraph and + their edge data is different, only one edge is created + with an arbitrary choice of which edge data to use. + You must check and correct for this manually if desired. + + See Also + -------- + Graph, copy, add_edge, add_edges_from + + Notes + ----- + If edges in both directions (u, v) and (v, u) exist in the + graph, attributes for the new undirected edge will be a combination of + the attributes of the directed edges. The edge data is updated + in the (arbitrary) order that the edges are encountered. For + more customized control of the edge attributes use add_edge(). + + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar G=DiGraph(D) which returns a + shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. 
+ + Warning: If you have subclassed DiGraph to use dict-like objects + in the data structure, those changes do not transfer to the + Graph created by this method. + + Examples + -------- + >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1), (1, 0)] + >>> G2 = H.to_undirected() + >>> list(G2.edges) + [(0, 1)] + """ + graph_class = self.to_undirected_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + if reciprocal is True: + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + if v in self._pred[u] + ) + else: + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) + return G + + def reverse(self, copy=True): + """Returns the reverse of the graph. + + The reverse is a graph with the same nodes and edges + but with the directions of the edges reversed. + + Parameters + ---------- + copy : bool optional (default=True) + If True, return a new DiGraph holding the reversed edges. + If False, the reverse graph is created using a view of + the original graph. + """ + if copy: + H = self.__class__() + H.graph.update(deepcopy(self.graph)) + H.add_nodes_from((n, deepcopy(d)) for n, d in self.nodes.items()) + H.add_edges_from((v, u, deepcopy(d)) for u, v, d in self.edges(data=True)) + return H + return nx.reverse_view(self) diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/filters.py b/.venv/lib/python3.12/site-packages/networkx/classes/filters.py new file mode 100644 index 0000000..e989e22 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/filters.py @@ -0,0 +1,95 @@ +"""Filter factories to hide or show sets of nodes and edges. + +These filters return the function used when creating `SubGraph`. +""" + +__all__ = [ + "no_filter", + "hide_nodes", + "hide_edges", + "hide_multiedges", + "hide_diedges", + "hide_multidiedges", + "show_nodes", + "show_edges", + "show_multiedges", + "show_diedges", + "show_multidiedges", +] + + +def no_filter(*items): + """Returns a filter function that always evaluates to True.""" + return True + + +def hide_nodes(nodes): + """Returns a filter function that hides specific nodes.""" + nodes = set(nodes) + return lambda node: node not in nodes + + +def hide_diedges(edges): + """Returns a filter function that hides specific directed edges.""" + edges = {(u, v) for u, v in edges} + return lambda u, v: (u, v) not in edges + + +def hide_edges(edges): + """Returns a filter function that hides specific undirected edges.""" + alledges = set(edges) | {(v, u) for (u, v) in edges} + return lambda u, v: (u, v) not in alledges + + +def hide_multidiedges(edges): + """Returns a filter function that hides specific multi-directed edges.""" + edges = {(u, v, k) for u, v, k in edges} + return lambda u, v, k: (u, v, k) not in edges + + +def hide_multiedges(edges): + """Returns a filter function that hides specific multi-undirected edges.""" + alledges = set(edges) | {(v, u, k) for (u, v, k) in edges} + return lambda u, v, k: (u, v, k) not in alledges + + +# write show_nodes as a class to make SubGraph pickleable +class show_nodes: + """Filter class to show specific nodes. 
+ + Attach the set of nodes as an attribute to speed up this commonly used filter + + Note that another allowed attribute for filters is to store the number of nodes + on the filter as attribute `length` (used in `__len__`). It is a user + responsibility to ensure this attribute is accurate if present. + """ + + def __init__(self, nodes): + self.nodes = set(nodes) + + def __call__(self, node): + return node in self.nodes + + + def show_diedges(edges): + """Returns a filter function that shows specific directed edges.""" + edges = {(u, v) for u, v in edges} + return lambda u, v: (u, v) in edges + + + def show_edges(edges): + """Returns a filter function that shows specific undirected edges.""" + alledges = set(edges) | {(v, u) for (u, v) in edges} + return lambda u, v: (u, v) in alledges + + + def show_multidiedges(edges): + """Returns a filter function that shows specific multi-directed edges.""" + edges = {(u, v, k) for u, v, k in edges} + return lambda u, v, k: (u, v, k) in edges + + + def show_multiedges(edges): + """Returns a filter function that shows specific multi-undirected edges.""" + alledges = set(edges) | {(v, u, k) for (u, v, k) in edges} + return lambda u, v, k: (u, v, k) in alledges diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/function.py b/.venv/lib/python3.12/site-packages/networkx/classes/function.py new file mode 100644 index 0000000..6cf7278 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/function.py @@ -0,0 +1,1416 @@ +"""Functional interface to graph methods and assorted utilities.""" + +from collections import Counter +from itertools import chain + +import networkx as nx +from networkx.utils import not_implemented_for, pairwise + +__all__ = [ + "nodes", + "edges", + "degree", + "degree_histogram", + "neighbors", + "number_of_nodes", + "number_of_edges", + "density", + "is_directed", + "freeze", + "is_frozen", + "subgraph", + "induced_subgraph", + "edge_subgraph", + "restricted_view", + "to_directed", + "to_undirected", + "add_star", + "add_path", + "add_cycle", + "create_empty_copy", + "set_node_attributes", + "get_node_attributes", + "remove_node_attributes", + "set_edge_attributes", + "get_edge_attributes", + "remove_edge_attributes", + "all_neighbors", + "non_neighbors", + "non_edges", + "common_neighbors", + "is_weighted", + "is_negatively_weighted", + "is_empty", + "selfloop_edges", + "nodes_with_selfloops", + "number_of_selfloops", + "path_weight", + "is_path", +] + + +def nodes(G): + """Returns a NodeView over the graph nodes. + + This function wraps the :func:`G.nodes <networkx.Graph.nodes>` property. + """ + return G.nodes() + + +def edges(G, nbunch=None): + """Returns an edge view of edges incident to nodes in nbunch. + + Return all edges if nbunch is unspecified or nbunch=None. + + For digraphs, edges=out_edges + + This function wraps the :func:`G.edges <networkx.Graph.edges>` property. + """ + return G.edges(nbunch) + + +def degree(G, nbunch=None, weight=None): + """Returns a degree view of single node or of nbunch of nodes. + If nbunch is omitted, then return degrees of *all* nodes. + + This function wraps the :func:`G.degree <networkx.Graph.degree>` property. + """ + return G.degree(nbunch, weight) + + +def neighbors(G, n): + """Returns an iterator over all neighbors of node n. + + This function wraps the :func:`G.neighbors <networkx.Graph.neighbors>` function. + """ + return G.neighbors(n) + + +def number_of_nodes(G): + """Returns the number of nodes in the graph. + + This function wraps the :func:`G.number_of_nodes <networkx.Graph.number_of_nodes>` function.
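+ + A minimal usage sketch (the example graph is chosen arbitrarily): + + >>> G = nx.path_graph(3) + >>> nx.number_of_nodes(G) + 3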
+ """ + return G.number_of_nodes() + + +def number_of_edges(G): + """Returns the number of edges in the graph. + + This function wraps the :func:`G.number_of_edges ` function. + """ + return G.number_of_edges() + + +def density(G): + r"""Returns the density of a graph. + + The density for undirected graphs is + + .. math:: + + d = \frac{2m}{n(n-1)}, + + and for directed graphs is + + .. math:: + + d = \frac{m}{n(n-1)}, + + where `n` is the number of nodes and `m` is the number of edges in `G`. + + Notes + ----- + The density is 0 for a graph without edges and 1 for a complete graph. + The density of multigraphs can be higher than 1. + + Self loops are counted in the total number of edges so graphs with self + loops can have density higher than 1. + """ + n = number_of_nodes(G) + m = number_of_edges(G) + if m == 0 or n <= 1: + return 0 + d = m / (n * (n - 1)) + if not G.is_directed(): + d *= 2 + return d + + +def degree_histogram(G): + """Returns a list of the frequency of each degree value. + + Parameters + ---------- + G : Networkx graph + A graph + + Returns + ------- + hist : list + A list of frequencies of degrees. + The degree values are the index in the list. + + Notes + ----- + Note: the bins are width one, hence len(list) can be large + (Order(number_of_edges)) + """ + counts = Counter(d for n, d in G.degree()) + return [counts.get(i, 0) for i in range(max(counts) + 1 if counts else 0)] + + +def is_directed(G): + """Return True if graph is directed.""" + return G.is_directed() + + +def frozen(*args, **kwargs): + """Dummy method for raising errors when trying to modify frozen graphs""" + raise nx.NetworkXError("Frozen graph can't be modified") + + +def freeze(G): + """Modify graph to prevent further change by adding or removing + nodes or edges. + + Node and edge data can still be modified. + + Parameters + ---------- + G : graph + A NetworkX graph + + Examples + -------- + >>> G = nx.path_graph(4) + >>> G = nx.freeze(G) + >>> try: + ... G.add_edge(4, 5) + ... except nx.NetworkXError as err: + ... print(str(err)) + Frozen graph can't be modified + + Notes + ----- + To "unfreeze" a graph you must make a copy by creating a new graph object: + + >>> graph = nx.path_graph(4) + >>> frozen_graph = nx.freeze(graph) + >>> unfrozen_graph = nx.Graph(frozen_graph) + >>> nx.is_frozen(unfrozen_graph) + False + + See Also + -------- + is_frozen + """ + G.add_node = frozen + G.add_nodes_from = frozen + G.remove_node = frozen + G.remove_nodes_from = frozen + G.add_edge = frozen + G.add_edges_from = frozen + G.add_weighted_edges_from = frozen + G.remove_edge = frozen + G.remove_edges_from = frozen + G.clear = frozen + G.clear_edges = frozen + G.frozen = True + return G + + +def is_frozen(G): + """Returns True if graph is frozen. + + Parameters + ---------- + G : graph + A NetworkX graph + + See Also + -------- + freeze + """ + try: + return G.frozen + except AttributeError: + return False + + +def add_star(G_to_add_to, nodes_for_star, **attr): + """Add a star to Graph G_to_add_to. + + The first node in `nodes_for_star` is the middle of the star. + It is connected to all other nodes. + + Parameters + ---------- + G_to_add_to : graph + A NetworkX graph + nodes_for_star : iterable container + A container of nodes. + attr : keyword arguments, optional (default= no attributes) + Attributes to add to every edge in star. 
+ + See Also + -------- + add_path, add_cycle + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_star(G, [0, 1, 2, 3]) + >>> nx.add_star(G, [10, 11, 12], weight=2) + """ + nlist = iter(nodes_for_star) + try: + v = next(nlist) + except StopIteration: + return + G_to_add_to.add_node(v) + edges = ((v, n) for n in nlist) + G_to_add_to.add_edges_from(edges, **attr) + + +def add_path(G_to_add_to, nodes_for_path, **attr): + """Add a path to the Graph G_to_add_to. + + Parameters + ---------- + G_to_add_to : graph + A NetworkX graph + nodes_for_path : iterable container + A container of nodes. A path will be constructed from + the nodes (in order) and added to the graph. + attr : keyword arguments, optional (default= no attributes) + Attributes to add to every edge in path. + + See Also + -------- + add_star, add_cycle + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> nx.add_path(G, [10, 11, 12], weight=7) + """ + nlist = iter(nodes_for_path) + try: + first_node = next(nlist) + except StopIteration: + return + G_to_add_to.add_node(first_node) + G_to_add_to.add_edges_from(pairwise(chain((first_node,), nlist)), **attr) + + +def add_cycle(G_to_add_to, nodes_for_cycle, **attr): + """Add a cycle to the Graph G_to_add_to. + + Parameters + ---------- + G_to_add_to : graph + A NetworkX graph + nodes_for_cycle: iterable container + A container of nodes. A cycle will be constructed from + the nodes (in order) and added to the graph. + attr : keyword arguments, optional (default= no attributes) + Attributes to add to every edge in cycle. + + See Also + -------- + add_path, add_star + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> nx.add_cycle(G, [0, 1, 2, 3]) + >>> nx.add_cycle(G, [10, 11, 12], weight=7) + """ + nlist = iter(nodes_for_cycle) + try: + first_node = next(nlist) + except StopIteration: + return + G_to_add_to.add_node(first_node) + G_to_add_to.add_edges_from( + pairwise(chain((first_node,), nlist), cyclic=True), **attr + ) + + +def subgraph(G, nbunch): + """Returns the subgraph induced on nodes in nbunch. + + Parameters + ---------- + G : graph + A NetworkX graph + + nbunch : list, iterable + A container of nodes that will be iterated through once (thus + it should be an iterator or be iterable). Each element of the + container should be a valid node type: any hashable type except + None. If nbunch is None, return all edges data in the graph. + Nodes in nbunch that are not in the graph will be (quietly) + ignored. + + Notes + ----- + subgraph(G) calls G.subgraph() + """ + return G.subgraph(nbunch) + + +def induced_subgraph(G, nbunch): + """Returns a SubGraph view of `G` showing only nodes in nbunch. + + The induced subgraph of a graph on a set of nodes N is the + graph with nodes N and edges from G which have both ends in N. + + Parameters + ---------- + G : NetworkX Graph + nbunch : node, container of nodes or None (for all nodes) + + Returns + ------- + subgraph : SubGraph View + A read-only view of the subgraph in `G` induced by the nodes. + Changes to the graph `G` will be reflected in the view. 
+ + Notes + ----- + To create a mutable subgraph with its own copies of nodes + edges and attributes use `subgraph.copy()` or `Graph(subgraph)` + + For an inplace reduction of a graph to a subgraph you can remove nodes: + `G.remove_nodes_from(n in G if n not in set(nbunch))` + + If you are going to compute subgraphs of your subgraphs you could + end up with a chain of views that can be very slow once the chain + has about 15 views in it. If they are all induced subgraphs, you + can short-cut the chain by making them all subgraphs of the original + graph. The graph class method `G.subgraph` does this when `G` is + a subgraph. In contrast, this function allows you to choose to build + chains or not, as you wish. The returned subgraph is a view on `G`. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> H = nx.induced_subgraph(G, [0, 1, 3]) + >>> list(H.edges) + [(0, 1)] + >>> list(H.nodes) + [0, 1, 3] + """ + induced_nodes = nx.filters.show_nodes(G.nbunch_iter(nbunch)) + return nx.subgraph_view(G, filter_node=induced_nodes) + + +def edge_subgraph(G, edges): + """Returns a view of the subgraph induced by the specified edges. + + The induced subgraph contains each edge in `edges` and each + node incident to any of those edges. + + Parameters + ---------- + G : NetworkX Graph + edges : iterable + An iterable of edges. Edges not present in `G` are ignored. + + Returns + ------- + subgraph : SubGraph View + A read-only edge-induced subgraph of `G`. + Changes to `G` are reflected in the view. + + Notes + ----- + To create a mutable subgraph with its own copies of nodes + edges and attributes use `subgraph.copy()` or `Graph(subgraph)` + + If you create a subgraph of a subgraph recursively you can end up + with a chain of subgraphs that becomes very slow with about 15 + nested subgraph views. Luckily the edge_subgraph filter nests + nicely so you can use the original graph as G in this function + to avoid chains. We do not rule out chains programmatically so + that odd cases like an `edge_subgraph` of a `restricted_view` + can be created. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> H = G.edge_subgraph([(0, 1), (3, 4)]) + >>> list(H.nodes) + [0, 1, 3, 4] + >>> list(H.edges) + [(0, 1), (3, 4)] + """ + nxf = nx.filters + edges = set(edges) + nodes = set() + for e in edges: + nodes.update(e[:2]) + induced_nodes = nxf.show_nodes(nodes) + if G.is_multigraph(): + if G.is_directed(): + induced_edges = nxf.show_multidiedges(edges) + else: + induced_edges = nxf.show_multiedges(edges) + else: + if G.is_directed(): + induced_edges = nxf.show_diedges(edges) + else: + induced_edges = nxf.show_edges(edges) + return nx.subgraph_view(G, filter_node=induced_nodes, filter_edge=induced_edges) + + +def restricted_view(G, nodes, edges): + """Returns a view of `G` with hidden nodes and edges. + + The resulting subgraph filters out node `nodes` and edges `edges`. + Filtered out nodes also filter out any of their edges. + + Parameters + ---------- + G : NetworkX Graph + nodes : iterable + An iterable of nodes. Nodes not present in `G` are ignored. + edges : iterable + An iterable of edges. Edges not present in `G` are ignored. + + Returns + ------- + subgraph : SubGraph View + A read-only restricted view of `G` filtering out nodes and edges. + Changes to `G` are reflected in the view. 
+ + Notes + ----- + To create a mutable subgraph with its own copies of nodes + edges and attributes use `subgraph.copy()` or `Graph(subgraph)` + + If you create a subgraph of a subgraph recursively you may end up + with a chain of subgraph views. Such chains can get quite slow + for lengths near 15. To avoid long chains, try to make your subgraph + based on the original graph. We do not rule out chains programmatically + so that odd cases like an `edge_subgraph` of a `restricted_view` + can be created. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> H = nx.restricted_view(G, [0], [(1, 2), (3, 4)]) + >>> list(H.nodes) + [1, 2, 3, 4] + >>> list(H.edges) + [(2, 3)] + """ + nxf = nx.filters + hide_nodes = nxf.hide_nodes(nodes) + if G.is_multigraph(): + if G.is_directed(): + hide_edges = nxf.hide_multidiedges(edges) + else: + hide_edges = nxf.hide_multiedges(edges) + else: + if G.is_directed(): + hide_edges = nxf.hide_diedges(edges) + else: + hide_edges = nxf.hide_edges(edges) + return nx.subgraph_view(G, filter_node=hide_nodes, filter_edge=hide_edges) + + +def to_directed(graph): + """Returns a directed view of the graph `graph`. + + Identical to graph.to_directed(as_view=True) + Note that graph.to_directed defaults to `as_view=False` + while this function always provides a view. + """ + return graph.to_directed(as_view=True) + + +def to_undirected(graph): + """Returns an undirected view of the graph `graph`. + + Identical to graph.to_undirected(as_view=True) + Note that graph.to_undirected defaults to `as_view=False` + while this function always provides a view. + """ + return graph.to_undirected(as_view=True) + + +def create_empty_copy(G, with_data=True): + """Returns a copy of the graph G with all of the edges removed. + + Parameters + ---------- + G : graph + A NetworkX graph + + with_data : bool (default=True) + Propagate Graph and Nodes data to the new graph. + + See Also + -------- + empty_graph + + """ + H = G.__class__() + H.add_nodes_from(G.nodes(data=with_data)) + if with_data: + H.graph.update(G.graph) + return H + + +@nx._dispatchable(preserve_node_attrs=True, mutates_input=True) +def set_node_attributes(G, values, name=None): + """Sets node attributes from a given value or dictionary of values. + + .. Warning:: The call order of arguments `values` and `name` + switched between v1.x & v2.x. + + Parameters + ---------- + G : NetworkX Graph + + values : scalar value, dict-like + What the node attribute should be set to. If `values` is + not a dictionary, then it is treated as a single attribute value + that is then applied to every node in `G`. This means that if + you provide a mutable object, like a list, updates to that object + will be reflected in the node attribute for every node. + The attribute name will be `name`. + + If `values` is a dict or a dict of dict, it should be keyed + by node to either an attribute value or a dict of attribute key/value + pairs used to update the node's attributes. + + name : string (optional, default=None) + Name of the node attribute to set if values is a scalar. 
+ + Examples + -------- + After computing some property of the nodes of a graph, you may want + to assign a node attribute to store the value of that property for + each node:: + + >>> G = nx.path_graph(3) + >>> bb = nx.betweenness_centrality(G) + >>> isinstance(bb, dict) + True + >>> nx.set_node_attributes(G, bb, "betweenness") + >>> G.nodes[1]["betweenness"] + 1.0 + + If you provide a list as the second argument, updates to the list + will be reflected in the node attribute for each node:: + + >>> G = nx.path_graph(3) + >>> labels = [] + >>> nx.set_node_attributes(G, labels, "labels") + >>> labels.append("foo") + >>> G.nodes[0]["labels"] + ['foo'] + >>> G.nodes[1]["labels"] + ['foo'] + >>> G.nodes[2]["labels"] + ['foo'] + + If you provide a dictionary of dictionaries as the second argument, + the outer dictionary is assumed to be keyed by node to an inner + dictionary of node attributes for that node:: + + >>> G = nx.path_graph(3) + >>> attrs = {0: {"attr1": 20, "attr2": "nothing"}, 1: {"attr2": 3}} + >>> nx.set_node_attributes(G, attrs) + >>> G.nodes[0]["attr1"] + 20 + >>> G.nodes[0]["attr2"] + 'nothing' + >>> G.nodes[1]["attr2"] + 3 + >>> G.nodes[2] + {} + + Note that if the dictionary contains nodes that are not in `G`, the + values are silently ignored:: + + >>> G = nx.Graph() + >>> G.add_node(0) + >>> nx.set_node_attributes(G, {0: "red", 1: "blue"}, name="color") + >>> G.nodes[0]["color"] + 'red' + >>> 1 in G.nodes + False + + """ + # Set node attributes based on type of `values` + if name is not None: # `values` must not be a dict of dict + try: # `values` is a dict + for n, v in values.items(): + try: + G.nodes[n][name] = values[n] + except KeyError: + pass + except AttributeError: # `values` is a constant + for n in G: + G.nodes[n][name] = values + else: # `values` must be dict of dict + for n, d in values.items(): + try: + G.nodes[n].update(d) + except KeyError: + pass + nx._clear_cache(G) + + +@nx._dispatchable(node_attrs={"name": "default"}) +def get_node_attributes(G, name, default=None): + """Get node attributes from graph + + Parameters + ---------- + G : NetworkX Graph + + name : string + Attribute name + + default: object (default=None) + Default value of the node attribute if there is no value set for that + node in graph. If `None` then nodes without this attribute are not + included in the returned dict. + + Returns + ------- + Dictionary of attributes keyed by node. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_nodes_from([1, 2, 3], color="red") + >>> color = nx.get_node_attributes(G, "color") + >>> color[1] + 'red' + >>> G.add_node(4) + >>> color = nx.get_node_attributes(G, "color", default="yellow") + >>> color[4] + 'yellow' + """ + if default is not None: + return {n: d.get(name, default) for n, d in G.nodes.items()} + return {n: d[name] for n, d in G.nodes.items() if name in d} + + +@nx._dispatchable(preserve_node_attrs=True, mutates_input=True) +def remove_node_attributes(G, *attr_names, nbunch=None): + """Remove node attributes from all nodes in the graph. + + Parameters + ---------- + G : NetworkX Graph + + *attr_names : List of Strings + The attribute names to remove from the graph. + + nbunch : List of Nodes + Remove the node attributes only from the nodes in this list. 
+ + Examples + -------- + >>> G = nx.Graph() + >>> G.add_nodes_from([1, 2, 3], color="blue") + >>> nx.get_node_attributes(G, "color") + {1: 'blue', 2: 'blue', 3: 'blue'} + >>> nx.remove_node_attributes(G, "color") + >>> nx.get_node_attributes(G, "color") + {} + """ + + if nbunch is None: + nbunch = G.nodes() + + for attr in attr_names: + for n, d in G.nodes(data=True): + if n in nbunch: + try: + del d[attr] + except KeyError: + pass + + +@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True) +def set_edge_attributes(G, values, name=None): + """Sets edge attributes from a given value or dictionary of values. + + .. Warning:: The call order of arguments `values` and `name` + switched between v1.x & v2.x. + + Parameters + ---------- + G : NetworkX Graph + + values : scalar value, dict-like + What the edge attribute should be set to. If `values` is + not a dictionary, then it is treated as a single attribute value + that is then applied to every edge in `G`. This means that if + you provide a mutable object, like a list, updates to that object + will be reflected in the edge attribute for each edge. The attribute + name will be `name`. + + If `values` is a dict or a dict of dict, it should be keyed + by edge tuple to either an attribute value or a dict of attribute + key/value pairs used to update the edge's attributes. + For multigraphs, the edge tuples must be of the form ``(u, v, key)``, + where `u` and `v` are nodes and `key` is the edge key. + For non-multigraphs, the keys must be tuples of the form ``(u, v)``. + + name : string (optional, default=None) + Name of the edge attribute to set if values is a scalar. + + Examples + -------- + After computing some property of the edges of a graph, you may want + to assign an edge attribute to store the value of that property for + each edge:: + + >>> G = nx.path_graph(3) + >>> bb = nx.edge_betweenness_centrality(G, normalized=False) + >>> nx.set_edge_attributes(G, bb, "betweenness") + >>> G.edges[1, 2]["betweenness"] + 2.0 + + If you provide a list as the second argument, updates to the list + will be reflected in the edge attribute for each edge:: + + >>> labels = [] + >>> nx.set_edge_attributes(G, labels, "labels") + >>> labels.append("foo") + >>> G.edges[0, 1]["labels"] + ['foo'] + >>> G.edges[1, 2]["labels"] + ['foo'] + + If you provide a dictionary of dictionaries as the second argument, + the entire dictionary will be used to update edge attributes:: + + >>> G = nx.path_graph(3) + >>> attrs = {(0, 1): {"attr1": 20, "attr2": "nothing"}, (1, 2): {"attr2": 3}} + >>> nx.set_edge_attributes(G, attrs) + >>> G[0][1]["attr1"] + 20 + >>> G[0][1]["attr2"] + 'nothing' + >>> G[1][2]["attr2"] + 3 + + The attributes of one Graph can be used to set those of another.
+ + >>> H = nx.path_graph(3) + >>> nx.set_edge_attributes(H, G.edges) + + Note that if the dict contains edges that are not in `G`, they are + silently ignored:: + + >>> G = nx.Graph([(0, 1)]) + >>> nx.set_edge_attributes(G, {(1, 2): {"weight": 2.0}}) + >>> (1, 2) in G.edges() + False + + For multigraphs, the `values` dict is expected to be keyed by 3-tuples + including the edge key:: + + >>> MG = nx.MultiGraph() + >>> edges = [(0, 1), (0, 1)] + >>> MG.add_edges_from(edges) # Returns list of edge keys + [0, 1] + >>> attributes = {(0, 1, 0): {"cost": 21}, (0, 1, 1): {"cost": 7}} + >>> nx.set_edge_attributes(MG, attributes) + >>> MG[0][1][0]["cost"] + 21 + >>> MG[0][1][1]["cost"] + 7 + + If MultiGraph attributes are desired for a Graph, you must convert the 3-tuple + multiedge to a 2-tuple edge and the last multiedge's attribute value will + overwrite the previous values. Continuing from the previous case we get:: + + >>> H = nx.path_graph([0, 1, 2]) + >>> nx.set_edge_attributes(H, {(u, v): ed for u, v, ed in MG.edges.data()}) + >>> nx.get_edge_attributes(H, "cost") + {(0, 1): 7} + + """ + if name is not None: + # `values` does not contain attribute names + try: + # if `values` is a dict using `.items()` => {edge: value} + if G.is_multigraph(): + for (u, v, key), value in values.items(): + try: + G._adj[u][v][key][name] = value + except KeyError: + pass + else: + for (u, v), value in values.items(): + try: + G._adj[u][v][name] = value + except KeyError: + pass + except AttributeError: + # treat `values` as a constant + for u, v, data in G.edges(data=True): + data[name] = values + else: + # `values` consists of dict-of-dict {edge: {attr: value}} shape + if G.is_multigraph(): + for (u, v, key), d in values.items(): + try: + G._adj[u][v][key].update(d) + except KeyError: + pass + else: + for (u, v), d in values.items(): + try: + G._adj[u][v].update(d) + except KeyError: + pass + nx._clear_cache(G) + + +@nx._dispatchable(edge_attrs={"name": "default"}) +def get_edge_attributes(G, name, default=None): + """Get edge attributes from graph + + Parameters + ---------- + G : NetworkX Graph + + name : string + Attribute name + + default: object (default=None) + Default value of the edge attribute if there is no value set for that + edge in graph. If `None` then edges without this attribute are not + included in the returned dict. + + Returns + ------- + Dictionary of attributes keyed by edge. For (di)graphs, the keys are + 2-tuples of the form: (u, v). For multi(di)graphs, the keys are 3-tuples of + the form: (u, v, key). + + Examples + -------- + >>> G = nx.Graph() + >>> nx.add_path(G, [1, 2, 3], color="red") + >>> color = nx.get_edge_attributes(G, "color") + >>> color[(1, 2)] + 'red' + >>> G.add_edge(3, 4) + >>> color = nx.get_edge_attributes(G, "color", default="yellow") + >>> color[(3, 4)] + 'yellow' + """ + if G.is_multigraph(): + edges = G.edges(keys=True, data=True) + else: + edges = G.edges(data=True) + if default is not None: + return {x[:-1]: x[-1].get(name, default) for x in edges} + return {x[:-1]: x[-1][name] for x in edges if name in x[-1]} + + +@nx._dispatchable(preserve_edge_attrs=True, mutates_input=True) +def remove_edge_attributes(G, *attr_names, ebunch=None): + """Remove edge attributes from all edges in the graph. + + Parameters + ---------- + G : NetworkX Graph + + *attr_names : List of Strings + The attribute names to remove from the graph. + + ebunch : List of Edge Tuples + Remove the edge attributes only from the edges in this list. + For multigraphs the edges are 3-tuples of the form ``(u, v, key)``.
+ + Examples + -------- + >>> G = nx.path_graph(3) + >>> nx.set_edge_attributes(G, {(u, v): u + v for u, v in G.edges()}, name="weight") + >>> nx.get_edge_attributes(G, "weight") + {(0, 1): 1, (1, 2): 3} + >>> nx.remove_edge_attributes(G, "weight") + >>> nx.get_edge_attributes(G, "weight") + {} + """ + if ebunch is None: + ebunch = G.edges(keys=True) if G.is_multigraph() else G.edges() + + for attr in attr_names: + edges = ( + G.edges(keys=True, data=True) if G.is_multigraph() else G.edges(data=True) + ) + for *e, d in edges: + if tuple(e) in ebunch: + try: + del d[attr] + except KeyError: + pass + + +def all_neighbors(graph, node): + """Returns all of the neighbors of a node in the graph. + + If the graph is directed returns predecessors as well as successors. + + Parameters + ---------- + graph : NetworkX graph + Graph to find neighbors. + + node : node + The node whose neighbors will be returned. + + Returns + ------- + neighbors : iterator + Iterator of neighbors + """ + if graph.is_directed(): + values = chain(graph.predecessors(node), graph.successors(node)) + else: + values = graph.neighbors(node) + return values + + +def non_neighbors(graph, node): + """Returns the non-neighbors of the node in the graph. + + Parameters + ---------- + graph : NetworkX graph + Graph to find neighbors. + + node : node + The node whose non-neighbors will be returned. + + Returns + ------- + non_neighbors : set + Set of nodes in the graph that are not neighbors of the node. + """ + return graph._adj.keys() - graph._adj[node].keys() - {node} + + +def non_edges(graph): + """Returns the nonexistent edges in the graph. + + Parameters + ---------- + graph : NetworkX graph. + Graph to find nonexistent edges. + + Returns + ------- + non_edges : iterator + Iterator of edges that are not in the graph. + """ + if graph.is_directed(): + for u in graph: + for v in non_neighbors(graph, u): + yield (u, v) + else: + nodes = set(graph) + while nodes: + u = nodes.pop() + for v in nodes - set(graph[u]): + yield (u, v) + + +@not_implemented_for("directed") +def common_neighbors(G, u, v): + """Returns the common neighbors of two nodes in a graph. + + Parameters + ---------- + G : graph + A NetworkX undirected graph. + + u, v : nodes + Nodes in the graph. + + Returns + ------- + cnbors : set + Set of common neighbors of u and v in the graph. + + Raises + ------ + NetworkXError + If u or v is not a node in the graph. + + Examples + -------- + >>> G = nx.complete_graph(5) + >>> sorted(nx.common_neighbors(G, 0, 1)) + [2, 3, 4] + """ + if u not in G: + raise nx.NetworkXError("u is not in the graph.") + if v not in G: + raise nx.NetworkXError("v is not in the graph.") + + return G._adj[u].keys() & G._adj[v].keys() - {u, v} + + +@nx._dispatchable(preserve_edge_attrs=True) +def is_weighted(G, edge=None, weight="weight"): + """Returns True if `G` has weighted edges. + + Parameters + ---------- + G : graph + A NetworkX graph. + + edge : tuple, optional + A 2-tuple specifying the only edge in `G` that will be tested. If + None, then every edge in `G` is tested. + + weight: string, optional + The attribute name used to query for edge weights. + + Returns + ------- + bool + A boolean signifying if `G`, or the specified edge, is weighted. + + Raises + ------ + NetworkXError + If the specified edge does not exist.
+ + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.is_weighted(G) + False + >>> nx.is_weighted(G, (2, 3)) + False + + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2, weight=1) + >>> nx.is_weighted(G) + True + + """ + if edge is not None: + data = G.get_edge_data(*edge) + if data is None: + msg = f"Edge {edge!r} does not exist." + raise nx.NetworkXError(msg) + return weight in data + + if is_empty(G): + # Special handling required since: all([]) == True + return False + + return all(weight in data for u, v, data in G.edges(data=True)) + + +@nx._dispatchable(edge_attrs="weight") +def is_negatively_weighted(G, edge=None, weight="weight"): + """Returns True if `G` has negatively weighted edges. + + Parameters + ---------- + G : graph + A NetworkX graph. + + edge : tuple, optional + A 2-tuple specifying the only edge in `G` that will be tested. If + None, then every edge in `G` is tested. + + weight: string, optional + The attribute name used to query for edge weights. + + Returns + ------- + bool + A boolean signifying if `G`, or the specified edge, is negatively + weighted. + + Raises + ------ + NetworkXError + If the specified edge does not exist. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from([(1, 3), (2, 4), (2, 6)]) + >>> G.add_edge(1, 2, weight=4) + >>> nx.is_negatively_weighted(G, (1, 2)) + False + >>> G[2][4]["weight"] = -2 + >>> nx.is_negatively_weighted(G) + True + >>> G = nx.DiGraph() + >>> edges = [("0", "3", 3), ("0", "1", -5), ("1", "0", -2)] + >>> G.add_weighted_edges_from(edges) + >>> nx.is_negatively_weighted(G) + True + + """ + if edge is not None: + data = G.get_edge_data(*edge) + if data is None: + msg = f"Edge {edge!r} does not exist." + raise nx.NetworkXError(msg) + return weight in data and data[weight] < 0 + + return any(weight in data and data[weight] < 0 for u, v, data in G.edges(data=True)) + + +@nx._dispatchable +def is_empty(G): + """Returns True if `G` has no edges. + + Parameters + ---------- + G : graph + A NetworkX graph. + + Returns + ------- + bool + True if `G` has no edges, and False otherwise. + + Notes + ----- + An empty graph can have nodes but not edges. The empty graph with zero + nodes is known as the null graph. This is an $O(n)$ operation where n + is the number of nodes in the graph. + + """ + return not any(G._adj.values()) + + +def nodes_with_selfloops(G): + """Returns an iterator over nodes with self loops. + + A node with a self loop has an edge with both ends adjacent + to that node. + + Returns + ------- + nodelist : iterator + An iterator over nodes with self loops. + + See Also + -------- + selfloop_edges, number_of_selfloops + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge(1, 1) + >>> G.add_edge(1, 2) + >>> list(nx.nodes_with_selfloops(G)) + [1] + + """ + return (n for n, nbrs in G._adj.items() if n in nbrs) + + +def selfloop_edges(G, data=False, keys=False, default=None): + """Returns an iterator over selfloop edges. + + A selfloop edge has the same node at both ends. + + Parameters + ---------- + G : graph + A NetworkX graph. + data : string or bool, optional (default=False) + Return selfloop edges as two tuples (u, v) (data=False) + or three-tuples (u, v, datadict) (data=True) + or three-tuples (u, v, datavalue) (data='attrname') + keys : bool, optional (default=False) + If True, return edge keys with each edge. + default : value, optional (default=None) + Value used for edges that don't have the requested attribute.
+ Only relevant if data is not True or False. + + Returns + ------- + edgeiter : iterator over edge tuples + An iterator over all selfloop edges. + + See Also + -------- + nodes_with_selfloops, number_of_selfloops + + Examples + -------- + >>> G = nx.MultiGraph() # or Graph, DiGraph, MultiDiGraph, etc + >>> ekey = G.add_edge(1, 1) + >>> ekey = G.add_edge(1, 2) + >>> list(nx.selfloop_edges(G)) + [(1, 1)] + >>> list(nx.selfloop_edges(G, data=True)) + [(1, 1, {})] + >>> list(nx.selfloop_edges(G, keys=True)) + [(1, 1, 0)] + >>> list(nx.selfloop_edges(G, keys=True, data=True)) + [(1, 1, 0, {})] + """ + if data is True: + if G.is_multigraph(): + if keys is True: + return ( + (n, n, k, d) + for n, nbrs in G._adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) + else: + return ( + (n, n, d) + for n, nbrs in G._adj.items() + if n in nbrs + for d in nbrs[n].values() + ) + else: + return ((n, n, nbrs[n]) for n, nbrs in G._adj.items() if n in nbrs) + elif data is not False: + if G.is_multigraph(): + if keys is True: + return ( + (n, n, k, d.get(data, default)) + for n, nbrs in G._adj.items() + if n in nbrs + for k, d in nbrs[n].items() + ) + else: + return ( + (n, n, d.get(data, default)) + for n, nbrs in G._adj.items() + if n in nbrs + for d in nbrs[n].values() + ) + else: + return ( + (n, n, nbrs[n].get(data, default)) + for n, nbrs in G._adj.items() + if n in nbrs + ) + else: + if G.is_multigraph(): + if keys is True: + return ( + (n, n, k) + for n, nbrs in G._adj.items() + if n in nbrs + for k in nbrs[n] + ) + else: + return ( + (n, n) + for n, nbrs in G._adj.items() + if n in nbrs + for i in range(len(nbrs[n])) # for easy edge removal (#4068) + ) + else: + return ((n, n) for n, nbrs in G._adj.items() if n in nbrs) + + +@nx._dispatchable +def number_of_selfloops(G): + """Returns the number of selfloop edges. + + A selfloop edge has the same node at both ends. + + Returns + ------- + nloops : int + The number of selfloops. + + See Also + -------- + nodes_with_selfloops, selfloop_edges + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge(1, 1) + >>> G.add_edge(1, 2) + >>> nx.number_of_selfloops(G) + 1 + """ + return sum(1 for _ in nx.selfloop_edges(G)) + + +def is_path(G, path): + """Returns whether or not the specified path exists. + + For it to return True, every node on the path must exist and + each consecutive pair must be connected via one or more edges. + + Parameters + ---------- + G : graph + A NetworkX graph. + + path : list + A list of nodes which defines the path to traverse + + Returns + ------- + bool + True if `path` is a valid path in `G` + + """ + try: + return all(nbr in G._adj[node] for node, nbr in nx.utils.pairwise(path)) + except (KeyError, TypeError): + return False + + +def path_weight(G, path, weight): + """Returns total cost associated with specified path and weight + + Parameters + ---------- + G : graph + A NetworkX graph. + + path: list + A list of node labels which defines the path to traverse + + weight: string + A string indicating which edge attribute to use for path cost + + Returns + ------- + cost: int or float + An integer or a float representing the total cost with respect to the + specified weight of the specified path + + Raises + ------ + NetworkXNoPath + If the specified path does not exist.
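+ + Examples + -------- + A small usage sketch; the edge weights below are arbitrary: + + >>> G = nx.Graph() + >>> G.add_edge(0, 1, weight=3) + >>> G.add_edge(1, 2, weight=4) + >>> nx.path_weight(G, [0, 1, 2], weight="weight") + 7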
+ """ + multigraph = G.is_multigraph() + cost = 0 + + if not nx.is_path(G, path): + raise nx.NetworkXNoPath("path does not exist") + for node, nbr in nx.utils.pairwise(path): + if multigraph: + cost += min(v[weight] for v in G._adj[node][nbr].values()) + else: + cost += G._adj[node][nbr][weight] + return cost diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/graph.py b/.venv/lib/python3.12/site-packages/networkx/classes/graph.py new file mode 100644 index 0000000..e6adcf0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/graph.py @@ -0,0 +1,2071 @@ +"""Base class for undirected graphs. + +The Graph class allows any hashable object as a node +and can associate key/value attribute pairs with each undirected edge. + +Self-loops are allowed but multiple edges are not (see MultiGraph). + +For directed graphs see DiGraph and MultiDiGraph. +""" + +from copy import deepcopy +from functools import cached_property + +import networkx as nx +from networkx import convert +from networkx.classes.coreviews import AdjacencyView +from networkx.classes.reportviews import DegreeView, EdgeView, NodeView +from networkx.exception import NetworkXError + +__all__ = ["Graph"] + + +class _CachedPropertyResetterAdj: + """Data Descriptor class for _adj that resets ``adj`` cached_property when needed + + This assumes that the ``cached_property`` ``G.adj`` should be reset whenever + ``G._adj`` is set to a new value. + + This object sits on a class and ensures that any instance of that + class clears its cached property "adj" whenever the underlying + instance attribute "_adj" is set to a new object. It only affects + the set process of the obj._adj attribute. All get/del operations + act as they normally would. + + For info on Data Descriptors see: https://docs.python.org/3/howto/descriptor.html + """ + + def __set__(self, obj, value): + od = obj.__dict__ + od["_adj"] = value + # reset cached properties + props = ["adj", "edges", "degree"] + for prop in props: + if prop in od: + del od[prop] + + +class _CachedPropertyResetterNode: + """Data Descriptor class for _node that resets ``nodes`` cached_property when needed + + This assumes that the ``cached_property`` ``G.node`` should be reset whenever + ``G._node`` is set to a new value. + + This object sits on a class and ensures that any instance of that + class clears its cached property "nodes" whenever the underlying + instance attribute "_node" is set to a new object. It only affects + the set process of the obj._adj attribute. All get/del operations + act as they normally would. + + For info on Data Descriptors see: https://docs.python.org/3/howto/descriptor.html + """ + + def __set__(self, obj, value): + od = obj.__dict__ + od["_node"] = value + # reset cached properties + if "nodes" in od: + del od["nodes"] + + +class Graph: + """ + Base class for undirected graphs. + + A Graph stores nodes and edges with optional data, or attributes. + + Graphs hold undirected edges. Self loops are allowed but multiple + (parallel) edges are not. + + Nodes can be arbitrary (hashable) Python objects with optional + key/value attributes, except that `None` is not allowed as a node. + + Edges are represented as links between nodes with optional + key/value attributes. + + Parameters + ---------- + incoming_graph_data : input graph (optional, default: None) + Data to initialize graph. If None (default) an empty + graph is created. 
The data can be any format that is supported + by the to_networkx_graph() function, currently including edge list, + dict of dicts, dict of lists, NetworkX graph, 2D NumPy array, SciPy + sparse matrix, or PyGraphviz graph. + + attr : keyword arguments, optional (default= no attributes) + Attributes to add to graph as key=value pairs. + + See Also + -------- + DiGraph + MultiGraph + MultiDiGraph + + Examples + -------- + Create an empty graph structure (a "null graph") with no nodes and + no edges. + + >>> G = nx.Graph() + + G can be grown in several ways. + + **Nodes:** + + Add one node at a time: + + >>> G.add_node(1) + + Add the nodes from any container (a list, dict, set or + even the lines from a file or the nodes from another graph). + + >>> G.add_nodes_from([2, 3]) + >>> G.add_nodes_from(range(100, 110)) + >>> H = nx.path_graph(10) + >>> G.add_nodes_from(H) + + In addition to strings and integers any hashable Python object + (except None) can represent a node, e.g. a customized node object, + or even another Graph. + + >>> G.add_node(H) + + **Edges:** + + G can also be grown by adding edges. + + Add one edge, + + >>> G.add_edge(1, 2) + + a list of edges, + + >>> G.add_edges_from([(1, 2), (1, 3)]) + + or a collection of edges, + + >>> G.add_edges_from(H.edges) + + If some edges connect nodes not yet in the graph, the nodes + are added automatically. There are no errors when adding + nodes or edges that already exist. + + **Attributes:** + + Each graph, node, and edge can hold key/value attribute pairs + in an associated attribute dictionary (the keys must be hashable). + By default these are empty, but can be added or changed using + add_edge, add_node or direct manipulation of the attribute + dictionaries named graph, node and edge respectively. + + >>> G = nx.Graph(day="Friday") + >>> G.graph + {'day': 'Friday'} + + Add node attributes using add_node(), add_nodes_from() or G.nodes + + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") + >>> G.nodes[1] + {'time': '5pm'} + >>> G.nodes[1]["room"] = 714 # node must exist already to use G.nodes + >>> del G.nodes[1]["room"] # remove attribute + >>> list(G.nodes(data=True)) + [(1, {'time': '5pm'}), (3, {'time': '2pm'})] + + Add edge attributes using add_edge(), add_edges_from(), subscript + notation, or G.edges. + + >>> G.add_edge(1, 2, weight=4.7) + >>> G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2]["weight"] = 4.7 + >>> G.edges[1, 2]["weight"] = 4 + + Warning: we protect the graph data structure by making `G.edges` a + read-only dict-like structure. However, you can assign to attributes + in e.g. `G.edges[1, 2]`. Thus, use 2 sets of brackets to add/change + data attributes: `G.edges[1, 2]['weight'] = 4` + (For multigraphs: `MG.edges[u, v, key][name] = value`). + + **Shortcuts:** + + Many common graph features allow python syntax to speed reporting. + + >>> 1 in G # check if node in graph + True + >>> [n for n in G if n < 3] # iterate through nodes + [1, 2] + >>> len(G) # number of nodes in graph + 5 + + Often the best way to traverse all edges of a graph is via the neighbors. + The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()` + + >>> for n, nbrsdict in G.adjacency(): + ... for nbr, eattr in nbrsdict.items(): + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass + + But the edges() method is often more convenient: + + >>> for u, v, weight in G.edges.data("weight"): + ... 
if weight is not None: + ... # Do something useful with the edges + ... pass + + **Reporting:** + + Simple graph information is obtained using object-attributes and methods. + Reporting typically provides views instead of containers to reduce memory + usage. The views update as the graph is updated similarly to dict-views. + The objects `nodes`, `edges` and `adj` provide access to data attributes + via lookup (e.g. `nodes[n]`, `edges[u, v]`, `adj[u][v]`) and iteration + (e.g. `nodes.items()`, `nodes.data('color')`, + `nodes.data('color', default='blue')` and similarly for `edges`) + Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`. + + For details on these and other miscellaneous methods, see below. + + **Subclasses (Advanced):** + + The Graph class uses a dict-of-dict-of-dict data structure. + The outer dict (node_dict) holds adjacency information keyed by node. + The next dict (adjlist_dict) represents the adjacency information and holds + edge data keyed by neighbor. The inner dict (edge_attr_dict) represents + the edge data and holds edge attribute values keyed by attribute names. + + Each of these three dicts can be replaced in a subclass by a user defined + dict-like object. In general, the dict-like features should be + maintained but extra features can be added. To replace one of the + dicts create a new graph class by changing the class(!) variable + holding the factory for that dict-like structure. + + node_dict_factory : function, (default: dict) + Factory function to be used to create the dict containing node + attributes, keyed by node id. + It should require no arguments and return a dict-like object + + node_attr_dict_factory: function, (default: dict) + Factory function to be used to create the node attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object + + adjlist_outer_dict_factory : function, (default: dict) + Factory function to be used to create the outer-most dict + in the data structure that holds adjacency info keyed by node. + It should require no arguments and return a dict-like object. + + adjlist_inner_dict_factory : function, (default: dict) + Factory function to be used to create the adjacency list + dict which holds edge data keyed by neighbor. + It should require no arguments and return a dict-like object + + edge_attr_dict_factory : function, (default: dict) + Factory function to be used to create the edge attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. + + graph_attr_dict_factory : function, (default: dict) + Factory function to be used to create the graph attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object. + + Typically, if your extension doesn't impact the data structure all + methods will inherit without issue except: `to_directed/to_undirected`. + By default these methods create a DiGraph/Graph class and you probably + want them to create your extension of a DiGraph/Graph. To facilitate + this we define two class variables that you can set in your subclass. + + to_directed_class : callable, (default: DiGraph or MultiDiGraph) + Class to create a new graph structure in the `to_directed` method. + If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used. 
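+
+    As an illustrative sketch of the dict-of-dict-of-dict layout described
+    above (``_adj`` is an internal attribute, inspected here only for
+    exposition):
+
+    >>> G = nx.Graph()
+    >>> G.add_edge("a", "b", weight=2)
+    >>> G._adj
+    {'a': {'b': {'weight': 2}}, 'b': {'a': {'weight': 2}}}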
+
+    to_undirected_class : callable, (default: Graph or MultiGraph)
+        Class to create a new graph structure in the `to_undirected` method.
+        If `None`, a NetworkX class (Graph or MultiGraph) is used.
+
+    **Subclassing Example**
+
+    Create a low memory graph class that effectively disallows edge
+    attributes by using a single attribute dict for all edges.
+    This reduces the memory used, but you lose edge attributes.
+
+    >>> class ThinGraph(nx.Graph):
+    ...     all_edge_dict = {"weight": 1}
+    ...
+    ...     def single_edge_dict(self):
+    ...         return self.all_edge_dict
+    ...
+    ...     edge_attr_dict_factory = single_edge_dict
+    >>> G = ThinGraph()
+    >>> G.add_edge(2, 1)
+    >>> G[2][1]
+    {'weight': 1}
+    >>> G.add_edge(2, 2)
+    >>> G[2][1] is G[2][2]
+    True
+    """
+
+    __networkx_backend__ = "networkx"
+
+    _adj = _CachedPropertyResetterAdj()
+    _node = _CachedPropertyResetterNode()
+
+    node_dict_factory = dict
+    node_attr_dict_factory = dict
+    adjlist_outer_dict_factory = dict
+    adjlist_inner_dict_factory = dict
+    edge_attr_dict_factory = dict
+    graph_attr_dict_factory = dict
+
+    def to_directed_class(self):
+        """Returns the class to use for empty directed copies.
+
+        If you subclass the base classes, use this to designate
+        what directed class to use for `to_directed()` copies.
+        """
+        return nx.DiGraph
+
+    def to_undirected_class(self):
+        """Returns the class to use for empty undirected copies.
+
+        If you subclass the base classes, use this to designate
+        what undirected class to use for `to_undirected()` copies.
+        """
+        return Graph
+
+    def __init__(self, incoming_graph_data=None, **attr):
+        """Initialize a graph with edges, name, or graph attributes.
+
+        Parameters
+        ----------
+        incoming_graph_data : input graph (optional, default: None)
+            Data to initialize graph. If None (default) an empty
+            graph is created. The data can be an edge list, or any
+            NetworkX graph object. If the corresponding optional Python
+            packages are installed the data can also be a 2D NumPy array, a
+            SciPy sparse array, or a PyGraphviz graph.
+
+        attr : keyword arguments, optional (default= no attributes)
+            Attributes to add to graph as key=value pairs.
+
+        See Also
+        --------
+        convert
+
+        Examples
+        --------
+        >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
+        >>> G = nx.Graph(name="my graph")
+        >>> e = [(1, 2), (2, 3), (3, 4)]  # list of edges
+        >>> G = nx.Graph(e)
+
+        Arbitrary graph attribute pairs (key=value) may be assigned
+
+        >>> G = nx.Graph(e, day="Friday")
+        >>> G.graph
+        {'day': 'Friday'}
+
+        """
+        self.graph = self.graph_attr_dict_factory()  # dictionary for graph attributes
+        self._node = self.node_dict_factory()  # empty node attribute dict
+        self._adj = self.adjlist_outer_dict_factory()  # empty adjacency dict
+        self.__networkx_cache__ = {}
+        # attempt to load graph with data
+        if incoming_graph_data is not None:
+            convert.to_networkx_graph(incoming_graph_data, create_using=self)
+        # load graph attributes (must be after convert)
+        self.graph.update(attr)
+
+    @cached_property
+    def adj(self):
+        """Graph adjacency object holding the neighbors of each node.
+
+        This object is a read-only dict-like structure with node keys
+        and neighbor-dict values. The neighbor-dict is keyed by neighbor
+        to the edge-data-dict. So `G.adj[3][2]['color'] = 'blue'` sets
+        the color of the edge `(3, 2)` to `"blue"`.
+
+        Iterating over G.adj behaves like a dict. Useful idioms include
+        `for nbr, datadict in G.adj[n].items():`.
+
+        The neighbor information is also provided by subscripting the graph.
+ So `for nbr, foovalue in G[node].data('foo', default=1):` works. + + For directed graphs, `G.adj` holds outgoing (successor) info. + """ + return AdjacencyView(self._adj) + + @property + def name(self): + """String identifier of the graph. + + This graph attribute appears in the attribute dict G.graph + keyed by the string `"name"`. as well as an attribute (technically + a property) `G.name`. This is entirely user controlled. + """ + return self.graph.get("name", "") + + @name.setter + def name(self, s): + self.graph["name"] = s + nx._clear_cache(self) + + def __str__(self): + """Returns a short summary of the graph. + + Returns + ------- + info : string + Graph information including the graph name (if any), graph type, and the + number of nodes and edges. + + Examples + -------- + >>> G = nx.Graph(name="foo") + >>> str(G) + "Graph named 'foo' with 0 nodes and 0 edges" + + >>> G = nx.path_graph(3) + >>> str(G) + 'Graph with 3 nodes and 2 edges' + + """ + return "".join( + [ + type(self).__name__, + f" named {self.name!r}" if self.name else "", + f" with {self.number_of_nodes()} nodes and {self.number_of_edges()} edges", + ] + ) + + def __iter__(self): + """Iterate over the nodes. Use: 'for n in G'. + + Returns + ------- + niter : iterator + An iterator over all nodes in the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> [n for n in G] + [0, 1, 2, 3] + >>> list(G) + [0, 1, 2, 3] + """ + return iter(self._node) + + def __contains__(self, n): + """Returns True if n is a node, False otherwise. Use: 'n in G'. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> 1 in G + True + """ + try: + return n in self._node + except TypeError: + return False + + def __len__(self): + """Returns the number of nodes in the graph. Use: 'len(G)'. + + Returns + ------- + nnodes : int + The number of nodes in the graph. + + See Also + -------- + number_of_nodes: identical method + order: identical method + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> len(G) + 4 + + """ + return len(self._node) + + def __getitem__(self, n): + """Returns a dict of neighbors of node n. Use: 'G[n]'. + + Parameters + ---------- + n : node + A node in the graph. + + Returns + ------- + adj_dict : dictionary + The adjacency dictionary for nodes connected to n. + + Notes + ----- + G[n] is the same as G.adj[n] and similar to G.neighbors(n) + (which is an iterator over G.adj[n]) + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G[0] + AtlasView({1: {}}) + """ + return self.adj[n] + + def add_node(self, node_for_adding, **attr): + """Add a single node `node_for_adding` and update node attributes. + + Parameters + ---------- + node_for_adding : node + A node can be any hashable Python object except None. + attr : keyword arguments, optional + Set or change node attributes using key=value. + + See Also + -------- + add_nodes_from + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_node(1) + >>> G.add_node("Hello") + >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) + >>> G.add_node(K3) + >>> G.number_of_nodes() + 3 + + Use keywords set/change node attributes: + + >>> G.add_node(1, size=10) + >>> G.add_node(3, weight=0.4, UTM=("13S", 382871, 3972649)) + + Notes + ----- + A hashable object is one that can be used as a key in a Python + dictionary. 
This includes strings, numbers, tuples of strings + and numbers, etc. + + On many platforms hashable items also include mutables such as + NetworkX Graphs, though one should be careful that the hash + doesn't change on mutables. + """ + if node_for_adding not in self._node: + if node_for_adding is None: + raise ValueError("None cannot be a node") + self._adj[node_for_adding] = self.adjlist_inner_dict_factory() + attr_dict = self._node[node_for_adding] = self.node_attr_dict_factory() + attr_dict.update(attr) + else: # update attr even if node already exists + self._node[node_for_adding].update(attr) + nx._clear_cache(self) + + def add_nodes_from(self, nodes_for_adding, **attr): + """Add multiple nodes. + + Parameters + ---------- + nodes_for_adding : iterable container + A container of nodes (list, dict, set, etc.). + OR + A container of (node, attribute dict) tuples. + Node attributes are updated using the attribute dict. + attr : keyword arguments, optional (default= no attributes) + Update attributes for all nodes in nodes. + Node attributes specified in nodes as a tuple take + precedence over attributes specified via keyword arguments. + + See Also + -------- + add_node + + Notes + ----- + When adding nodes from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.add_nodes_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_nodes_from("Hello") + >>> K3 = nx.Graph([(0, 1), (1, 2), (2, 0)]) + >>> G.add_nodes_from(K3) + >>> sorted(G.nodes(), key=str) + [0, 1, 2, 'H', 'e', 'l', 'o'] + + Use keywords to update specific node attributes for every node. + + >>> G.add_nodes_from([1, 2], size=10) + >>> G.add_nodes_from([3, 4], weight=0.4) + + Use (node, attrdict) tuples to update attributes for specific nodes. + + >>> G.add_nodes_from([(1, dict(size=11)), (2, {"color": "blue"})]) + >>> G.nodes[1]["size"] + 11 + >>> H = nx.Graph() + >>> H.add_nodes_from(G.nodes(data=True)) + >>> H.nodes[1]["size"] + 11 + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.Graph([(0, 1), (1, 2), (3, 4)]) + >>> # wrong way - will raise RuntimeError + >>> # G.add_nodes_from(n + 1 for n in G.nodes) + >>> # correct way + >>> G.add_nodes_from(list(n + 1 for n in G.nodes)) + """ + for n in nodes_for_adding: + try: + newnode = n not in self._node + newdict = attr + except TypeError: + n, ndict = n + newnode = n not in self._node + newdict = attr.copy() + newdict.update(ndict) + if newnode: + if n is None: + raise ValueError("None cannot be a node") + self._adj[n] = self.adjlist_inner_dict_factory() + self._node[n] = self.node_attr_dict_factory() + self._node[n].update(newdict) + nx._clear_cache(self) + + def remove_node(self, n): + """Remove node n. + + Removes the node n and all adjacent edges. + Attempting to remove a nonexistent node will raise an exception. + + Parameters + ---------- + n : node + A node in the graph + + Raises + ------ + NetworkXError + If n is not in the graph. 
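+
+        For instance, a sketch of the error case (the message text is taken
+        from the implementation below):
+
+        >>> G = nx.Graph([(1, 2)])
+        >>> G.remove_node(9)
+        Traceback (most recent call last):
+            ...
+        networkx.exception.NetworkXError: The node 9 is not in the graph.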
+ + See Also + -------- + remove_nodes_from + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> list(G.edges) + [(0, 1), (1, 2)] + >>> G.remove_node(1) + >>> list(G.edges) + [] + + """ + adj = self._adj + try: + nbrs = list(adj[n]) # list handles self-loops (allows mutation) + del self._node[n] + except KeyError as err: # NetworkXError if n not in self + raise NetworkXError(f"The node {n} is not in the graph.") from err + for u in nbrs: + del adj[u][n] # remove all edges n-u in graph + del adj[n] # now remove node + nx._clear_cache(self) + + def remove_nodes_from(self, nodes): + """Remove multiple nodes. + + Parameters + ---------- + nodes : iterable container + A container of nodes (list, dict, set, etc.). If a node + in the container is not in the graph it is silently + ignored. + + See Also + -------- + remove_node + + Notes + ----- + When removing nodes from an iterator over the graph you are changing, + a `RuntimeError` will be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_nodes)`, and pass this + object to `G.remove_nodes_from`. + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> e = list(G.nodes) + >>> e + [0, 1, 2] + >>> G.remove_nodes_from(e) + >>> list(G.nodes) + [] + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.Graph([(0, 1), (1, 2), (3, 4)]) + >>> # this command will fail, as the graph's dict is modified during iteration + >>> # G.remove_nodes_from(n for n in G.nodes if n < 2) + >>> # this command will work, since the dictionary underlying graph is not modified + >>> G.remove_nodes_from(list(n for n in G.nodes if n < 2)) + """ + adj = self._adj + for n in nodes: + try: + del self._node[n] + for u in list(adj[n]): # list handles self-loops + del adj[u][n] # (allows mutation of dict in loop) + del adj[n] + except KeyError: + pass + nx._clear_cache(self) + + @cached_property + def nodes(self): + """A NodeView of the Graph as G.nodes or G.nodes(). + + Can be used as `G.nodes` for data lookup and for set-like operations. + Can also be used as `G.nodes(data='color', default=None)` to return a + NodeDataView which reports specific node data but no set operations. + It presents a dict-like interface as well with `G.nodes.items()` + iterating over `(node, nodedata)` 2-tuples and `G.nodes[3]['foo']` + providing the value of the `foo` attribute for node `3`. In addition, + a view `G.nodes.data('foo')` provides a dict-like interface to the + `foo` attribute of each node. `G.nodes.data('foo', default=1)` + provides a default for nodes that do not have attribute `foo`. + + Parameters + ---------- + data : string or bool, optional (default=False) + The node attribute returned in 2-tuple (n, ddict[data]). + If True, return entire node attribute dict as (n, ddict). + If False, return just the nodes n. + + default : value, optional (default=None) + Value used for nodes that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + NodeView + Allows set-like operations over the nodes as well as node + attribute dict lookup and calling to get a NodeDataView. + A NodeDataView iterates over `(n, data)` and has no set operations. + A NodeView iterates over `n` and includes set operations. 
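+
+        For instance, a sketch of the set-like operations (the graphs are
+        chosen for illustration):
+
+        >>> G = nx.path_graph(3)
+        >>> H = nx.path_graph(5)
+        >>> sorted(G.nodes & H.nodes)  # intersection of the two node views
+        [0, 1, 2]
+        >>> sorted(H.nodes - G.nodes)  # nodes only in H
+        [3, 4]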
+ + When called, if data is False, an iterator over nodes. + Otherwise an iterator of 2-tuples (node, attribute value) + where the attribute is specified in `data`. + If data is True then the attribute becomes the + entire data dictionary. + + Notes + ----- + If your node data is not needed, it is simpler and equivalent + to use the expression ``for n in G``, or ``list(G)``. + + Examples + -------- + There are two simple ways of getting a list of all nodes in the graph: + + >>> G = nx.path_graph(3) + >>> list(G.nodes) + [0, 1, 2] + >>> list(G) + [0, 1, 2] + + To get the node data along with the nodes: + + >>> G.add_node(1, time="5pm") + >>> G.nodes[0]["foo"] = "bar" + >>> list(G.nodes(data=True)) + [(0, {'foo': 'bar'}), (1, {'time': '5pm'}), (2, {})] + >>> list(G.nodes.data()) + [(0, {'foo': 'bar'}), (1, {'time': '5pm'}), (2, {})] + + >>> list(G.nodes(data="foo")) + [(0, 'bar'), (1, None), (2, None)] + >>> list(G.nodes.data("foo")) + [(0, 'bar'), (1, None), (2, None)] + + >>> list(G.nodes(data="time")) + [(0, None), (1, '5pm'), (2, None)] + >>> list(G.nodes.data("time")) + [(0, None), (1, '5pm'), (2, None)] + + >>> list(G.nodes(data="time", default="Not Available")) + [(0, 'Not Available'), (1, '5pm'), (2, 'Not Available')] + >>> list(G.nodes.data("time", default="Not Available")) + [(0, 'Not Available'), (1, '5pm'), (2, 'Not Available')] + + If some of your nodes have an attribute and the rest are assumed + to have a default attribute value you can create a dictionary + from node/attribute pairs using the `default` keyword argument + to guarantee the value is never None:: + + >>> G = nx.Graph() + >>> G.add_node(0) + >>> G.add_node(1, weight=2) + >>> G.add_node(2, weight=3) + >>> dict(G.nodes(data="weight", default=1)) + {0: 1, 1: 2, 2: 3} + + """ + return NodeView(self) + + def number_of_nodes(self): + """Returns the number of nodes in the graph. + + Returns + ------- + nnodes : int + The number of nodes in the graph. + + See Also + -------- + order: identical method + __len__: identical method + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.number_of_nodes() + 3 + """ + return len(self._node) + + def order(self): + """Returns the number of nodes in the graph. + + Returns + ------- + nnodes : int + The number of nodes in the graph. + + See Also + -------- + number_of_nodes: identical method + __len__: identical method + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.order() + 3 + """ + return len(self._node) + + def has_node(self, n): + """Returns True if the graph contains the node n. + + Identical to `n in G` + + Parameters + ---------- + n : node + + Examples + -------- + >>> G = nx.path_graph(3) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.has_node(0) + True + + It is more readable and simpler to use + + >>> 0 in G + True + + """ + try: + return n in self._node + except TypeError: + return False + + def add_edge(self, u_of_edge, v_of_edge, **attr): + """Add an edge between u and v. + + The nodes u and v will be automatically added if they are + not already in the graph. + + Edge attributes can be specified with keywords or by directly + accessing the edge's attribute dictionary. See examples below. + + Parameters + ---------- + u_of_edge, v_of_edge : nodes + Nodes can be, for example, strings or numbers. + Nodes must be hashable (and not None) Python objects. 
+ attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + See Also + -------- + add_edges_from : add a collection of edges + + Notes + ----- + Adding an edge that already exists updates the edge data. + + Many NetworkX algorithms designed for weighted graphs use + an edge attribute (by default `weight`) to hold a numerical value. + + Examples + -------- + The following all add the edge e=(1, 2) to graph G: + + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> e = (1, 2) + >>> G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container + + Associate data to edges using keywords: + + >>> G.add_edge(1, 2, weight=3) + >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) + + For non-string attribute keys, use subscript notation. + + >>> G.add_edge(1, 2) + >>> G[1][2].update({0: 5}) + >>> G.edges[1, 2].update({0: 5}) + """ + u, v = u_of_edge, v_of_edge + # add nodes + if u not in self._node: + if u is None: + raise ValueError("None cannot be a node") + self._adj[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._node: + if v is None: + raise ValueError("None cannot be a node") + self._adj[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + # add the edge + datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) + datadict.update(attr) + self._adj[u][v] = datadict + self._adj[v][u] = datadict + nx._clear_cache(self) + + def add_edges_from(self, ebunch_to_add, **attr): + """Add all the edges in ebunch_to_add. + + Parameters + ---------- + ebunch_to_add : container of edges + Each edge given in the container will be added to the + graph. The edges must be given as 2-tuples (u, v) or + 3-tuples (u, v, d) where d is a dictionary containing edge data. + attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + See Also + -------- + add_edge : add a single edge + add_weighted_edges_from : convenient way to add weighted edges + + Notes + ----- + Adding the same edge twice has no effect but any edge data + will be updated when each duplicate edge is added. + + Edge attributes specified in an ebunch take precedence over + attributes specified via keyword arguments. + + When adding edges from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_edges)`, and pass this + object to `G.add_edges_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> e = zip(range(0, 3), range(1, 4)) + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + + Associate data to edges + + >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + >>> # Grow graph by one new node, adding edges to all existing nodes. 
+ >>> # wrong way - will raise RuntimeError + >>> # G.add_edges_from(((5, n) for n in G.nodes)) + >>> # correct way - note that there will be no self-edge for node 5 + >>> G.add_edges_from(list((5, n) for n in G.nodes)) + """ + for e in ebunch_to_add: + ne = len(e) + if ne == 3: + u, v, dd = e + elif ne == 2: + u, v = e + dd = {} # doesn't need edge_attr_dict_factory + else: + raise NetworkXError(f"Edge tuple {e} must be a 2-tuple or 3-tuple.") + if u not in self._node: + if u is None: + raise ValueError("None cannot be a node") + self._adj[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._node: + if v is None: + raise ValueError("None cannot be a node") + self._adj[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + datadict = self._adj[u].get(v, self.edge_attr_dict_factory()) + datadict.update(attr) + datadict.update(dd) + self._adj[u][v] = datadict + self._adj[v][u] = datadict + nx._clear_cache(self) + + def add_weighted_edges_from(self, ebunch_to_add, weight="weight", **attr): + """Add weighted edges in `ebunch_to_add` with specified weight attr + + Parameters + ---------- + ebunch_to_add : container of edges + Each edge given in the list or container will be added + to the graph. The edges must be given as 3-tuples (u, v, w) + where w is a number. + weight : string, optional (default= 'weight') + The attribute name for the edge weights to be added. + attr : keyword arguments, optional (default= no attributes) + Edge attributes to add/update for all edges. + + See Also + -------- + add_edge : add a single edge + add_edges_from : add multiple edges + + Notes + ----- + Adding the same edge twice for Graph/DiGraph simply updates + the edge data. For MultiGraph/MultiDiGraph, duplicate edges + are stored. + + When adding edges from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_edges)`, and pass this + object to `G.add_weighted_edges_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_weighted_edges_from([(0, 1, 3.0), (1, 2, 7.5)]) + + Evaluate an iterator over edges before passing it + + >>> G = nx.Graph([(1, 2), (2, 3), (3, 4)]) + >>> weight = 0.1 + >>> # Grow graph by one new node, adding edges to all existing nodes. + >>> # wrong way - will raise RuntimeError + >>> # G.add_weighted_edges_from(((5, n, weight) for n in G.nodes)) + >>> # correct way - note that there will be no self-edge for node 5 + >>> G.add_weighted_edges_from(list((5, n, weight) for n in G.nodes)) + """ + self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch_to_add), **attr) + nx._clear_cache(self) + + def remove_edge(self, u, v): + """Remove the edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove the edge between nodes u and v. + + Raises + ------ + NetworkXError + If there is not an edge between u and v. 
+ + See Also + -------- + remove_edges_from : remove a collection of edges + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, etc + >>> G.remove_edge(0, 1) + >>> e = (1, 2) + >>> G.remove_edge(*e) # unpacks e from an edge tuple + >>> e = (2, 3, {"weight": 7}) # an edge with attribute data + >>> G.remove_edge(*e[:2]) # select first part of edge tuple + """ + try: + del self._adj[u][v] + if u != v: # self-loop needs only one entry removed + del self._adj[v][u] + except KeyError as err: + raise NetworkXError(f"The edge {u}-{v} is not in the graph") from err + nx._clear_cache(self) + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each edge given in the list or container will be removed + from the graph. The edges can be: + + - 2-tuples (u, v) edge between u and v. + - 3-tuples (u, v, k) where k is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + """ + adj = self._adj + for e in ebunch: + u, v = e[:2] # ignore edge data if present + if u in adj and v in adj[u]: + del adj[u][v] + if u != v: # self loop needs only one entry removed + del adj[v][u] + nx._clear_cache(self) + + def update(self, edges=None, nodes=None): + """Update the graph using nodes/edges/graphs as input. + + Like dict.update, this method takes a graph as input, adding the + graph's nodes and edges to this graph. It can also take two inputs: + edges and nodes. Finally it can take either edges or nodes. + To specify only nodes the keyword `nodes` must be used. + + The collections of edges and nodes are treated similarly to + the add_edges_from/add_nodes_from methods. When iterated, they + should yield 2-tuples (u, v) or 3-tuples (u, v, datadict). + + Parameters + ---------- + edges : Graph object, collection of edges, or None + The first parameter can be a graph or some edges. If it has + attributes `nodes` and `edges`, then it is taken to be a + Graph-like object and those attributes are used as collections + of nodes and edges to be added to the graph. + If the first parameter does not have those attributes, it is + treated as a collection of edges and added to the graph. + If the first argument is None, no edges are added. + nodes : collection of nodes, or None + The second parameter is treated as a collection of nodes + to be added to the graph unless it is None. + If `edges is None` and `nodes is None` an exception is raised. + If the first parameter is a Graph, then `nodes` is ignored. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> G.update(nx.complete_graph(range(4, 10))) + >>> from itertools import combinations + >>> edges = ( + ... (u, v, {"power": u * v}) + ... for u, v in combinations(range(10, 20), 2) + ... if u * v < 225 + ... ) + >>> nodes = [1000] # for singleton, use a container + >>> G.update(edges, nodes) + + Notes + ----- + It you want to update the graph using an adjacency structure + it is straightforward to obtain the edges/nodes from adjacency. 
+ The following examples provide common cases, your adjacency may + be slightly different and require tweaks of these examples:: + + >>> # dict-of-set/list/tuple + >>> adj = {1: {2, 3}, 2: {1, 3}, 3: {1, 2}} + >>> e = [(u, v) for u, nbrs in adj.items() for v in nbrs] + >>> G.update(edges=e, nodes=adj) + + >>> DG = nx.DiGraph() + >>> # dict-of-dict-of-attribute + >>> adj = {1: {2: 1.3, 3: 0.7}, 2: {1: 1.4}, 3: {1: 0.7}} + >>> e = [ + ... (u, v, {"weight": d}) + ... for u, nbrs in adj.items() + ... for v, d in nbrs.items() + ... ] + >>> DG.update(edges=e, nodes=adj) + + >>> # dict-of-dict-of-dict + >>> adj = {1: {2: {"weight": 1.3}, 3: {"color": 0.7, "weight": 1.2}}} + >>> e = [ + ... (u, v, {"weight": d}) + ... for u, nbrs in adj.items() + ... for v, d in nbrs.items() + ... ] + >>> DG.update(edges=e, nodes=adj) + + >>> # predecessor adjacency (dict-of-set) + >>> pred = {1: {2, 3}, 2: {3}, 3: {3}} + >>> e = [(v, u) for u, nbrs in pred.items() for v in nbrs] + + >>> # MultiGraph dict-of-dict-of-dict-of-attribute + >>> MDG = nx.MultiDiGraph() + >>> adj = { + ... 1: {2: {0: {"weight": 1.3}, 1: {"weight": 1.2}}}, + ... 3: {2: {0: {"weight": 0.7}}}, + ... } + >>> e = [ + ... (u, v, ekey, d) + ... for u, nbrs in adj.items() + ... for v, keydict in nbrs.items() + ... for ekey, d in keydict.items() + ... ] + >>> MDG.update(edges=e) + + See Also + -------- + add_edges_from: add multiple edges to a graph + add_nodes_from: add multiple nodes to a graph + """ + if edges is not None: + if nodes is not None: + self.add_nodes_from(nodes) + self.add_edges_from(edges) + else: + # check if edges is a Graph object + try: + graph_nodes = edges.nodes + graph_edges = edges.edges + except AttributeError: + # edge not Graph-like + self.add_edges_from(edges) + else: # edges is Graph-like + self.add_nodes_from(graph_nodes.data()) + self.add_edges_from(graph_edges.data()) + self.graph.update(edges.graph) + elif nodes is not None: + self.add_nodes_from(nodes) + else: + raise NetworkXError("update needs nodes or edges input") + + def has_edge(self, u, v): + """Returns True if the edge (u, v) is in the graph. + + This is the same as `v in G[u]` without KeyError exceptions. + + Parameters + ---------- + u, v : nodes + Nodes can be, for example, strings or numbers. + Nodes must be hashable (and not None) Python objects. + + Returns + ------- + edge_ind : bool + True if edge is in the graph, False otherwise. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.has_edge(0, 1) # using two nodes + True + >>> e = (0, 1) + >>> G.has_edge(*e) # e is a 2-tuple (u, v) + True + >>> e = (0, 1, {"weight": 7}) + >>> G.has_edge(*e[:2]) # e is a 3-tuple (u, v, data_dictionary) + True + + The following syntax are equivalent: + + >>> G.has_edge(0, 1) + True + >>> 1 in G[0] # though this gives KeyError if 0 not in G + True + + """ + try: + return v in self._adj[u] + except KeyError: + return False + + def neighbors(self, n): + """Returns an iterator over all neighbors of node n. + + This is identical to `iter(G[n])` + + Parameters + ---------- + n : node + A node in the graph + + Returns + ------- + neighbors : iterator + An iterator over all neighbors of node n + + Raises + ------ + NetworkXError + If the node n is not in the graph. 
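+
+        For instance, a sketch of the equivalence to `iter(G[n])` noted
+        above:
+
+        >>> G = nx.path_graph(3)
+        >>> list(G.neighbors(1)) == list(G[1])
+        True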
+ + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> [n for n in G.neighbors(0)] + [1] + + Notes + ----- + Alternate ways to access the neighbors are ``G.adj[n]`` or ``G[n]``: + + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge("a", "b", weight=7) + >>> G["a"] + AtlasView({'b': {'weight': 7}}) + >>> G = nx.path_graph(4) + >>> [n for n in G[0]] + [1] + """ + try: + return iter(self._adj[n]) + except KeyError as err: + raise NetworkXError(f"The node {n} is not in the graph.") from err + + @cached_property + def edges(self): + """An EdgeView of the Graph as G.edges or G.edges(). + + edges(self, nbunch=None, data=False, default=None) + + The EdgeView provides set-like operations on the edge-tuples + as well as edge attribute lookup. When called, it also provides + an EdgeDataView object which allows control of access to edge + attributes (but does not provide set-like operations). + Hence, `G.edges[u, v]['color']` provides the value of the color + attribute for edge `(u, v)` while + `for (u, v, c) in G.edges.data('color', default='red'):` + iterates through all the edges yielding the color attribute + with default `'red'` if no color attribute exists. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges from these nodes. + data : string or bool, optional (default=False) + The edge attribute returned in 3-tuple (u, v, ddict[data]). + If True, return edge attribute dict in 3-tuple (u, v, ddict). + If False, return 2-tuple (u, v). + default : value, optional (default=None) + Value used for edges that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + edges : EdgeView + A view of edge attributes, usually it iterates over (u, v) + or (u, v, d) tuples of edges, but can also be used for + attribute lookup as `edges[u, v]['foo']`. + + Notes + ----- + Nodes in nbunch that are not in the graph will be (quietly) ignored. + For directed graphs this returns the out-edges. + + Examples + -------- + >>> G = nx.path_graph(3) # or MultiGraph, etc + >>> G.add_edge(2, 3, weight=5) + >>> [e for e in G.edges] + [(0, 1), (1, 2), (2, 3)] + >>> G.edges.data() # default data is {} (empty dict) + EdgeDataView([(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]) + >>> G.edges.data("weight", default=1) + EdgeDataView([(0, 1, 1), (1, 2, 1), (2, 3, 5)]) + >>> G.edges([0, 3]) # only edges from these nodes + EdgeDataView([(0, 1), (3, 2)]) + >>> G.edges(0) # only edges from node 0 + EdgeDataView([(0, 1)]) + """ + return EdgeView(self) + + def get_edge_data(self, u, v, default=None): + """Returns the attribute dictionary associated with edge (u, v). + + This is identical to `G[u][v]` except the default is returned + instead of an exception if the edge doesn't exist. + + Parameters + ---------- + u, v : nodes + default: any Python object (default=None) + Value to return if the edge (u, v) is not found. + + Returns + ------- + edge_dict : dictionary + The edge attribute dictionary. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G[0][1] + {} + + Warning: Assigning to `G[u][v]` is not permitted. 
+ But it is safe to assign attributes `G[u][v]['foo']` + + >>> G[0][1]["weight"] = 7 + >>> G[0][1]["weight"] + 7 + >>> G[1][0]["weight"] + 7 + + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.get_edge_data(0, 1) # default edge data is {} + {} + >>> e = (0, 1) + >>> G.get_edge_data(*e) # tuple form + {} + >>> G.get_edge_data("a", "b", default=0) # edge not in graph, return 0 + 0 + """ + try: + return self._adj[u][v] + except KeyError: + return default + + def adjacency(self): + """Returns an iterator over (node, adjacency dict) tuples for all nodes. + + For directed graphs, only outgoing neighbors/adjacencies are included. + + Returns + ------- + adj_iter : iterator + An iterator over (node, adjacency dictionary) for all nodes in + the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> [(n, nbrdict) for n, nbrdict in G.adjacency()] + [(0, {1: {}}), (1, {0: {}, 2: {}}), (2, {1: {}, 3: {}}), (3, {2: {}})] + + """ + return iter(self._adj.items()) + + @cached_property + def degree(self): + """A DegreeView for the Graph as G.degree or G.degree(). + + The node degree is the number of edges adjacent to the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iterator for (node, degree) as well as + lookup for the degree for a single node. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + DegreeView or int + If multiple nodes are requested (the default), returns a `DegreeView` + mapping nodes to their degree. + If a single node is requested, returns the degree of the node as an integer. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.degree[0] # node 0 has degree 1 + 1 + >>> list(G.degree([0, 1, 2])) + [(0, 1), (1, 2), (2, 2)] + """ + return DegreeView(self) + + def clear(self): + """Remove all nodes and edges from the graph. + + This also removes the name, and all graph, node, and edge attributes. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.clear() + >>> list(G.nodes) + [] + >>> list(G.edges) + [] + + """ + self._adj.clear() + self._node.clear() + self.graph.clear() + nx._clear_cache(self) + + def clear_edges(self): + """Remove all edges from the graph without altering nodes. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.clear_edges() + >>> list(G.nodes) + [0, 1, 2, 3] + >>> list(G.edges) + [] + """ + for nbr_dict in self._adj.values(): + nbr_dict.clear() + nx._clear_cache(self) + + def is_multigraph(self): + """Returns True if graph is a multigraph, False otherwise.""" + return False + + def is_directed(self): + """Returns True if graph is directed, False otherwise.""" + return False + + def copy(self, as_view=False): + """Returns a copy of the graph. + + The copy method by default returns an independent shallow copy + of the graph and attributes. That is, if an attribute is a + container, that container is shared by the original an the copy. + Use Python's `copy.deepcopy` for new containers. 
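+
+        For instance, a sketch of the container sharing just described
+        (the ``langs`` attribute is illustrative):
+
+        >>> G = nx.Graph()
+        >>> G.add_node(0, langs=["en"])
+        >>> H = G.copy()
+        >>> H.nodes[0]["langs"].append("fr")  # the list container is shared
+        >>> G.nodes[0]["langs"]
+        ['en', 'fr']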
+ + If `as_view` is True then a view is returned instead of a copy. + + Notes + ----- + All copies reproduce the graph structure, but data attributes + may be handled in different ways. There are four types of copies + of a graph that people might want. + + Deepcopy -- A "deepcopy" copies the graph structure as well as + all data attributes and any objects they might contain. + The entire graph object is new so that changes in the copy + do not affect the original object. (see Python's copy.deepcopy) + + Data Reference (Shallow) -- For a shallow copy the graph structure + is copied but the edge, node and graph attribute dicts are + references to those in the original graph. This saves + time and memory but could cause confusion if you change an attribute + in one graph and it changes the attribute in the other. + NetworkX does not provide this level of shallow copy. + + Independent Shallow -- This copy creates new independent attribute + dicts and then does a shallow copy of the attributes. That is, any + attributes that are containers are shared between the new graph + and the original. This is exactly what `dict.copy()` provides. + You can obtain this style copy using: + + >>> G = nx.path_graph(5) + >>> H = G.copy() + >>> H = G.copy(as_view=False) + >>> H = nx.Graph(G) + >>> H = G.__class__(G) + + Fresh Data -- For fresh data, the graph structure is copied while + new empty data attribute dicts are created. The resulting graph + is independent of the original and it has no edge, node or graph + attributes. Fresh copies are not enabled. Instead use: + + >>> H = G.__class__() + >>> H.add_nodes_from(G) + >>> H.add_edges_from(G.edges) + + View -- Inspired by dict-views, graph-views act like read-only + versions of the original graph, providing a copy of the original + structure without requiring any memory for copying the information. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Parameters + ---------- + as_view : bool, optional (default=False) + If True, the returned graph-view provides a read-only view + of the original graph without actually copying any data. + + Returns + ------- + G : Graph + A copy of the graph. + + See Also + -------- + to_directed: return a directed copy of the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> H = G.copy() + + """ + if as_view is True: + return nx.graphviews.generic_graph_view(self) + G = self.__class__() + G.graph.update(self.graph) + G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) + G.add_edges_from( + (u, v, datadict.copy()) + for u, nbrs in self._adj.items() + for v, datadict in nbrs.items() + ) + return G + + def to_directed(self, as_view=False): + """Returns a directed representation of the graph. + + Returns + ------- + G : DiGraph + A directed graph with the same name, same nodes, and with + each edge (u, v, data) replaced by two directed edges + (u, v, data) and (v, u, data). + + Notes + ----- + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar D=DiGraph(G) which returns a + shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. 
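+
+        For instance, a sketch of the deepcopy semantics (the weight value
+        is illustrative):
+
+        >>> G = nx.Graph()
+        >>> G.add_edge(0, 1, weight=1)
+        >>> D = G.to_directed()
+        >>> D[0][1]["weight"] = 99  # mutate the directed copy
+        >>> G[0][1]["weight"]  # the original is unchanged
+        1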
+ + Warning: If you have subclassed Graph to use dict-like objects + in the data structure, those changes do not transfer to the + DiGraph created by this method. + + Examples + -------- + >>> G = nx.Graph() # or MultiGraph, etc + >>> G.add_edge(0, 1) + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1), (1, 0)] + + If already directed, return a (deep) copy + + >>> G = nx.DiGraph() # or MultiDiGraph, etc + >>> G.add_edge(0, 1) + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1)] + """ + graph_class = self.to_directed_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + G.add_edges_from( + (u, v, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, data in nbrs.items() + ) + return G + + def to_undirected(self, as_view=False): + """Returns an undirected copy of the graph. + + Parameters + ---------- + as_view : bool (optional, default=False) + If True return a view of the original undirected graph. + + Returns + ------- + G : Graph/MultiGraph + A deepcopy of the graph. + + See Also + -------- + Graph, copy, add_edge, add_edges_from + + Notes + ----- + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar `G = nx.DiGraph(D)` which returns a + shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Warning: If you have subclassed DiGraph to use dict-like objects + in the data structure, those changes do not transfer to the + Graph created by this method. + + Examples + -------- + >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1), (1, 0)] + >>> G2 = H.to_undirected() + >>> list(G2.edges) + [(0, 1)] + """ + graph_class = self.to_undirected_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + G.add_edges_from( + (u, v, deepcopy(d)) + for u, nbrs in self._adj.items() + for v, d in nbrs.items() + ) + return G + + def subgraph(self, nodes): + """Returns a SubGraph view of the subgraph induced on `nodes`. + + The induced subgraph of the graph contains the nodes in `nodes` + and the edges between those nodes. + + Parameters + ---------- + nodes : list, iterable + A container of nodes which will be iterated through once. + + Returns + ------- + G : SubGraph View + A subgraph view of the graph. The graph structure cannot be + changed but node/edge attributes can and are shared with the + original graph. + + Notes + ----- + The graph, edge and node attributes are shared with the original graph. + Changes to the graph structure is ruled out by the view, but changes + to attributes are reflected in the original graph. + + To create a subgraph with its own copy of the edge/node attributes use: + G.subgraph(nodes).copy() + + For an inplace reduction of a graph to a subgraph you can remove nodes: + G.remove_nodes_from([n for n in G if n not in set(nodes)]) + + Subgraph views are sometimes NOT what you want. 
In most cases where + you want to do more than simply look at the induced edges, it makes + more sense to just create the subgraph as its own graph with code like: + + :: + + # Create a subgraph SG based on a (possibly multigraph) G + SG = G.__class__() + SG.add_nodes_from((n, G.nodes[n]) for n in largest_wcc) + if SG.is_multigraph(): + SG.add_edges_from( + (n, nbr, key, d) + for n, nbrs in G.adj.items() + if n in largest_wcc + for nbr, keydict in nbrs.items() + if nbr in largest_wcc + for key, d in keydict.items() + ) + else: + SG.add_edges_from( + (n, nbr, d) + for n, nbrs in G.adj.items() + if n in largest_wcc + for nbr, d in nbrs.items() + if nbr in largest_wcc + ) + SG.graph.update(G.graph) + + Subgraphs are not guaranteed to preserve the order of nodes or edges + as they appear in the original graph. For example: + + >>> G = nx.Graph() + >>> G.add_nodes_from(reversed(range(10))) + >>> list(G) + [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + >>> list(G.subgraph([1, 3, 2])) + [1, 2, 3] + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> H = G.subgraph([0, 1, 2]) + >>> list(H.edges) + [(0, 1), (1, 2)] + """ + induced_nodes = nx.filters.show_nodes(self.nbunch_iter(nodes)) + # if already a subgraph, don't make a chain + subgraph = nx.subgraph_view + if hasattr(self, "_NODE_OK"): + return subgraph( + self._graph, filter_node=induced_nodes, filter_edge=self._EDGE_OK + ) + return subgraph(self, filter_node=induced_nodes) + + def edge_subgraph(self, edges): + """Returns the subgraph induced by the specified edges. + + The induced subgraph contains each edge in `edges` and each + node incident to any one of those edges. + + Parameters + ---------- + edges : iterable + An iterable of edges in this graph. + + Returns + ------- + G : Graph + An edge-induced subgraph of this graph with the same edge + attributes. + + Notes + ----- + The graph, edge, and node attributes in the returned subgraph + view are references to the corresponding attributes in the original + graph. The view is read-only. + + To create a full graph version of the subgraph with its own copy + of the edge or node attributes, use:: + + G.edge_subgraph(edges).copy() + + Examples + -------- + >>> G = nx.path_graph(5) + >>> H = G.edge_subgraph([(0, 1), (3, 4)]) + >>> list(H.nodes) + [0, 1, 3, 4] + >>> list(H.edges) + [(0, 1), (3, 4)] + + """ + return nx.edge_subgraph(self, edges) + + def size(self, weight=None): + """Returns the number of edges or total of all edge weights. + + Parameters + ---------- + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + + Returns + ------- + size : numeric + The number of edges or + (if weight keyword is provided) the total weight sum. + + If weight is None, returns an int. Otherwise a float + (or more general numeric if the weights are more general). + + See Also + -------- + number_of_edges + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.size() + 3 + + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edge("a", "b", weight=2) + >>> G.add_edge("b", "c", weight=4) + >>> G.size() + 2 + >>> G.size(weight="weight") + 6.0 + """ + s = sum(d for v, d in self.degree(weight=weight)) + # If `weight` is None, the sum of the degrees is guaranteed to be + # even, so we can perform integer division and hence return an + # integer. 
Otherwise, the sum of the weighted degrees is not + # guaranteed to be an integer, so we perform "real" division. + return s // 2 if weight is None else s / 2 + + def number_of_edges(self, u=None, v=None): + """Returns the number of edges between two nodes. + + Parameters + ---------- + u, v : nodes, optional (default=all edges) + If u and v are specified, return the number of edges between + u and v. Otherwise return the total number of all edges. + + Returns + ------- + nedges : int + The number of edges in the graph. If nodes `u` and `v` are + specified return the number of edges between those nodes. If + the graph is directed, this only returns the number of edges + from `u` to `v`. + + See Also + -------- + size + + Examples + -------- + For undirected graphs, this method counts the total number of + edges in the graph: + + >>> G = nx.path_graph(4) + >>> G.number_of_edges() + 3 + + If you specify two nodes, this counts the total number of edges + joining the two nodes: + + >>> G.number_of_edges(0, 1) + 1 + + For directed graphs, this method can count the total number of + directed edges from `u` to `v`: + + >>> G = nx.DiGraph() + >>> G.add_edge(0, 1) + >>> G.add_edge(1, 0) + >>> G.number_of_edges(0, 1) + 1 + + """ + if u is None: + return int(self.size()) + if v in self._adj[u]: + return 1 + return 0 + + def nbunch_iter(self, nbunch=None): + """Returns an iterator over nodes contained in nbunch that are + also in the graph. + + The nodes in an iterable nbunch are checked for membership in the graph + and if not are silently ignored. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + Returns + ------- + niter : iterator + An iterator over nodes in nbunch that are also in the graph. + If nbunch is None, iterate over all nodes in the graph. + + Raises + ------ + NetworkXError + If nbunch is not a node or sequence of nodes. + If a node in nbunch is not hashable. + + See Also + -------- + Graph.__iter__ + + Notes + ----- + When nbunch is an iterator, the returned iterator yields values + directly from nbunch, becoming exhausted when nbunch is exhausted. + + To test whether nbunch is a single node, one can use + "if nbunch in self:", even after processing with this routine. + + If nbunch is not a node or a (possibly empty) sequence/iterator + or None, a :exc:`NetworkXError` is raised. Also, if any object in + nbunch is not hashable, a :exc:`NetworkXError` is raised. + """ + if nbunch is None: # include all nodes via iterator + bunch = iter(self._adj) + elif nbunch in self: # if nbunch is a single node + bunch = iter([nbunch]) + else: # if nbunch is a sequence of nodes + + def bunch_iter(nlist, adj): + try: + for n in nlist: + if n in adj: + yield n + except TypeError as err: + exc, message = err, err.args[0] + # capture error for non-sequence/iterator nbunch. + if "iter" in message: + exc = NetworkXError( + "nbunch is not a node or a sequence of nodes." + ) + # capture single nodes that are not in the graph. + if "object is not iterable" in message: + exc = NetworkXError(f"Node {nbunch} is not in the graph.") + # capture error for unhashable node. + if "hashable" in message: + exc = NetworkXError( + f"Node {n} in sequence nbunch is not a valid node." 
+ ) + raise exc + + bunch = bunch_iter(nbunch, self._adj) + return bunch diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/graphviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/graphviews.py new file mode 100644 index 0000000..0b09df6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/graphviews.py @@ -0,0 +1,269 @@ +"""View of Graphs as SubGraph, Reverse, Directed, Undirected. + +In some algorithms it is convenient to temporarily morph +a graph to exclude some nodes or edges. It should be better +to do that via a view than to remove and then re-add. +In other algorithms it is convenient to temporarily morph +a graph to reverse directed edges, or treat a directed graph +as undirected, etc. This module provides those graph views. + +The resulting views are essentially read-only graphs that +report data from the original graph object. We provide an +attribute G._graph which points to the underlying graph object. + +Note: Since graphviews look like graphs, one can end up with +view-of-view-of-view chains. Be careful with chains because +they become very slow with about 15 nested views. +For the common simple case of node induced subgraphs created +from the graph class, we short-cut the chain by returning a +subgraph of the original graph directly rather than a subgraph +of a subgraph. We are careful not to disrupt any edge filter in +the middle subgraph. In general, determining how to short-cut +the chain is tricky and much harder with restricted_views than +with induced subgraphs. +Often it is easiest to use .copy() to avoid chains. +""" + +import networkx as nx +from networkx.classes.coreviews import ( + FilterAdjacency, + FilterAtlas, + FilterMultiAdjacency, + UnionAdjacency, + UnionMultiAdjacency, +) +from networkx.classes.filters import no_filter +from networkx.exception import NetworkXError +from networkx.utils import not_implemented_for + +__all__ = ["generic_graph_view", "subgraph_view", "reverse_view"] + + +def generic_graph_view(G, create_using=None): + """Returns a read-only view of `G`. + + The graph `G` and its attributes are not copied but viewed through the new graph object + of the same class as `G` (or of the class specified in `create_using`). + + Parameters + ---------- + G : graph + A directed/undirected graph/multigraph. + + create_using : NetworkX graph constructor, optional (default=None) + Graph type to create. If graph instance, then cleared before populated. + If `None`, then the appropriate Graph type is inferred from `G`. + + Returns + ------- + newG : graph + A view of the input graph `G` and its attributes as viewed through + the `create_using` class. + + Raises + ------ + NetworkXError + If `G` is a multigraph (or multidigraph) but `create_using` is not, or vice versa. + + Notes + ----- + The returned graph view is read-only (cannot modify the graph). + Yet the view reflects any changes in `G`. The intent is to mimic dict views. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=0.3) + >>> G.add_edge(2, 3, weight=0.5) + >>> G.edges(data=True) + EdgeDataView([(1, 2, {'weight': 0.3}), (2, 3, {'weight': 0.5})]) + + The view exposes the attributes from the original graph. + + >>> viewG = nx.graphviews.generic_graph_view(G) + >>> viewG.edges(data=True) + EdgeDataView([(1, 2, {'weight': 0.3}), (2, 3, {'weight': 0.5})]) + + Changes to `G` are reflected in `viewG`. 
+ 
+ >>> G.remove_edge(2, 3)
+ >>> G.edges(data=True)
+ EdgeDataView([(1, 2, {'weight': 0.3})])
+ 
+ >>> viewG.edges(data=True)
+ EdgeDataView([(1, 2, {'weight': 0.3})])
+ 
+ We can change the graph type with the `create_using` parameter.
+ 
+ >>> type(G)
+ <class 'networkx.classes.graph.Graph'>
+ >>> viewDG = nx.graphviews.generic_graph_view(G, create_using=nx.DiGraph)
+ >>> type(viewDG)
+ <class 'networkx.classes.digraph.DiGraph'>
+ """
+ if create_using is None:
+ newG = G.__class__()
+ else:
+ newG = nx.empty_graph(0, create_using)
+ if G.is_multigraph() != newG.is_multigraph():
+ raise NetworkXError("Multigraph for G must agree with create_using")
+ newG = nx.freeze(newG)
+ 
+ # create view by assigning attributes from G
+ newG._graph = G
+ newG.graph = G.graph
+ 
+ newG._node = G._node
+ if newG.is_directed():
+ if G.is_directed():
+ newG._succ = G._succ
+ newG._pred = G._pred
+ # newG._adj is synced with _succ
+ else:
+ newG._succ = G._adj
+ newG._pred = G._adj
+ # newG._adj is synced with _succ
+ elif G.is_directed():
+ if G.is_multigraph():
+ newG._adj = UnionMultiAdjacency(G._succ, G._pred)
+ else:
+ newG._adj = UnionAdjacency(G._succ, G._pred)
+ else:
+ newG._adj = G._adj
+ return newG
+ 
+ 
+ def subgraph_view(G, *, filter_node=no_filter, filter_edge=no_filter):
+ """View of `G` applying a filter on nodes and edges.
+ 
+ `subgraph_view` provides a read-only view of the input graph that excludes
+ nodes and edges based on the outcome of two filter functions `filter_node`
+ and `filter_edge`.
+ 
+ The `filter_node` function takes one argument --- the node --- and returns
+ `True` if the node should be included in the subgraph, and `False` if it
+ should not be included.
+ 
+ The `filter_edge` function takes two (or three arguments if `G` is a
+ multi-graph) --- the nodes describing an edge, plus the edge-key if
+ parallel edges are possible --- and returns `True` if the edge should be
+ included in the subgraph, and `False` if it should not be included.
+ 
+ Both node and edge filter functions are called on graph elements as they
+ are queried, meaning there is no up-front cost to creating the view.
+ 
+ Parameters
+ ----------
+ G : networkx.Graph
+ A directed/undirected graph/multigraph
+ 
+ filter_node : callable, optional
+ A function taking a node as input, which returns `True` if the node
+ should appear in the view.
+ 
+ filter_edge : callable, optional
+ A function taking as input the two nodes describing an edge (plus the
+ edge-key if `G` is a multi-graph), which returns `True` if the edge
+ should appear in the view.
+ 
+ Returns
+ -------
+ graph : networkx.Graph
+ A read-only graph view of the input graph.
+ 
+ Examples
+ --------
+ >>> G = nx.path_graph(6)
+ 
+ Filter functions operate on the node, and return `True` if the node should
+ appear in the view:
+ 
+ >>> def filter_node(n1):
+ ... return n1 != 5
+ >>> view = nx.subgraph_view(G, filter_node=filter_node)
+ >>> view.nodes()
+ NodeView((0, 1, 2, 3, 4))
+ 
+ We can use a closure pattern to filter graph elements based on additional
+ data --- for example, filtering on edge data attached to the graph:
+ 
+ >>> G[3][4]["cross_me"] = False
+ >>> def filter_edge(n1, n2):
+ ... return G[n1][n2].get("cross_me", True)
+ >>> view = nx.subgraph_view(G, filter_edge=filter_edge)
+ >>> view.edges()
+ EdgeView([(0, 1), (1, 2), (2, 3), (4, 5)])
+ 
+ >>> view = nx.subgraph_view(
+ ... G,
+ ... filter_node=filter_node,
+ ... filter_edge=filter_edge,
+ ... 
) + >>> view.nodes() + NodeView((0, 1, 2, 3, 4)) + >>> view.edges() + EdgeView([(0, 1), (1, 2), (2, 3)]) + """ + newG = nx.freeze(G.__class__()) + newG._NODE_OK = filter_node + newG._EDGE_OK = filter_edge + + # create view by assigning attributes from G + newG._graph = G + newG.graph = G.graph + + newG._node = FilterAtlas(G._node, filter_node) + if G.is_multigraph(): + Adj = FilterMultiAdjacency + + def reverse_edge(u, v, k=None): + return filter_edge(v, u, k) + + else: + Adj = FilterAdjacency + + def reverse_edge(u, v, k=None): + return filter_edge(v, u) + + if G.is_directed(): + newG._succ = Adj(G._succ, filter_node, filter_edge) + newG._pred = Adj(G._pred, filter_node, reverse_edge) + # newG._adj is synced with _succ + else: + newG._adj = Adj(G._adj, filter_node, filter_edge) + return newG + + +@not_implemented_for("undirected") +def reverse_view(G): + """View of `G` with edge directions reversed + + `reverse_view` returns a read-only view of the input graph where + edge directions are reversed. + + Identical to digraph.reverse(copy=False) + + Parameters + ---------- + G : networkx.DiGraph + + Returns + ------- + graph : networkx.DiGraph + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edge(1, 2) + >>> G.add_edge(2, 3) + >>> G.edges() + OutEdgeView([(1, 2), (2, 3)]) + + >>> view = nx.reverse_view(G) + >>> view.edges() + OutEdgeView([(2, 1), (3, 2)]) + """ + newG = generic_graph_view(G) + newG._succ, newG._pred = G._pred, G._succ + # newG._adj is synced with _succ + return newG diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/multidigraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/multidigraph.py new file mode 100644 index 0000000..597af79 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/multidigraph.py @@ -0,0 +1,966 @@ +"""Base class for MultiDiGraph.""" + +from copy import deepcopy +from functools import cached_property + +import networkx as nx +from networkx import convert +from networkx.classes.coreviews import MultiAdjacencyView +from networkx.classes.digraph import DiGraph +from networkx.classes.multigraph import MultiGraph +from networkx.classes.reportviews import ( + DiMultiDegreeView, + InMultiDegreeView, + InMultiEdgeView, + OutMultiDegreeView, + OutMultiEdgeView, +) +from networkx.exception import NetworkXError + +__all__ = ["MultiDiGraph"] + + +class MultiDiGraph(MultiGraph, DiGraph): + """A directed graph class that can store multiedges. + + Multiedges are multiple edges between two nodes. Each edge + can hold optional data or attributes. + + A MultiDiGraph holds directed edges. Self loops are allowed. + + Nodes can be arbitrary (hashable) Python objects with optional + key/value attributes. By convention `None` is not used as a node. + + Edges are represented as links between nodes with optional + key/value attributes. + + Parameters + ---------- + incoming_graph_data : input graph (optional, default: None) + Data to initialize graph. If None (default) an empty + graph is created. The data can be any format that is supported + by the to_networkx_graph() function, currently including edge list, + dict of dicts, dict of lists, NetworkX graph, 2D NumPy array, SciPy + sparse matrix, or PyGraphviz graph. + + multigraph_input : bool or None (default None) + Note: Only used when `incoming_graph_data` is a dict. + If True, `incoming_graph_data` is assumed to be a + dict-of-dict-of-dict-of-dict structure keyed by + node to neighbor to edge keys to edge data for multi-edges. 
+ A NetworkXError is raised if this is not the case. + If False, :func:`to_networkx_graph` is used to try to determine + the dict's graph data structure as either a dict-of-dict-of-dict + keyed by node to neighbor to edge data, or a dict-of-iterable + keyed by node to neighbors. + If None, the treatment for True is tried, but if it fails, + the treatment for False is tried. + + attr : keyword arguments, optional (default= no attributes) + Attributes to add to graph as key=value pairs. + + See Also + -------- + Graph + DiGraph + MultiGraph + + Examples + -------- + Create an empty graph structure (a "null graph") with no nodes and + no edges. + + >>> G = nx.MultiDiGraph() + + G can be grown in several ways. + + **Nodes:** + + Add one node at a time: + + >>> G.add_node(1) + + Add the nodes from any container (a list, dict, set or + even the lines from a file or the nodes from another graph). + + >>> G.add_nodes_from([2, 3]) + >>> G.add_nodes_from(range(100, 110)) + >>> H = nx.path_graph(10) + >>> G.add_nodes_from(H) + + In addition to strings and integers any hashable Python object + (except None) can represent a node, e.g. a customized node object, + or even another Graph. + + >>> G.add_node(H) + + **Edges:** + + G can also be grown by adding edges. + + Add one edge, + + >>> key = G.add_edge(1, 2) + + a list of edges, + + >>> keys = G.add_edges_from([(1, 2), (1, 3)]) + + or a collection of edges, + + >>> keys = G.add_edges_from(H.edges) + + If some edges connect nodes not yet in the graph, the nodes + are added automatically. If an edge already exists, an additional + edge is created and stored using a key to identify the edge. + By default the key is the lowest unused integer. + + >>> keys = G.add_edges_from([(4, 5, dict(route=282)), (4, 5, dict(route=37))]) + >>> G[4] + AdjacencyView({5: {0: {}, 1: {'route': 282}, 2: {'route': 37}}}) + + **Attributes:** + + Each graph, node, and edge can hold key/value attribute pairs + in an associated attribute dictionary (the keys must be hashable). + By default these are empty, but can be added or changed using + add_edge, add_node or direct manipulation of the attribute + dictionaries named graph, node and edge respectively. + + >>> G = nx.MultiDiGraph(day="Friday") + >>> G.graph + {'day': 'Friday'} + + Add node attributes using add_node(), add_nodes_from() or G.nodes + + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") + >>> G.nodes[1] + {'time': '5pm'} + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute + >>> list(G.nodes(data=True)) + [(1, {'time': '5pm'}), (3, {'time': '2pm'})] + + Add edge attributes using add_edge(), add_edges_from(), subscript + notation, or G.edges. + + >>> key = G.add_edge(1, 2, weight=4.7) + >>> keys = G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> keys = G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2][0]["weight"] = 4.7 + >>> G.edges[1, 2, 0]["weight"] = 4 + + Warning: we protect the graph data structure by making `G.edges[1, + 2, 0]` a read-only dict-like structure. However, you can assign to + attributes in e.g. `G.edges[1, 2, 0]`. Thus, use 2 sets of brackets + to add/change data attributes: `G.edges[1, 2, 0]['weight'] = 4` + (for multigraphs the edge key is required: `MG.edges[u, v, + key][name] = value`). + + **Shortcuts:** + + Many common graph features allow python syntax to speed reporting. 
+ + >>> 1 in G # check if node in graph + True + >>> [n for n in G if n < 3] # iterate through nodes + [1, 2] + >>> len(G) # number of nodes in graph + 5 + >>> G[1] # adjacency dict-like view mapping neighbor -> edge key -> edge attributes + AdjacencyView({2: {0: {'weight': 4}, 1: {'color': 'blue'}}}) + + Often the best way to traverse all edges of a graph is via the neighbors. + The neighbors are available as an adjacency-view `G.adj` object or via + the method `G.adjacency()`. + + >>> for n, nbrsdict in G.adjacency(): + ... for nbr, keydict in nbrsdict.items(): + ... for key, eattr in keydict.items(): + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass + + But the edges() method is often more convenient: + + >>> for u, v, keys, weight in G.edges(data="weight", keys=True): + ... if weight is not None: + ... # Do something useful with the edges + ... pass + + **Reporting:** + + Simple graph information is obtained using methods and object-attributes. + Reporting usually provides views instead of containers to reduce memory + usage. The views update as the graph is updated similarly to dict-views. + The objects `nodes`, `edges` and `adj` provide access to data attributes + via lookup (e.g. `nodes[n]`, `edges[u, v, k]`, `adj[u][v]`) and iteration + (e.g. `nodes.items()`, `nodes.data('color')`, + `nodes.data('color', default='blue')` and similarly for `edges`) + Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`. + + For details on these and other miscellaneous methods, see below. + + **Subclasses (Advanced):** + + The MultiDiGraph class uses a dict-of-dict-of-dict-of-dict structure. + The outer dict (node_dict) holds adjacency information keyed by node. + The next dict (adjlist_dict) represents the adjacency information + and holds edge_key dicts keyed by neighbor. The edge_key dict holds + each edge_attr dict keyed by edge key. The inner dict + (edge_attr_dict) represents the edge data and holds edge attribute + values keyed by attribute names. + + Each of these four dicts in the dict-of-dict-of-dict-of-dict + structure can be replaced by a user defined dict-like object. + In general, the dict-like features should be maintained but + extra features can be added. To replace one of the dicts create + a new graph class by changing the class(!) variable holding the + factory for that dict-like structure. The variable names are + node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory, + adjlist_outer_dict_factory, edge_key_dict_factory, edge_attr_dict_factory + and graph_attr_dict_factory. + + node_dict_factory : function, (default: dict) + Factory function to be used to create the dict containing node + attributes, keyed by node id. + It should require no arguments and return a dict-like object + + node_attr_dict_factory: function, (default: dict) + Factory function to be used to create the node attribute + dict which holds attribute values keyed by attribute name. + It should require no arguments and return a dict-like object + + adjlist_outer_dict_factory : function, (default: dict) + Factory function to be used to create the outer-most dict + in the data structure that holds adjacency info keyed by node. + It should require no arguments and return a dict-like object. + + adjlist_inner_dict_factory : function, (default: dict) + Factory function to be used to create the adjacency list + dict which holds multiedge key dicts keyed by neighbor. + It should require no arguments and return a dict-like object. 
+ 
+ edge_key_dict_factory : function, (default: dict)
+ Factory function to be used to create the edge key dict
+ which holds edge data keyed by edge key.
+ It should require no arguments and return a dict-like object.
+ 
+ edge_attr_dict_factory : function, (default: dict)
+ Factory function to be used to create the edge attribute
+ dict which holds attribute values keyed by attribute name.
+ It should require no arguments and return a dict-like object.
+ 
+ graph_attr_dict_factory : function, (default: dict)
+ Factory function to be used to create the graph attribute
+ dict which holds attribute values keyed by attribute name.
+ It should require no arguments and return a dict-like object.
+ 
+ Typically, if your extension doesn't impact the data structure, all
+ methods are inherited without issue except: `to_directed/to_undirected`.
+ By default these methods create a DiGraph/Graph class and you probably
+ want them to create your extension of a DiGraph/Graph. To facilitate
+ this we define two class variables that you can set in your subclass.
+ 
+ to_directed_class : callable, (default: DiGraph or MultiDiGraph)
+ Class to create a new graph structure in the `to_directed` method.
+ If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
+ 
+ to_undirected_class : callable, (default: Graph or MultiGraph)
+ Class to create a new graph structure in the `to_undirected` method.
+ If `None`, a NetworkX class (Graph or MultiGraph) is used.
+ 
+ **Subclassing Example**
+ 
+ Create a low memory graph class that effectively disallows edge
+ attributes by using a single attribute dict for all edges.
+ This reduces the memory used, but you lose edge attributes.
+ 
+ >>> class ThinGraph(nx.Graph):
+ ... all_edge_dict = {"weight": 1}
+ ...
+ ... def single_edge_dict(self):
+ ... return self.all_edge_dict
+ ...
+ ... edge_attr_dict_factory = single_edge_dict
+ >>> G = ThinGraph()
+ >>> G.add_edge(2, 1)
+ >>> G[2][1]
+ {'weight': 1}
+ >>> G.add_edge(2, 2)
+ >>> G[2][1] is G[2][2]
+ True
+ """
+ 
+ # node_dict_factory = dict # already assigned in Graph
+ # adjlist_outer_dict_factory = dict
+ # adjlist_inner_dict_factory = dict
+ edge_key_dict_factory = dict
+ # edge_attr_dict_factory = dict
+ 
+ def __init__(self, incoming_graph_data=None, multigraph_input=None, **attr):
+ """Initialize a graph with edges, name, or graph attributes.
+ 
+ Parameters
+ ----------
+ incoming_graph_data : input graph
+ Data to initialize graph. If incoming_graph_data=None (default)
+ an empty graph is created. The data can be an edge list, or any
+ NetworkX graph object. If the corresponding optional Python
+ packages are installed the data can also be a 2D NumPy array, a
+ SciPy sparse array, or a PyGraphviz graph.
+ 
+ multigraph_input : bool or None (default None)
+ Note: Only used when `incoming_graph_data` is a dict.
+ If True, `incoming_graph_data` is assumed to be a
+ dict-of-dict-of-dict-of-dict structure keyed by
+ node to neighbor to edge keys to edge data for multi-edges.
+ A NetworkXError is raised if this is not the case.
+ If False, :func:`to_networkx_graph` is used to try to determine
+ the dict's graph data structure as either a dict-of-dict-of-dict
+ keyed by node to neighbor to edge data, or a dict-of-iterable
+ keyed by node to neighbors.
+ If None, the treatment for True is tried, but if it fails,
+ the treatment for False is tried.
+ 
+ attr : keyword arguments, optional (default= no attributes)
+ Attributes to add to graph as key=value pairs.
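+ 
+ As a rough sketch of the `multigraph_input` distinction (the dicts
+ below are illustrative input, not required names)::
+ 
+ # dict-of-dict-of-dict-of-dict: node -> neighbor -> key -> data
+ mdod = {1: {2: {0: {"w": 1}, 1: {"w": 2}}}}
+ G = nx.MultiDiGraph(mdod, multigraph_input=True) # two parallel edges
+ 
+ # dict-of-dict-of-dict: node -> neighbor -> data
+ dod = {1: {2: {"w": 1}}}
+ H = nx.MultiDiGraph(dod, multigraph_input=False) # one edge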
+ + See Also + -------- + convert + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G = nx.Graph(name="my graph") + >>> e = [(1, 2), (2, 3), (3, 4)] # list of edges + >>> G = nx.Graph(e) + + Arbitrary graph attribute pairs (key=value) may be assigned + + >>> G = nx.Graph(e, day="Friday") + >>> G.graph + {'day': 'Friday'} + + """ + # multigraph_input can be None/True/False. So check "is not False" + if isinstance(incoming_graph_data, dict) and multigraph_input is not False: + DiGraph.__init__(self) + try: + convert.from_dict_of_dicts( + incoming_graph_data, create_using=self, multigraph_input=True + ) + self.graph.update(attr) + except Exception as err: + if multigraph_input is True: + raise nx.NetworkXError( + f"converting multigraph_input raised:\n{type(err)}: {err}" + ) + DiGraph.__init__(self, incoming_graph_data, **attr) + else: + DiGraph.__init__(self, incoming_graph_data, **attr) + + @cached_property + def adj(self): + """Graph adjacency object holding the neighbors of each node. + + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edgekey-dict. So `G.adj[3][2][0]['color'] = 'blue'` sets + the color of the edge `(3, 2, 0)` to `"blue"`. + + Iterating over G.adj behaves like a dict. Useful idioms include + `for nbr, datadict in G.adj[n].items():`. + + The neighbor information is also provided by subscripting the graph. + So `for nbr, foovalue in G[node].data('foo', default=1):` works. + + For directed graphs, `G.adj` holds outgoing (successor) info. + """ + return MultiAdjacencyView(self._succ) + + @cached_property + def succ(self): + """Graph adjacency object holding the successors of each node. + + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edgekey-dict. So `G.adj[3][2][0]['color'] = 'blue'` sets + the color of the edge `(3, 2, 0)` to `"blue"`. + + Iterating over G.adj behaves like a dict. Useful idioms include + `for nbr, datadict in G.adj[n].items():`. + + The neighbor information is also provided by subscripting the graph. + So `for nbr, foovalue in G[node].data('foo', default=1):` works. + + For directed graphs, `G.succ` is identical to `G.adj`. + """ + return MultiAdjacencyView(self._succ) + + @cached_property + def pred(self): + """Graph adjacency object holding the predecessors of each node. + + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edgekey-dict. So `G.adj[3][2][0]['color'] = 'blue'` sets + the color of the edge `(3, 2, 0)` to `"blue"`. + + Iterating over G.adj behaves like a dict. Useful idioms include + `for nbr, datadict in G.adj[n].items():`. + """ + return MultiAdjacencyView(self._pred) + + def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): + """Add an edge between u and v. + + The nodes u and v will be automatically added if they are + not already in the graph. + + Edge attributes can be specified with keywords or by directly + accessing the edge's attribute dictionary. See examples below. + + Parameters + ---------- + u_for_edge, v_for_edge : nodes + Nodes can be, for example, strings or numbers. + Nodes must be hashable (and not None) Python objects. + key : hashable identifier, optional (default=lowest unused integer) + Used to distinguish multiedges between a pair of nodes. 
+ attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + Returns + ------- + The edge key assigned to the edge. + + See Also + -------- + add_edges_from : add a collection of edges + + Notes + ----- + To replace/update edge data, use the optional key argument + to identify a unique edge. Otherwise a new edge will be created. + + NetworkX algorithms designed for weighted graphs cannot use + multigraphs directly because it is not clear how to handle + multiedge weights. Convert to Graph using edge attribute + 'weight' to enable weighted graph algorithms. + + Default keys are generated using the method `new_edge_key()`. + This method can be overridden by subclassing the base class and + providing a custom `new_edge_key()` method. + + Examples + -------- + The following all add the edge e=(1, 2) to graph G: + + >>> G = nx.MultiDiGraph() + >>> e = (1, 2) + >>> key = G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes + 1 + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container + [2] + + Associate data to edges using keywords: + + >>> key = G.add_edge(1, 2, weight=3) + >>> key = G.add_edge(1, 2, key=0, weight=4) # update data for key=0 + >>> key = G.add_edge(1, 3, weight=7, capacity=15, length=342.7) + + For non-string attribute keys, use subscript notation. + + >>> ekey = G.add_edge(1, 2) + >>> G[1][2][0].update({0: 5}) + >>> G.edges[1, 2, 0].update({0: 5}) + """ + u, v = u_for_edge, v_for_edge + # add nodes + if u not in self._succ: + if u is None: + raise ValueError("None cannot be a node") + self._succ[u] = self.adjlist_inner_dict_factory() + self._pred[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._succ: + if v is None: + raise ValueError("None cannot be a node") + self._succ[v] = self.adjlist_inner_dict_factory() + self._pred[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + if key is None: + key = self.new_edge_key(u, v) + if v in self._succ[u]: + keydict = self._adj[u][v] + datadict = keydict.get(key, self.edge_attr_dict_factory()) + datadict.update(attr) + keydict[key] = datadict + else: + # selfloops work this way without special treatment + datadict = self.edge_attr_dict_factory() + datadict.update(attr) + keydict = self.edge_key_dict_factory() + keydict[key] = datadict + self._succ[u][v] = keydict + self._pred[v][u] = keydict + nx._clear_cache(self) + return key + + def remove_edge(self, u, v, key=None): + """Remove an edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove an edge between nodes u and v. + key : hashable identifier, optional (default=None) + Used to distinguish multiple edges between a pair of nodes. + If None, remove a single edge between u and v. If there are + multiple edges, removes the last edge added in terms of + insertion order. + + Raises + ------ + NetworkXError + If there is not an edge between u and v, or + if there is no edge with the specified key. 
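+ 
+ A defensive pattern when the edge may already be absent (a sketch,
+ not the only approach)::
+ 
+ try:
+ G.remove_edge(u, v, key)
+ except nx.NetworkXError:
+ pass # edge (or key) was not present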
+ + See Also + -------- + remove_edges_from : remove a collection of edges + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.remove_edge(0, 1) + >>> e = (1, 2) + >>> G.remove_edge(*e) # unpacks e from an edge tuple + + For multiple edges + + >>> G = nx.MultiDiGraph() + >>> G.add_edges_from([(1, 2), (1, 2), (1, 2)]) # key_list returned + [0, 1, 2] + + When ``key=None`` (the default), edges are removed in the opposite + order that they were added: + + >>> G.remove_edge(1, 2) + >>> G.edges(keys=True) + OutMultiEdgeView([(1, 2, 0), (1, 2, 1)]) + + For edges with keys + + >>> G = nx.MultiDiGraph() + >>> G.add_edge(1, 2, key="first") + 'first' + >>> G.add_edge(1, 2, key="second") + 'second' + >>> G.remove_edge(1, 2, key="first") + >>> G.edges(keys=True) + OutMultiEdgeView([(1, 2, 'second')]) + + """ + try: + d = self._adj[u][v] + except KeyError as err: + raise NetworkXError(f"The edge {u}-{v} is not in the graph.") from err + # remove the edge with specified data + if key is None: + d.popitem() + else: + try: + del d[key] + except KeyError as err: + msg = f"The edge {u}-{v} with key {key} is not in the graph." + raise NetworkXError(msg) from err + if len(d) == 0: + # remove the key entries if last edge + del self._succ[u][v] + del self._pred[v][u] + nx._clear_cache(self) + + @cached_property + def edges(self): + """An OutMultiEdgeView of the Graph as G.edges or G.edges(). + + edges(self, nbunch=None, data=False, keys=False, default=None) + + The OutMultiEdgeView provides set-like operations on the edge-tuples + as well as edge attribute lookup. When called, it also provides + an EdgeDataView object which allows control of access to edge + attributes (but does not provide set-like operations). + Hence, ``G.edges[u, v, k]['color']`` provides the value of the color + attribute for the edge from ``u`` to ``v`` with key ``k`` while + ``for (u, v, k, c) in G.edges(data='color', default='red', keys=True):`` + iterates through all the edges yielding the color attribute with + default `'red'` if no color attribute exists. + + Edges are returned as tuples with optional data and keys + in the order (node, neighbor, key, data). If ``keys=True`` is not + provided, the tuples will just be (node, neighbor, data), but + multiple tuples with the same node and neighbor will be + generated when multiple edges between two nodes exist. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges from these nodes. + data : string or bool, optional (default=False) + The edge attribute returned in 3-tuple (u, v, ddict[data]). + If True, return edge attribute dict in 3-tuple (u, v, ddict). + If False, return 2-tuple (u, v). + keys : bool, optional (default=False) + If True, return edge keys with each edge, creating (u, v, k, + d) tuples when data is also requested (the default) and (u, + v, k) tuples when data is not requested. + default : value, optional (default=None) + Value used for edges that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + edges : OutMultiEdgeView + A view of edge attributes, usually it iterates over (u, v) + (u, v, k) or (u, v, k, d) tuples of edges, but can also be + used for attribute lookup as ``edges[u, v, k]['foo']``. + + Notes + ----- + Nodes in nbunch that are not in the graph will be (quietly) ignored. + For directed graphs this returns the out-edges. 
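+ 
+ A sketch of the tuple shapes yielded for each flag combination
+ (loop bodies omitted)::
+ 
+ for u, v in G.edges(): # 2-tuples
+ ...
+ for u, v, k in G.edges(keys=True): # 3-tuples with the edge key
+ ...
+ for u, v, k, d in G.edges(keys=True, data=True): # 4-tuples with data
+ ...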
+ + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> nx.add_path(G, [0, 1, 2]) + >>> key = G.add_edge(2, 3, weight=5) + >>> key2 = G.add_edge(1, 2) # second edge between these nodes + >>> [e for e in G.edges()] + [(0, 1), (1, 2), (1, 2), (2, 3)] + >>> list(G.edges(data=True)) # default data is {} (empty dict) + [(0, 1, {}), (1, 2, {}), (1, 2, {}), (2, 3, {'weight': 5})] + >>> list(G.edges(data="weight", default=1)) + [(0, 1, 1), (1, 2, 1), (1, 2, 1), (2, 3, 5)] + >>> list(G.edges(keys=True)) # default keys are integers + [(0, 1, 0), (1, 2, 0), (1, 2, 1), (2, 3, 0)] + >>> list(G.edges(data=True, keys=True)) + [(0, 1, 0, {}), (1, 2, 0, {}), (1, 2, 1, {}), (2, 3, 0, {'weight': 5})] + >>> list(G.edges(data="weight", default=1, keys=True)) + [(0, 1, 0, 1), (1, 2, 0, 1), (1, 2, 1, 1), (2, 3, 0, 5)] + >>> list(G.edges([0, 2])) + [(0, 1), (2, 3)] + >>> list(G.edges(0)) + [(0, 1)] + >>> list(G.edges(1)) + [(1, 2), (1, 2)] + + See Also + -------- + in_edges, out_edges + """ + return OutMultiEdgeView(self) + + # alias out_edges to edges + @cached_property + def out_edges(self): + return OutMultiEdgeView(self) + + out_edges.__doc__ = edges.__doc__ + + @cached_property + def in_edges(self): + """A view of the in edges of the graph as G.in_edges or G.in_edges(). + + in_edges(self, nbunch=None, data=False, keys=False, default=None) + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + data : string or bool, optional (default=False) + The edge attribute returned in 3-tuple (u, v, ddict[data]). + If True, return edge attribute dict in 3-tuple (u, v, ddict). + If False, return 2-tuple (u, v). + keys : bool, optional (default=False) + If True, return edge keys with each edge, creating 3-tuples + (u, v, k) or with data, 4-tuples (u, v, k, d). + default : value, optional (default=None) + Value used for edges that don't have the requested attribute. + Only relevant if data is not True or False. + + Returns + ------- + in_edges : InMultiEdgeView or InMultiEdgeDataView + A view of edge attributes, usually it iterates over (u, v) + or (u, v, k) or (u, v, k, d) tuples of edges, but can also be + used for attribute lookup as `edges[u, v, k]['foo']`. + + See Also + -------- + edges + """ + return InMultiEdgeView(self) + + @cached_property + def degree(self): + """A DegreeView for the Graph as G.degree or G.degree(). + + The node degree is the number of edges adjacent to the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iterator for (node, degree) as well as + lookup for the degree for a single node. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The name of an edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + DiMultiDegreeView or int + If multiple nodes are requested (the default), returns a `DiMultiDegreeView` + mapping nodes to their degree. + If a single node is requested, returns the degree of the node as an integer. 
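+ 
+ A sketch of the two calling conventions described above::
+ 
+ d = G.degree(0) # single node: plain int
+ for n, deg in G.degree([0, 1, 2]): # several nodes: (node, degree) pairs
+ ...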
+ + See Also + -------- + out_degree, in_degree + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.degree(0) # node 0 with degree 1 + 1 + >>> list(G.degree([0, 1, 2])) + [(0, 1), (1, 2), (2, 2)] + >>> G.add_edge(0, 1) # parallel edge + 1 + >>> list(G.degree([0, 1, 2])) # parallel edges are counted + [(0, 2), (1, 3), (2, 2)] + + """ + return DiMultiDegreeView(self) + + @cached_property + def in_degree(self): + """A DegreeView for (node, in_degree) or in_degree for single node. + + The node in-degree is the number of edges pointing into the node. + The weighted node degree is the sum of the edge weights for + edges incident to that node. + + This object provides an iterator for (node, degree) as well as + lookup for the degree for a single node. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights adjacent to the node. + + Returns + ------- + If a single node is requested + deg : int + Degree of the node + + OR if multiple nodes are requested + nd_iter : iterator + The iterator returns two-tuples of (node, in-degree). + + See Also + -------- + degree, out_degree + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.in_degree(0) # node 0 with degree 0 + 0 + >>> list(G.in_degree([0, 1, 2])) + [(0, 0), (1, 1), (2, 1)] + >>> G.add_edge(0, 1) # parallel edge + 1 + >>> list(G.in_degree([0, 1, 2])) # parallel edges counted + [(0, 0), (1, 2), (2, 1)] + + """ + return InMultiDegreeView(self) + + @cached_property + def out_degree(self): + """Returns an iterator for (node, out-degree) or out-degree for single node. + + out_degree(self, nbunch=None, weight=None) + + The node out-degree is the number of edges pointing out of the node. + This function returns the out-degree for a single node or an iterator + for a bunch of nodes or if nothing is passed as argument. + + Parameters + ---------- + nbunch : single node, container, or all nodes (default= all nodes) + The view will only report edges incident to these nodes. + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used + as a weight. If None, then each edge has weight 1. + The degree is the sum of the edge weights. + + Returns + ------- + If a single node is requested + deg : int + Degree of the node + + OR if multiple nodes are requested + nd_iter : iterator + The iterator returns two-tuples of (node, out-degree). + + See Also + -------- + degree, in_degree + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.out_degree(0) # node 0 with degree 1 + 1 + >>> list(G.out_degree([0, 1, 2])) + [(0, 1), (1, 1), (2, 1)] + >>> G.add_edge(0, 1) # parallel edge + 1 + >>> list(G.out_degree([0, 1, 2])) # counts parallel edges + [(0, 2), (1, 1), (2, 1)] + + """ + return OutMultiDegreeView(self) + + def is_multigraph(self): + """Returns True if graph is a multigraph, False otherwise.""" + return True + + def is_directed(self): + """Returns True if graph is directed, False otherwise.""" + return True + + def to_undirected(self, reciprocal=False, as_view=False): + """Returns an undirected representation of the digraph. 
+ + Parameters + ---------- + reciprocal : bool (optional) + If True only keep edges that appear in both directions + in the original digraph. + as_view : bool (optional, default=False) + If True return an undirected view of the original directed graph. + + Returns + ------- + G : MultiGraph + An undirected graph with the same name and nodes and + with edge (u, v, data) if either (u, v, data) or (v, u, data) + is in the digraph. If both edges exist in digraph and + their edge data is different, only one edge is created + with an arbitrary choice of which edge data to use. + You must check and correct for this manually if desired. + + See Also + -------- + MultiGraph, copy, add_edge, add_edges_from + + Notes + ----- + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar D=MultiDiGraph(G) which + returns a shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Warning: If you have subclassed MultiDiGraph to use dict-like + objects in the data structure, those changes do not transfer + to the MultiGraph created by this method. + + Examples + -------- + >>> G = nx.path_graph(2) # or MultiGraph, etc + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1), (1, 0)] + >>> G2 = H.to_undirected() + >>> list(G2.edges) + [(0, 1)] + """ + graph_class = self.to_undirected_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + if reciprocal is True: + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + if v in self._pred[u] and key in self._pred[u][v] + ) + else: + G.add_edges_from( + (u, v, key, deepcopy(data)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, data in keydict.items() + ) + return G + + def reverse(self, copy=True): + """Returns the reverse of the graph. + + The reverse is a graph with the same nodes and edges + but with the directions of the edges reversed. + + Parameters + ---------- + copy : bool optional (default=True) + If True, return a new DiGraph holding the reversed edges. + If False, the reverse graph is created using a view of + the original graph. 
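+ 
+ A sketch of the practical difference between the two modes (names
+ are illustrative)::
+ 
+ H = G.reverse() # independent deep copy; safe to modify
+ V = G.reverse(copy=False) # read-only view; reflects later changes to G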
+ """ + if copy: + H = self.__class__() + H.graph.update(deepcopy(self.graph)) + H.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + H.add_edges_from( + (v, u, k, deepcopy(d)) + for u, v, k, d in self.edges(keys=True, data=True) + ) + return H + return nx.reverse_view(self) diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/multigraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/multigraph.py new file mode 100644 index 0000000..0e3f1ae --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/multigraph.py @@ -0,0 +1,1283 @@ +"""Base class for MultiGraph.""" + +from copy import deepcopy +from functools import cached_property + +import networkx as nx +from networkx import NetworkXError, convert +from networkx.classes.coreviews import MultiAdjacencyView +from networkx.classes.graph import Graph +from networkx.classes.reportviews import MultiDegreeView, MultiEdgeView + +__all__ = ["MultiGraph"] + + +class MultiGraph(Graph): + """ + An undirected graph class that can store multiedges. + + Multiedges are multiple edges between two nodes. Each edge + can hold optional data or attributes. + + A MultiGraph holds undirected edges. Self loops are allowed. + + Nodes can be arbitrary (hashable) Python objects with optional + key/value attributes. By convention `None` is not used as a node. + + Edges are represented as links between nodes with optional + key/value attributes, in a MultiGraph each edge has a key to + distinguish between multiple edges that have the same source and + destination nodes. + + Parameters + ---------- + incoming_graph_data : input graph (optional, default: None) + Data to initialize graph. If None (default) an empty + graph is created. The data can be any format that is supported + by the to_networkx_graph() function, currently including edge list, + dict of dicts, dict of lists, NetworkX graph, 2D NumPy array, + SciPy sparse array, or PyGraphviz graph. + + multigraph_input : bool or None (default None) + Note: Only used when `incoming_graph_data` is a dict. + If True, `incoming_graph_data` is assumed to be a + dict-of-dict-of-dict-of-dict structure keyed by + node to neighbor to edge keys to edge data for multi-edges. + A NetworkXError is raised if this is not the case. + If False, :func:`to_networkx_graph` is used to try to determine + the dict's graph data structure as either a dict-of-dict-of-dict + keyed by node to neighbor to edge data, or a dict-of-iterable + keyed by node to neighbors. + If None, the treatment for True is tried, but if it fails, + the treatment for False is tried. + + attr : keyword arguments, optional (default= no attributes) + Attributes to add to graph as key=value pairs. + + See Also + -------- + Graph + DiGraph + MultiDiGraph + + Examples + -------- + Create an empty graph structure (a "null graph") with no nodes and + no edges. + + >>> G = nx.MultiGraph() + + G can be grown in several ways. + + **Nodes:** + + Add one node at a time: + + >>> G.add_node(1) + + Add the nodes from any container (a list, dict, set or + even the lines from a file or the nodes from another graph). + + >>> G.add_nodes_from([2, 3]) + >>> G.add_nodes_from(range(100, 110)) + >>> H = nx.path_graph(10) + >>> G.add_nodes_from(H) + + In addition to strings and integers any hashable Python object + (except None) can represent a node, e.g. a customized node object, + or even another Graph. + + >>> G.add_node(H) + + **Edges:** + + G can also be grown by adding edges. 
+ + Add one edge, + + >>> key = G.add_edge(1, 2) + + a list of edges, + + >>> keys = G.add_edges_from([(1, 2), (1, 3)]) + + or a collection of edges, + + >>> keys = G.add_edges_from(H.edges) + + If some edges connect nodes not yet in the graph, the nodes + are added automatically. If an edge already exists, an additional + edge is created and stored using a key to identify the edge. + By default the key is the lowest unused integer. + + >>> keys = G.add_edges_from([(4, 5, {"route": 28}), (4, 5, {"route": 37})]) + >>> G[4] + AdjacencyView({3: {0: {}}, 5: {0: {}, 1: {'route': 28}, 2: {'route': 37}}}) + + **Attributes:** + + Each graph, node, and edge can hold key/value attribute pairs + in an associated attribute dictionary (the keys must be hashable). + By default these are empty, but can be added or changed using + add_edge, add_node or direct manipulation of the attribute + dictionaries named graph, node and edge respectively. + + >>> G = nx.MultiGraph(day="Friday") + >>> G.graph + {'day': 'Friday'} + + Add node attributes using add_node(), add_nodes_from() or G.nodes + + >>> G.add_node(1, time="5pm") + >>> G.add_nodes_from([3], time="2pm") + >>> G.nodes[1] + {'time': '5pm'} + >>> G.nodes[1]["room"] = 714 + >>> del G.nodes[1]["room"] # remove attribute + >>> list(G.nodes(data=True)) + [(1, {'time': '5pm'}), (3, {'time': '2pm'})] + + Add edge attributes using add_edge(), add_edges_from(), subscript + notation, or G.edges. + + >>> key = G.add_edge(1, 2, weight=4.7) + >>> keys = G.add_edges_from([(3, 4), (4, 5)], color="red") + >>> keys = G.add_edges_from([(1, 2, {"color": "blue"}), (2, 3, {"weight": 8})]) + >>> G[1][2][0]["weight"] = 4.7 + >>> G.edges[1, 2, 0]["weight"] = 4 + + Warning: we protect the graph data structure by making `G.edges[1, + 2, 0]` a read-only dict-like structure. However, you can assign to + attributes in e.g. `G.edges[1, 2, 0]`. Thus, use 2 sets of brackets + to add/change data attributes: `G.edges[1, 2, 0]['weight'] = 4`. + + **Shortcuts:** + + Many common graph features allow python syntax to speed reporting. + + >>> 1 in G # check if node in graph + True + >>> [n for n in G if n < 3] # iterate through nodes + [1, 2] + >>> len(G) # number of nodes in graph + 5 + >>> G[1] # adjacency dict-like view mapping neighbor -> edge key -> edge attributes + AdjacencyView({2: {0: {'weight': 4}, 1: {'color': 'blue'}}}) + + Often the best way to traverse all edges of a graph is via the neighbors. + The neighbors are reported as an adjacency-dict `G.adj` or `G.adjacency()`. + + >>> for n, nbrsdict in G.adjacency(): + ... for nbr, keydict in nbrsdict.items(): + ... for key, eattr in keydict.items(): + ... if "weight" in eattr: + ... # Do something useful with the edges + ... pass + + But the edges() method is often more convenient: + + >>> for u, v, keys, weight in G.edges(data="weight", keys=True): + ... if weight is not None: + ... # Do something useful with the edges + ... pass + + **Reporting:** + + Simple graph information is obtained using methods and object-attributes. + Reporting usually provides views instead of containers to reduce memory + usage. The views update as the graph is updated similarly to dict-views. + The objects `nodes`, `edges` and `adj` provide access to data attributes + via lookup (e.g. `nodes[n]`, `edges[u, v, k]`, `adj[u][v]`) and iteration + (e.g. `nodes.items()`, `nodes.data('color')`, + `nodes.data('color', default='blue')` and similarly for `edges`) + Views exist for `nodes`, `edges`, `neighbors()`/`adj` and `degree`. 
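+ 
+ For example, views track the graph rather than copying it (a small
+ illustrative sketch)::
+ 
+ edge_view = G.edges # no copy is made
+ key = G.add_edge(10, 11)
+ (10, 11) in edge_view # True: the view sees the new edge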
+ 
+ For details on these and other miscellaneous methods, see below.
+ 
+ **Subclasses (Advanced):**
+ 
+ The MultiGraph class uses a dict-of-dict-of-dict-of-dict data structure.
+ The outer dict (node_dict) holds adjacency information keyed by node.
+ The next dict (adjlist_dict) represents the adjacency information
+ and holds edge_key dicts keyed by neighbor. The edge_key dict holds
+ each edge_attr dict keyed by edge key. The inner dict
+ (edge_attr_dict) represents the edge data and holds edge attribute
+ values keyed by attribute names.
+ 
+ Each of these four dicts in the dict-of-dict-of-dict-of-dict
+ structure can be replaced by a user defined dict-like object.
+ In general, the dict-like features should be maintained but
+ extra features can be added. To replace one of the dicts create
+ a new graph class by changing the class(!) variable holding the
+ factory for that dict-like structure. The variable names are
+ node_dict_factory, node_attr_dict_factory, adjlist_inner_dict_factory,
+ adjlist_outer_dict_factory, edge_key_dict_factory, edge_attr_dict_factory
+ and graph_attr_dict_factory.
+ 
+ node_dict_factory : function, (default: dict)
+ Factory function to be used to create the dict containing node
+ attributes, keyed by node id.
+ It should require no arguments and return a dict-like object
+ 
+ node_attr_dict_factory: function, (default: dict)
+ Factory function to be used to create the node attribute
+ dict which holds attribute values keyed by attribute name.
+ It should require no arguments and return a dict-like object
+ 
+ adjlist_outer_dict_factory : function, (default: dict)
+ Factory function to be used to create the outer-most dict
+ in the data structure that holds adjacency info keyed by node.
+ It should require no arguments and return a dict-like object.
+ 
+ adjlist_inner_dict_factory : function, (default: dict)
+ Factory function to be used to create the adjacency list
+ dict which holds multiedge key dicts keyed by neighbor.
+ It should require no arguments and return a dict-like object.
+ 
+ edge_key_dict_factory : function, (default: dict)
+ Factory function to be used to create the edge key dict
+ which holds edge data keyed by edge key.
+ It should require no arguments and return a dict-like object.
+ 
+ edge_attr_dict_factory : function, (default: dict)
+ Factory function to be used to create the edge attribute
+ dict which holds attribute values keyed by attribute name.
+ It should require no arguments and return a dict-like object.
+ 
+ graph_attr_dict_factory : function, (default: dict)
+ Factory function to be used to create the graph attribute
+ dict which holds attribute values keyed by attribute name.
+ It should require no arguments and return a dict-like object.
+ 
+ Typically, if your extension doesn't impact the data structure, all
+ methods are inherited without issue except: `to_directed/to_undirected`.
+ By default these methods create a DiGraph/Graph class and you probably
+ want them to create your extension of a DiGraph/Graph. To facilitate
+ this we define two class variables that you can set in your subclass.
+ 
+ to_directed_class : callable, (default: DiGraph or MultiDiGraph)
+ Class to create a new graph structure in the `to_directed` method.
+ If `None`, a NetworkX class (DiGraph or MultiDiGraph) is used.
+ 
+ to_undirected_class : callable, (default: Graph or MultiGraph)
+ Class to create a new graph structure in the `to_undirected` method.
+ If `None`, a NetworkX class (Graph or MultiGraph) is used.
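+ 
+ For instance (a minimal sketch; `MyMultiGraph` and `MyMultiDiGraph`
+ are hypothetical subclasses)::
+ 
+ class MyMultiGraph(nx.MultiGraph):
+ def to_directed_class(self):
+ return MyMultiDiGraph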
+ 
+ **Subclassing Example**
+ 
+ Create a low memory graph class that effectively disallows edge
+ attributes by using a single attribute dict for all edges.
+ This reduces the memory used, but you lose edge attributes.
+ 
+ >>> class ThinGraph(nx.Graph):
+ ... all_edge_dict = {"weight": 1}
+ ...
+ ... def single_edge_dict(self):
+ ... return self.all_edge_dict
+ ...
+ ... edge_attr_dict_factory = single_edge_dict
+ >>> G = ThinGraph()
+ >>> G.add_edge(2, 1)
+ >>> G[2][1]
+ {'weight': 1}
+ >>> G.add_edge(2, 2)
+ >>> G[2][1] is G[2][2]
+ True
+ """
+ 
+ # node_dict_factory = dict # already assigned in Graph
+ # adjlist_outer_dict_factory = dict
+ # adjlist_inner_dict_factory = dict
+ edge_key_dict_factory = dict
+ # edge_attr_dict_factory = dict
+ 
+ def to_directed_class(self):
+ """Returns the class to use for empty directed copies.
+ 
+ If you subclass the base classes, use this to designate
+ what directed class to use for `to_directed()` copies.
+ """
+ return nx.MultiDiGraph
+ 
+ def to_undirected_class(self):
+ """Returns the class to use for empty undirected copies.
+ 
+ If you subclass the base classes, use this to designate
+ what undirected class to use for `to_undirected()` copies.
+ """
+ return MultiGraph
+ 
+ def __init__(self, incoming_graph_data=None, multigraph_input=None, **attr):
+ """Initialize a graph with edges, name, or graph attributes.
+ 
+ Parameters
+ ----------
+ incoming_graph_data : input graph
+ Data to initialize graph. If incoming_graph_data=None (default)
+ an empty graph is created. The data can be an edge list, or any
+ NetworkX graph object. If the corresponding optional Python
+ packages are installed the data can also be a 2D NumPy array, a
+ SciPy sparse array, or a PyGraphviz graph.
+ 
+ multigraph_input : bool or None (default None)
+ Note: Only used when `incoming_graph_data` is a dict.
+ If True, `incoming_graph_data` is assumed to be a
+ dict-of-dict-of-dict-of-dict structure keyed by
+ node to neighbor to edge keys to edge data for multi-edges.
+ A NetworkXError is raised if this is not the case.
+ If False, :func:`to_networkx_graph` is used to try to determine
+ the dict's graph data structure as either a dict-of-dict-of-dict
+ keyed by node to neighbor to edge data, or a dict-of-iterable
+ keyed by node to neighbors.
+ If None, the treatment for True is tried, but if it fails,
+ the treatment for False is tried.
+ 
+ attr : keyword arguments, optional (default= no attributes)
+ Attributes to add to graph as key=value pairs.
+ 
+ See Also
+ --------
+ convert
+ 
+ Examples
+ --------
+ >>> G = nx.MultiGraph()
+ >>> G = nx.MultiGraph(name="my graph")
+ >>> e = [(1, 2), (1, 2), (2, 3), (3, 4)] # list of edges
+ >>> G = nx.MultiGraph(e)
+ 
+ Arbitrary graph attribute pairs (key=value) may be assigned
+ 
+ >>> G = nx.MultiGraph(e, day="Friday")
+ >>> G.graph
+ {'day': 'Friday'}
+ 
+ """
+ # multigraph_input can be None/True/False. So check "is not False"
+ if isinstance(incoming_graph_data, dict) and multigraph_input is not False:
+ Graph.__init__(self)
+ try:
+ convert.from_dict_of_dicts(
+ incoming_graph_data, create_using=self, multigraph_input=True
+ )
+ self.graph.update(attr)
+ except Exception as err:
+ if multigraph_input is True:
+ raise nx.NetworkXError(
+ f"converting multigraph_input raised:\n{type(err)}: {err}"
+ )
+ Graph.__init__(self, incoming_graph_data, **attr)
+ else:
+ Graph.__init__(self, incoming_graph_data, **attr)
+ 
+ @cached_property
+ def adj(self):
+ """Graph adjacency object holding the neighbors of each node.
+ + This object is a read-only dict-like structure with node keys + and neighbor-dict values. The neighbor-dict is keyed by neighbor + to the edgekey-data-dict. So `G.adj[3][2][0]['color'] = 'blue'` sets + the color of the edge `(3, 2, 0)` to `"blue"`. + + Iterating over G.adj behaves like a dict. Useful idioms include + `for nbr, edgesdict in G.adj[n].items():`. + + The neighbor information is also provided by subscripting the graph. + + Examples + -------- + >>> e = [(1, 2), (1, 2), (1, 3), (3, 4)] # list of edges + >>> G = nx.MultiGraph(e) + >>> G.edges[1, 2, 0]["weight"] = 3 + >>> result = set() + >>> for edgekey, data in G[1][2].items(): + ... result.add(data.get("weight", 1)) + >>> result + {1, 3} + + For directed graphs, `G.adj` holds outgoing (successor) info. + """ + return MultiAdjacencyView(self._adj) + + def new_edge_key(self, u, v): + """Returns an unused key for edges between nodes `u` and `v`. + + The nodes `u` and `v` do not need to be already in the graph. + + Notes + ----- + In the standard MultiGraph class the new key is the number of existing + edges between `u` and `v` (increased if necessary to ensure unused). + The first edge will have key 0, then 1, etc. If an edge is removed + further new_edge_keys may not be in this order. + + Parameters + ---------- + u, v : nodes + + Returns + ------- + key : int + """ + try: + keydict = self._adj[u][v] + except KeyError: + return 0 + key = len(keydict) + while key in keydict: + key += 1 + return key + + def add_edge(self, u_for_edge, v_for_edge, key=None, **attr): + """Add an edge between u and v. + + The nodes u and v will be automatically added if they are + not already in the graph. + + Edge attributes can be specified with keywords or by directly + accessing the edge's attribute dictionary. See examples below. + + Parameters + ---------- + u_for_edge, v_for_edge : nodes + Nodes can be, for example, strings or numbers. + Nodes must be hashable (and not None) Python objects. + key : hashable identifier, optional (default=lowest unused integer) + Used to distinguish multiedges between a pair of nodes. + attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + Returns + ------- + The edge key assigned to the edge. + + See Also + -------- + add_edges_from : add a collection of edges + + Notes + ----- + To replace/update edge data, use the optional key argument + to identify a unique edge. Otherwise a new edge will be created. + + NetworkX algorithms designed for weighted graphs cannot use + multigraphs directly because it is not clear how to handle + multiedge weights. Convert to Graph using edge attribute + 'weight' to enable weighted graph algorithms. + + Default keys are generated using the method `new_edge_key()`. + This method can be overridden by subclassing the base class and + providing a custom `new_edge_key()` method. + + Examples + -------- + The following each add an additional edge e=(1, 2) to graph G: + + >>> G = nx.MultiGraph() + >>> e = (1, 2) + >>> ekey = G.add_edge(1, 2) # explicit two-node form + >>> G.add_edge(*e) # single edge as tuple of two nodes + 1 + >>> G.add_edges_from([(1, 2)]) # add edges from iterable container + [2] + + Associate data to edges using keywords: + + >>> ekey = G.add_edge(1, 2, weight=3) + >>> ekey = G.add_edge(1, 2, key=0, weight=4) # update data for key=0 + >>> ekey = G.add_edge(1, 3, weight=7, capacity=15, length=342.7) + + For non-string attribute keys, use subscript notation. 
+ + >>> ekey = G.add_edge(1, 2) + >>> G[1][2][0].update({0: 5}) + >>> G.edges[1, 2, 0].update({0: 5}) + """ + u, v = u_for_edge, v_for_edge + # add nodes + if u not in self._adj: + if u is None: + raise ValueError("None cannot be a node") + self._adj[u] = self.adjlist_inner_dict_factory() + self._node[u] = self.node_attr_dict_factory() + if v not in self._adj: + if v is None: + raise ValueError("None cannot be a node") + self._adj[v] = self.adjlist_inner_dict_factory() + self._node[v] = self.node_attr_dict_factory() + if key is None: + key = self.new_edge_key(u, v) + if v in self._adj[u]: + keydict = self._adj[u][v] + datadict = keydict.get(key, self.edge_attr_dict_factory()) + datadict.update(attr) + keydict[key] = datadict + else: + # selfloops work this way without special treatment + datadict = self.edge_attr_dict_factory() + datadict.update(attr) + keydict = self.edge_key_dict_factory() + keydict[key] = datadict + self._adj[u][v] = keydict + self._adj[v][u] = keydict + nx._clear_cache(self) + return key + + def add_edges_from(self, ebunch_to_add, **attr): + """Add all the edges in ebunch_to_add. + + Parameters + ---------- + ebunch_to_add : container of edges + Each edge given in the container will be added to the + graph. The edges can be: + + - 2-tuples (u, v) or + - 3-tuples (u, v, d) for an edge data dict d, or + - 3-tuples (u, v, k) for not iterable key k, or + - 4-tuples (u, v, k, d) for an edge with data and key k + + attr : keyword arguments, optional + Edge data (or labels or objects) can be assigned using + keyword arguments. + + Returns + ------- + A list of edge keys assigned to the edges in `ebunch`. + + See Also + -------- + add_edge : add a single edge + add_weighted_edges_from : convenient way to add weighted edges + + Notes + ----- + Adding the same edge twice has no effect but any edge data + will be updated when each duplicate edge is added. + + Edge attributes specified in an ebunch take precedence over + attributes specified via keyword arguments. + + Default keys are generated using the method ``new_edge_key()``. + This method can be overridden by subclassing the base class and + providing a custom ``new_edge_key()`` method. + + When adding edges from an iterator over the graph you are changing, + a `RuntimeError` can be raised with message: + `RuntimeError: dictionary changed size during iteration`. This + happens when the graph's underlying dictionary is modified during + iteration. To avoid this error, evaluate the iterator into a separate + object, e.g. by using `list(iterator_of_edges)`, and pass this + object to `G.add_edges_from`. + + Examples + -------- + >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> G.add_edges_from([(0, 1), (1, 2)]) # using a list of edge tuples + >>> e = zip(range(0, 3), range(1, 4)) + >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 + + Associate data to edges + + >>> G.add_edges_from([(1, 2), (2, 3)], weight=3) + >>> G.add_edges_from([(3, 4), (1, 4)], label="WN2898") + + Evaluate an iterator over a graph if using it to modify the same graph + + >>> G = nx.MultiGraph([(1, 2), (2, 3), (3, 4)]) + >>> # Grow graph by one new node, adding edges to all existing nodes. 
+ >>> # wrong way - will raise RuntimeError + >>> # G.add_edges_from(((5, n) for n in G.nodes)) + >>> # right way - note that there will be no self-edge for node 5 + >>> assigned_keys = G.add_edges_from(list((5, n) for n in G.nodes)) + """ + keylist = [] + for e in ebunch_to_add: + ne = len(e) + if ne == 4: + u, v, key, dd = e + elif ne == 3: + u, v, dd = e + key = None + elif ne == 2: + u, v = e + dd = {} + key = None + else: + msg = f"Edge tuple {e} must be a 2-tuple, 3-tuple or 4-tuple." + raise NetworkXError(msg) + ddd = {} + ddd.update(attr) + try: + ddd.update(dd) + except (TypeError, ValueError): + if ne != 3: + raise + key = dd # ne == 3 with 3rd value not dict, must be a key + key = self.add_edge(u, v, key) + self[u][v][key].update(ddd) + keylist.append(key) + nx._clear_cache(self) + return keylist + + def remove_edge(self, u, v, key=None): + """Remove an edge between u and v. + + Parameters + ---------- + u, v : nodes + Remove an edge between nodes u and v. + key : hashable identifier, optional (default=None) + Used to distinguish multiple edges between a pair of nodes. + If None, remove a single edge between u and v. If there are + multiple edges, removes the last edge added in terms of + insertion order. + + Raises + ------ + NetworkXError + If there is not an edge between u and v, or + if there is no edge with the specified key. + + See Also + -------- + remove_edges_from : remove a collection of edges + + Examples + -------- + >>> G = nx.MultiGraph() + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.remove_edge(0, 1) + >>> e = (1, 2) + >>> G.remove_edge(*e) # unpacks e from an edge tuple + + For multiple edges + + >>> G = nx.MultiGraph() # or MultiDiGraph, etc + >>> G.add_edges_from([(1, 2), (1, 2), (1, 2)]) # key_list returned + [0, 1, 2] + + When ``key=None`` (the default), edges are removed in the opposite + order that they were added: + + >>> G.remove_edge(1, 2) + >>> G.edges(keys=True) + MultiEdgeView([(1, 2, 0), (1, 2, 1)]) + >>> G.remove_edge(2, 1) # edges are not directed + >>> G.edges(keys=True) + MultiEdgeView([(1, 2, 0)]) + + For edges with keys + + >>> G = nx.MultiGraph() + >>> G.add_edge(1, 2, key="first") + 'first' + >>> G.add_edge(1, 2, key="second") + 'second' + >>> G.remove_edge(1, 2, key="first") + >>> G.edges(keys=True) + MultiEdgeView([(1, 2, 'second')]) + + """ + try: + d = self._adj[u][v] + except KeyError as err: + raise NetworkXError(f"The edge {u}-{v} is not in the graph.") from err + # remove the edge with specified data + if key is None: + d.popitem() + else: + try: + del d[key] + except KeyError as err: + msg = f"The edge {u}-{v} with key {key} is not in the graph." + raise NetworkXError(msg) from err + if len(d) == 0: + # remove the key entries if last edge + del self._adj[u][v] + if u != v: # check for selfloop + del self._adj[v][u] + nx._clear_cache(self) + + def remove_edges_from(self, ebunch): + """Remove all edges specified in ebunch. + + Parameters + ---------- + ebunch: list or container of edge tuples + Each edge given in the list or container will be removed + from the graph. The edges can be: + + - 2-tuples (u, v) A single edge between u and v is removed. + - 3-tuples (u, v, key) The edge identified by key is removed. + - 4-tuples (u, v, key, data) where data is ignored. + + See Also + -------- + remove_edge : remove a single edge + + Notes + ----- + Will fail silently if an edge in ebunch is not in the graph. 
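+
+        For instance (a minimal sketch of the silent-failure behavior; the
+        edge ``(4, 5)`` is arbitrary and not in the graph):
+
+        >>> G = nx.MultiGraph([(0, 1)])
+        >>> G.remove_edges_from([(4, 5)])  # edge not in graph: silently ignored
+        >>> list(G.edges())
+        [(0, 1)]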
+ + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> ebunch = [(1, 2), (2, 3)] + >>> G.remove_edges_from(ebunch) + + Removing multiple copies of edges + + >>> G = nx.MultiGraph() + >>> keys = G.add_edges_from([(1, 2), (1, 2), (1, 2)]) + >>> G.remove_edges_from([(1, 2), (2, 1)]) # edges aren't directed + >>> list(G.edges()) + [(1, 2)] + >>> G.remove_edges_from([(1, 2), (1, 2)]) # silently ignore extra copy + >>> list(G.edges) # now empty graph + [] + + When the edge is a 2-tuple ``(u, v)`` but there are multiple edges between + u and v in the graph, the most recent edge (in terms of insertion + order) is removed. + + >>> G = nx.MultiGraph() + >>> for key in ("x", "y", "a"): + ... k = G.add_edge(0, 1, key=key) + >>> G.edges(keys=True) + MultiEdgeView([(0, 1, 'x'), (0, 1, 'y'), (0, 1, 'a')]) + >>> G.remove_edges_from([(0, 1)]) + >>> G.edges(keys=True) + MultiEdgeView([(0, 1, 'x'), (0, 1, 'y')]) + + """ + for e in ebunch: + try: + self.remove_edge(*e[:3]) + except NetworkXError: + pass + nx._clear_cache(self) + + def has_edge(self, u, v, key=None): + """Returns True if the graph has an edge between nodes u and v. + + This is the same as `v in G[u] or key in G[u][v]` + without KeyError exceptions. + + Parameters + ---------- + u, v : nodes + Nodes can be, for example, strings or numbers. + + key : hashable identifier, optional (default=None) + If specified return True only if the edge with + key is found. + + Returns + ------- + edge_ind : bool + True if edge is in the graph, False otherwise. + + Examples + -------- + Can be called either using two nodes u, v, an edge tuple (u, v), + or an edge tuple (u, v, key). + + >>> G = nx.MultiGraph() # or MultiDiGraph + >>> nx.add_path(G, [0, 1, 2, 3]) + >>> G.has_edge(0, 1) # using two nodes + True + >>> e = (0, 1) + >>> G.has_edge(*e) # e is a 2-tuple (u, v) + True + >>> G.add_edge(0, 1, key="a") + 'a' + >>> G.has_edge(0, 1, key="a") # specify key + True + >>> G.has_edge(1, 0, key="a") # edges aren't directed + True + >>> e = (0, 1, "a") + >>> G.has_edge(*e) # e is a 3-tuple (u, v, 'a') + True + + The following syntax are equivalent: + + >>> G.has_edge(0, 1) + True + >>> 1 in G[0] # though this gives :exc:`KeyError` if 0 not in G + True + >>> 0 in G[1] # other order; also gives :exc:`KeyError` if 0 not in G + True + + """ + try: + if key is None: + return v in self._adj[u] + else: + return key in self._adj[u][v] + except KeyError: + return False + + @cached_property + def edges(self): + """Returns an iterator over the edges. + + edges(self, nbunch=None, data=False, keys=False, default=None) + + The MultiEdgeView provides set-like operations on the edge-tuples + as well as edge attribute lookup. When called, it also provides + an EdgeDataView object which allows control of access to edge + attributes (but does not provide set-like operations). + Hence, ``G.edges[u, v, k]['color']`` provides the value of the color + attribute for the edge from ``u`` to ``v`` with key ``k`` while + ``for (u, v, k, c) in G.edges(data='color', keys=True, default="red"):`` + iterates through all the edges yielding the color attribute with + default `'red'` if no color attribute exists. + + Edges are returned as tuples with optional data and keys + in the order (node, neighbor, key, data). If ``keys=True`` is not + provided, the tuples will just be (node, neighbor, data), but + multiple tuples with the same node and neighbor will be generated + when multiple edges exist between two nodes. 
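+
+        For instance (a minimal sketch of the repeated-tuple behavior; the
+        node labels are arbitrary):
+
+        >>> G = nx.MultiGraph([(1, 2), (1, 2)])  # two parallel edges
+        >>> list(G.edges())  # one 2-tuple per parallel edge
+        [(1, 2), (1, 2)]
+        >>> list(G.edges(keys=True))  # keys make the tuples distinct
+        [(1, 2, 0), (1, 2, 1)]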
+
+        Parameters
+        ----------
+        nbunch : single node, container, or all nodes (default= all nodes)
+            The view will only report edges from these nodes.
+        data : string or bool, optional (default=False)
+            The edge attribute returned in 3-tuple (u, v, ddict[data]).
+            If True, return edge attribute dict in 3-tuple (u, v, ddict).
+            If False, return 2-tuple (u, v).
+        keys : bool, optional (default=False)
+            If True, return edge keys with each edge, creating (u, v, k)
+            tuples or (u, v, k, d) tuples if data is also requested.
+        default : value, optional (default=None)
+            Value used for edges that don't have the requested attribute.
+            Only relevant if data is not True or False.
+
+        Returns
+        -------
+        edges : MultiEdgeView
+            A view of edge attributes, usually it iterates over (u, v),
+            (u, v, k) or (u, v, k, d) tuples of edges, but can also be
+            used for attribute lookup as ``edges[u, v, k]['foo']``.
+
+        Notes
+        -----
+        Nodes in nbunch that are not in the graph will be (quietly) ignored.
+        For directed graphs this returns the out-edges.
+
+        Examples
+        --------
+        >>> G = nx.MultiGraph()
+        >>> nx.add_path(G, [0, 1, 2])
+        >>> key = G.add_edge(2, 3, weight=5)
+        >>> key2 = G.add_edge(2, 1, weight=2)  # multi-edge
+        >>> [e for e in G.edges()]
+        [(0, 1), (1, 2), (1, 2), (2, 3)]
+        >>> G.edges.data()  # default data is {} (empty dict)
+        MultiEdgeDataView([(0, 1, {}), (1, 2, {}), (1, 2, {'weight': 2}), (2, 3, {'weight': 5})])
+        >>> G.edges.data("weight", default=1)
+        MultiEdgeDataView([(0, 1, 1), (1, 2, 1), (1, 2, 2), (2, 3, 5)])
+        >>> G.edges(keys=True)  # default keys are integers
+        MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 1), (2, 3, 0)])
+        >>> G.edges.data(keys=True)
+        MultiEdgeDataView([(0, 1, 0, {}), (1, 2, 0, {}), (1, 2, 1, {'weight': 2}), (2, 3, 0, {'weight': 5})])
+        >>> G.edges.data("weight", default=1, keys=True)
+        MultiEdgeDataView([(0, 1, 0, 1), (1, 2, 0, 1), (1, 2, 1, 2), (2, 3, 0, 5)])
+        >>> G.edges([0, 3])  # Note ordering of tuples from listed sources
+        MultiEdgeDataView([(0, 1), (3, 2)])
+        >>> G.edges([0, 3, 2, 1])  # Note ordering of tuples
+        MultiEdgeDataView([(0, 1), (3, 2), (2, 1), (2, 1)])
+        >>> G.edges(0)
+        MultiEdgeDataView([(0, 1)])
+        """
+        return MultiEdgeView(self)
+
+    def get_edge_data(self, u, v, key=None, default=None):
+        """Returns the attribute dictionary associated with edge (u, v,
+        key).
+
+        If a key is not provided, returns a dictionary mapping edge keys
+        to attribute dictionaries for each edge between u and v.
+
+        This is identical to `G[u][v][key]` except the default is returned
+        instead of an exception if the edge doesn't exist.
+
+        Parameters
+        ----------
+        u, v : nodes
+
+        default : any Python object (default=None)
+            Value to return if the specific edge (u, v, key) is not
+            found, OR if there are no edges between u and v and no key
+            is specified.
+
+        key : hashable identifier, optional (default=None)
+            Return data only for the edge with specified key, as an
+            attribute dictionary (rather than a dictionary mapping keys
+            to attribute dictionaries).
+
+        Returns
+        -------
+        edge_dict : dictionary
+            The edge attribute dictionary, OR a dictionary mapping edge
+            keys to attribute dictionaries for each of those edges if no
+            specific key is provided (even if there's only one edge
+            between u and v).
+
+        Examples
+        --------
+        >>> G = nx.MultiGraph()  # or MultiDiGraph
+        >>> key = G.add_edge(0, 1, key="a", weight=7)
+        >>> G[0][1]["a"]  # key='a'
+        {'weight': 7}
+        >>> G.edges[0, 1, "a"]  # key='a'
+        {'weight': 7}
+
+        Warning: we protect the graph data structure by making
+        `G.edges` and `G[1][2]` read-only dict-like structures.
+        However, you can assign values to attributes in e.g.
+        `G.edges[1, 2, 'a']` or `G[1][2]['a']` using an additional
+        bracket as shown next. You need to specify all edge info
+        to assign to the edge data associated with an edge.
+
+        >>> G[0][1]["a"]["weight"] = 10
+        >>> G.edges[0, 1, "a"]["weight"] = 10
+        >>> G[0][1]["a"]["weight"]
+        10
+        >>> G.edges[1, 0, "a"]["weight"]
+        10
+
+        >>> G = nx.MultiGraph()  # or MultiDiGraph
+        >>> nx.add_path(G, [0, 1, 2, 3])
+        >>> G.edges[0, 1, 0]["weight"] = 5
+        >>> G.get_edge_data(0, 1)
+        {0: {'weight': 5}}
+        >>> e = (0, 1)
+        >>> G.get_edge_data(*e)  # tuple form
+        {0: {'weight': 5}}
+        >>> G.get_edge_data(3, 0)  # edge not in graph, returns None
+        >>> G.get_edge_data(3, 0, default=0)  # edge not in graph, return default
+        0
+        >>> G.get_edge_data(1, 0, 0)  # specific key gives back
+        {'weight': 5}
+        """
+        try:
+            if key is None:
+                return self._adj[u][v]
+            else:
+                return self._adj[u][v][key]
+        except KeyError:
+            return default
+
+    @cached_property
+    def degree(self):
+        """A DegreeView for the Graph as G.degree or G.degree().
+
+        The node degree is the number of edges adjacent to the node.
+        The weighted node degree is the sum of the edge weights for
+        edges incident to that node.
+
+        This object provides an iterator for (node, degree) as well as
+        lookup for the degree for a single node.
+
+        Parameters
+        ----------
+        nbunch : single node, container, or all nodes (default= all nodes)
+            The view will only report edges incident to these nodes.
+
+        weight : string or None, optional (default=None)
+            The name of an edge attribute that holds the numerical value used
+            as a weight. If None, then each edge has weight 1.
+            The degree is the sum of the edge weights adjacent to the node.
+
+        Returns
+        -------
+        MultiDegreeView or int
+            If multiple nodes are requested (the default), returns a `MultiDegreeView`
+            mapping nodes to their degree.
+            If a single node is requested, returns the degree of the node as an integer.
+
+        Examples
+        --------
+        >>> G = nx.Graph()  # or DiGraph, MultiGraph, MultiDiGraph, etc
+        >>> nx.add_path(G, [0, 1, 2, 3])
+        >>> G.degree(0)  # node 0 with degree 1
+        1
+        >>> list(G.degree([0, 1]))
+        [(0, 1), (1, 2)]
+
+        """
+        return MultiDegreeView(self)
+
+    def is_multigraph(self):
+        """Returns True if graph is a multigraph, False otherwise."""
+        return True
+
+    def is_directed(self):
+        """Returns True if graph is directed, False otherwise."""
+        return False
+
+    def copy(self, as_view=False):
+        """Returns a copy of the graph.
+
+        The copy method by default returns an independent shallow copy
+        of the graph and attributes. That is, if an attribute is a
+        container, that container is shared by the original and the copy.
+        Use Python's `copy.deepcopy` for new containers.
+
+        If `as_view` is True then a view is returned instead of a copy.
+
+        Notes
+        -----
+        All copies reproduce the graph structure, but data attributes
+        may be handled in different ways. There are four types of copies
+        of a graph that people might want.
+
+        Deepcopy -- A "deepcopy" copies the graph structure as well as
+        all data attributes and any objects they might contain.
+        The entire graph object is new so that changes in the copy
+        do not affect the original object.
(see Python's copy.deepcopy) + + Data Reference (Shallow) -- For a shallow copy the graph structure + is copied but the edge, node and graph attribute dicts are + references to those in the original graph. This saves + time and memory but could cause confusion if you change an attribute + in one graph and it changes the attribute in the other. + NetworkX does not provide this level of shallow copy. + + Independent Shallow -- This copy creates new independent attribute + dicts and then does a shallow copy of the attributes. That is, any + attributes that are containers are shared between the new graph + and the original. This is exactly what `dict.copy()` provides. + You can obtain this style copy using: + + >>> G = nx.path_graph(5) + >>> H = G.copy() + >>> H = G.copy(as_view=False) + >>> H = nx.Graph(G) + >>> H = G.__class__(G) + + Fresh Data -- For fresh data, the graph structure is copied while + new empty data attribute dicts are created. The resulting graph + is independent of the original and it has no edge, node or graph + attributes. Fresh copies are not enabled. Instead use: + + >>> H = G.__class__() + >>> H.add_nodes_from(G) + >>> H.add_edges_from(G.edges) + + View -- Inspired by dict-views, graph-views act like read-only + versions of the original graph, providing a copy of the original + structure without requiring any memory for copying the information. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Parameters + ---------- + as_view : bool, optional (default=False) + If True, the returned graph-view provides a read-only view + of the original graph without actually copying any data. + + Returns + ------- + G : Graph + A copy of the graph. + + See Also + -------- + to_directed: return a directed copy of the graph. + + Examples + -------- + >>> G = nx.path_graph(4) # or DiGraph, MultiGraph, MultiDiGraph, etc + >>> H = G.copy() + + """ + if as_view is True: + return nx.graphviews.generic_graph_view(self) + G = self.__class__() + G.graph.update(self.graph) + G.add_nodes_from((n, d.copy()) for n, d in self._node.items()) + G.add_edges_from( + (u, v, key, datadict.copy()) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) + return G + + def to_directed(self, as_view=False): + """Returns a directed representation of the graph. + + Returns + ------- + G : MultiDiGraph + A directed graph with the same name, same nodes, and with + each edge (u, v, k, data) replaced by two directed edges + (u, v, k, data) and (v, u, k, data). + + Notes + ----- + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar D=MultiDiGraph(G) which + returns a shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Warning: If you have subclassed MultiGraph to use dict-like objects + in the data structure, those changes do not transfer to the + MultiDiGraph created by this method. 
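+
+        A small sketch of the deep-copy semantics described above (the
+        list-valued ``weight`` is chosen only to show that containers
+        are copied, not shared):
+
+        >>> G = nx.MultiGraph()
+        >>> key = G.add_edge(0, 1, weight=[1])
+        >>> H = G.to_directed()
+        >>> H[0][1][0]["weight"].append(2)  # mutate the directed copy only
+        >>> G[0][1][0]["weight"]  # the original is unaffected
+        [1]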
+ + Examples + -------- + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1) + 0 + >>> G.add_edge(0, 1) + 1 + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1, 0), (0, 1, 1), (1, 0, 0), (1, 0, 1)] + + If already directed, return a (deep) copy + + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1) + 0 + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1, 0)] + """ + graph_class = self.to_directed_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + G.add_edges_from( + (u, v, key, deepcopy(datadict)) + for u, nbrs in self.adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) + return G + + def to_undirected(self, as_view=False): + """Returns an undirected copy of the graph. + + Returns + ------- + G : Graph/MultiGraph + A deepcopy of the graph. + + See Also + -------- + copy, add_edge, add_edges_from + + Notes + ----- + This returns a "deepcopy" of the edge, node, and + graph attributes which attempts to completely copy + all of the data and references. + + This is in contrast to the similar `G = nx.MultiGraph(D)` + which returns a shallow copy of the data. + + See the Python copy module for more information on shallow + and deep copies, https://docs.python.org/3/library/copy.html. + + Warning: If you have subclassed MultiGraph to use dict-like + objects in the data structure, those changes do not transfer + to the MultiGraph created by this method. + + Examples + -------- + >>> G = nx.MultiGraph([(0, 1), (0, 1), (1, 2)]) + >>> H = G.to_directed() + >>> list(H.edges) + [(0, 1, 0), (0, 1, 1), (1, 0, 0), (1, 0, 1), (1, 2, 0), (2, 1, 0)] + >>> G2 = H.to_undirected() + >>> list(G2.edges) + [(0, 1, 0), (0, 1, 1), (1, 2, 0)] + """ + graph_class = self.to_undirected_class() + if as_view is True: + return nx.graphviews.generic_graph_view(self, graph_class) + # deepcopy when not a view + G = graph_class() + G.graph.update(deepcopy(self.graph)) + G.add_nodes_from((n, deepcopy(d)) for n, d in self._node.items()) + G.add_edges_from( + (u, v, key, deepcopy(datadict)) + for u, nbrs in self._adj.items() + for v, keydict in nbrs.items() + for key, datadict in keydict.items() + ) + return G + + def number_of_edges(self, u=None, v=None): + """Returns the number of edges between two nodes. + + Parameters + ---------- + u, v : nodes, optional (Default=all edges) + If u and v are specified, return the number of edges between + u and v. Otherwise return the total number of all edges. + + Returns + ------- + nedges : int + The number of edges in the graph. If nodes `u` and `v` are + specified return the number of edges between those nodes. If + the graph is directed, this only returns the number of edges + from `u` to `v`. 
+
+        See Also
+        --------
+        size
+
+        Examples
+        --------
+        For undirected multigraphs, this method counts the total number
+        of edges in the graph::
+
+            >>> G = nx.MultiGraph()
+            >>> G.add_edges_from([(0, 1), (0, 1), (1, 2)])
+            [0, 1, 0]
+            >>> G.number_of_edges()
+            3
+
+        If you specify two nodes, this counts the total number of edges
+        joining the two nodes::
+
+            >>> G.number_of_edges(0, 1)
+            2
+
+        For directed multigraphs, this method can count the total number
+        of directed edges from `u` to `v`::
+
+            >>> G = nx.MultiDiGraph()
+            >>> G.add_edges_from([(0, 1), (0, 1), (1, 0)])
+            [0, 1, 0]
+            >>> G.number_of_edges(0, 1)
+            2
+            >>> G.number_of_edges(1, 0)
+            1
+
+        """
+        if u is None:
+            return self.size()
+        try:
+            edgedata = self._adj[u][v]
+        except KeyError:
+            return 0  # no such edge
+        return len(edgedata)
diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/reportviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/reportviews.py
new file mode 100644
index 0000000..789662d
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/classes/reportviews.py
@@ -0,0 +1,1447 @@
+"""
+View Classes provide node, edge and degree "views" of a graph.
+
+Views for nodes, edges and degree are provided for all base graph classes.
+A view means a read-only object that is quick to create, automatically
+updated when the graph changes, and provides basic access like `n in V`,
+`for n in V`, `V[n]` and sometimes set operations.
+
+The views are read-only iterable containers that are updated as the
+graph is updated. As with dicts, the graph should not be updated
+while iterating through the view. Views can be iterated multiple times.
+
+Edge and Node views also allow data attribute lookup.
+The resulting attribute dict is writable as `G.edges[3, 4]['color']='red'`
+Degree views allow lookup of degree values for single nodes.
+Weighted degree is supported with the `weight` argument.
+
+NodeView
+========
+
+    `V = G.nodes` (or `V = G.nodes()`) allows `len(V)`, `n in V`, set
+    operations e.g. "G.nodes & H.nodes", and `dd = G.nodes[n]`, where
+    `dd` is the node data dict. Iteration is over the nodes by default.
+
+NodeDataView
+============
+
+    To iterate over (node, data) pairs, use arguments to `G.nodes()`
+    to create a DataView e.g. `DV = G.nodes(data='color', default='red')`.
+    The DataView iterates as `for n, color in DV` and allows
+    `(n, 'red') in DV`. Using `DV = G.nodes(data=True)`, the DataViews
+    use the full datadict in writeable form, also allowing containment
+    testing as `(n, {'color': 'red'}) in DV`. DataViews allow set
+    operations when data attributes are hashable.
+
+DegreeView
+==========
+
+    `V = G.degree` allows iteration over (node, degree) pairs as well
+    as lookup: `deg=V[n]`. There are many flavors of DegreeView
+    for In/Out/Directed/Multi. For Directed Graphs, `G.degree`
+    counts both incoming and outgoing edges. `G.out_degree` and
+    `G.in_degree` count only specific directions.
+    Weighted degree using edge data attributes is provided via
+    `V = G.degree(weight='attr_name')` where any string with the
+    attribute name can be used. `weight=None` is the default.
+    No set operations are implemented for degrees, use NodeView.
+
+    The argument `nbunch` restricts iteration to nodes in nbunch.
+    The DegreeView can still lookup any node even if nbunch is specified.
+
+EdgeView
+========
+
+    `V = G.edges` or `V = G.edges()` allows iteration over edges as well as
+    `e in V`, set operations and edge data lookup `dd = G.edges[2, 3]`.
+    Iteration is over 2-tuples `(u, v)` for Graph/DiGraph. For multigraphs,
+    3-tuples `(u, v, key)` are the default but 2-tuples can be obtained
+    via `V = G.edges(keys=False)`.
+
+    Set operations for directed graphs treat the edges as a set of 2-tuples.
+    For undirected graphs, 2-tuples are not a unique representation of edges.
+    So long as the set being compared to contains unique representations
+    of its edges, the set operations will act as expected. If the other
+    set contains both `(0, 1)` and `(1, 0)` however, the result of set
+    operations may contain both representations of the same edge.
+
+EdgeDataView
+============
+
+    Edge data can be reported using an EdgeDataView typically created
+    by calling an EdgeView: `DV = G.edges(data='weight', default=1)`.
+    The EdgeDataView allows iteration over edge tuples, membership checking
+    but no set operations.
+
+    Iteration depends on `data` and `default` and, for multigraphs, `keys`.
+    If `data is False` (the default) then iterate over 2-tuples `(u, v)`.
+    If `data is True` iterate over 3-tuples `(u, v, datadict)`.
+    Otherwise iterate over `(u, v, datadict.get(data, default))`.
+    For Multigraphs, if `keys is True`, replace `u, v` with `u, v, key`
+    to create 3-tuples and 4-tuples.
+
+    The argument `nbunch` restricts edges to those incident to nodes in nbunch.
+"""
+
+from abc import ABC
+from collections.abc import Mapping, Set
+
+import networkx as nx
+
+__all__ = [
+    "NodeView",
+    "NodeDataView",
+    "EdgeView",
+    "OutEdgeView",
+    "InEdgeView",
+    "EdgeDataView",
+    "OutEdgeDataView",
+    "InEdgeDataView",
+    "MultiEdgeView",
+    "OutMultiEdgeView",
+    "InMultiEdgeView",
+    "MultiEdgeDataView",
+    "OutMultiEdgeDataView",
+    "InMultiEdgeDataView",
+    "DegreeView",
+    "DiDegreeView",
+    "InDegreeView",
+    "OutDegreeView",
+    "MultiDegreeView",
+    "DiMultiDegreeView",
+    "InMultiDegreeView",
+    "OutMultiDegreeView",
+]
+
+
+# NodeViews
+class NodeView(Mapping, Set):
+    """A NodeView class to act as G.nodes for a NetworkX Graph
+
+    Set operations act on the nodes without considering data.
+    Iteration is over nodes. Node data can be looked up like a dict.
+    Use NodeDataView to iterate over node data or to specify a data
+    attribute for lookup. NodeDataView is created by calling the NodeView.
+
+    Parameters
+    ----------
+    graph : NetworkX graph-like class
+
+    Examples
+    --------
+    >>> G = nx.path_graph(3)
+    >>> NV = G.nodes()
+    >>> 2 in NV
+    True
+    >>> for n in NV:
+    ...     print(n)
+    0
+    1
+    2
+    >>> assert NV & {1, 2, 3} == {1, 2}
+
+    >>> G.add_node(2, color="blue")
+    >>> NV[2]
+    {'color': 'blue'}
+    >>> G.add_node(8, color="red")
+    >>> NDV = G.nodes(data=True)
+    >>> (2, NV[2]) in NDV
+    True
+    >>> for n, dd in NDV:
+    ...     print((n, dd.get("color", "aqua")))
+    (0, 'aqua')
+    (1, 'aqua')
+    (2, 'blue')
+    (8, 'red')
+    >>> NDV[2] == NV[2]
+    True
+
+    >>> NVdata = G.nodes(data="color", default="aqua")
+    >>> (2, NVdata[2]) in NVdata
+    True
+    >>> for n, dd in NVdata:
+    ...
print((n, dd)) + (0, 'aqua') + (1, 'aqua') + (2, 'blue') + (8, 'red') + >>> NVdata[2] == NV[2] # NVdata gets 'color', NV gets datadict + False + """ + + __slots__ = ("_nodes",) + + def __getstate__(self): + return {"_nodes": self._nodes} + + def __setstate__(self, state): + self._nodes = state["_nodes"] + + def __init__(self, graph): + self._nodes = graph._node + + # Mapping methods + def __len__(self): + return len(self._nodes) + + def __iter__(self): + return iter(self._nodes) + + def __getitem__(self, n): + if isinstance(n, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.nodes)[{n.start}:{n.stop}:{n.step}]" + ) + return self._nodes[n] + + # Set methods + def __contains__(self, n): + return n in self._nodes + + @classmethod + def _from_iterable(cls, it): + return set(it) + + # DataView method + def __call__(self, data=False, default=None): + if data is False: + return self + return NodeDataView(self._nodes, data, default) + + def data(self, data=True, default=None): + """ + Return a read-only view of node data. + + Parameters + ---------- + data : bool or node data key, default=True + If ``data=True`` (the default), return a `NodeDataView` object that + maps each node to *all* of its attributes. `data` may also be an + arbitrary key, in which case the `NodeDataView` maps each node to + the value for the keyed attribute. In this case, if a node does + not have the `data` attribute, the `default` value is used. + default : object, default=None + The value used when a node does not have a specific attribute. + + Returns + ------- + NodeDataView + The layout of the returned NodeDataView depends on the value of the + `data` parameter. + + Notes + ----- + If ``data=False``, returns a `NodeView` object without data. + + See Also + -------- + NodeDataView + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_nodes_from( + ... [ + ... (0, {"color": "red", "weight": 10}), + ... (1, {"color": "blue"}), + ... (2, {"color": "yellow", "weight": 2}), + ... ] + ... ) + + Accessing node data with ``data=True`` (the default) returns a + NodeDataView mapping each node to all of its attributes: + + >>> G.nodes.data() + NodeDataView({0: {'color': 'red', 'weight': 10}, 1: {'color': 'blue'}, 2: {'color': 'yellow', 'weight': 2}}) + + If `data` represents a key in the node attribute dict, a NodeDataView mapping + the nodes to the value for that specific key is returned: + + >>> G.nodes.data("color") + NodeDataView({0: 'red', 1: 'blue', 2: 'yellow'}, data='color') + + If a specific key is not found in an attribute dict, the value specified + by `default` is returned: + + >>> G.nodes.data("weight", default=-999) + NodeDataView({0: 10, 1: -999, 2: 2}, data='weight') + + Note that there is no check that the `data` key is in any of the + node attribute dictionaries: + + >>> G.nodes.data("height") + NodeDataView({0: None, 1: None, 2: None}, data='height') + """ + if data is False: + return self + return NodeDataView(self._nodes, data, default) + + def __str__(self): + return str(list(self)) + + def __repr__(self): + return f"{self.__class__.__name__}({tuple(self)})" + + +class NodeDataView(Set): + """A DataView class for nodes of a NetworkX Graph + + The main use for this class is to iterate through node-data pairs. + The data can be the entire data-dictionary for each node, or it + can be a specific attribute (with default) for each node. + Set operations are enabled with NodeDataView, but don't work in + cases where the data is not hashable. Use with caution. 
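+
+    For instance (a minimal sketch): set operations behave as expected
+    when the reported values are hashable, as with the string-valued
+    ``color`` attribute below (the attribute name is arbitrary).
+
+    >>> G = nx.path_graph(2)
+    >>> G.add_node(0, color="red")
+    >>> set(G.nodes(data="color")) == {(0, "red"), (1, None)}
+    True
+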
+ Typically, set operations on nodes use NodeView, not NodeDataView. + That is, they use `G.nodes` instead of `G.nodes(data='foo')`. + + Parameters + ========== + graph : NetworkX graph-like class + data : bool or string (default=False) + default : object (default=None) + """ + + __slots__ = ("_nodes", "_data", "_default") + + def __getstate__(self): + return {"_nodes": self._nodes, "_data": self._data, "_default": self._default} + + def __setstate__(self, state): + self._nodes = state["_nodes"] + self._data = state["_data"] + self._default = state["_default"] + + def __init__(self, nodedict, data=False, default=None): + self._nodes = nodedict + self._data = data + self._default = default + + @classmethod + def _from_iterable(cls, it): + try: + return set(it) + except TypeError as err: + if "unhashable" in str(err): + msg = " : Could be b/c data=True or your values are unhashable" + raise TypeError(str(err) + msg) from err + raise + + def __len__(self): + return len(self._nodes) + + def __iter__(self): + data = self._data + if data is False: + return iter(self._nodes) + if data is True: + return iter(self._nodes.items()) + return ( + (n, dd[data] if data in dd else self._default) + for n, dd in self._nodes.items() + ) + + def __contains__(self, n): + try: + node_in = n in self._nodes + except TypeError: + n, d = n + return n in self._nodes and self[n] == d + if node_in is True: + return node_in + try: + n, d = n + except (TypeError, ValueError): + return False + return n in self._nodes and self[n] == d + + def __getitem__(self, n): + if isinstance(n, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.nodes.data())[{n.start}:{n.stop}:{n.step}]" + ) + ddict = self._nodes[n] + data = self._data + if data is False or data is True: + return ddict + return ddict[data] if data in ddict else self._default + + def __str__(self): + return str(list(self)) + + def __repr__(self): + name = self.__class__.__name__ + if self._data is False: + return f"{name}({tuple(self)})" + if self._data is True: + return f"{name}({dict(self)})" + return f"{name}({dict(self)}, data={self._data!r})" + + +# DegreeViews +class DiDegreeView: + """A View class for degree of nodes in a NetworkX Graph + + The functionality is like dict.items() with (node, degree) pairs. + Additional functionality includes read-only lookup of node degree, + and calling with optional features nbunch (for only a subset of nodes) + and weight (use edge weights to compute degree). + + Parameters + ========== + graph : NetworkX graph-like class + nbunch : node, container of nodes, or None meaning all nodes (default=None) + weight : bool or string (default=None) + + Notes + ----- + DegreeView can still lookup any node even if nbunch is specified. 
+ + Examples + -------- + >>> G = nx.path_graph(3) + >>> DV = G.degree() + >>> assert DV[2] == 1 + >>> assert sum(deg for n, deg in DV) == 4 + + >>> DVweight = G.degree(weight="span") + >>> G.add_edge(1, 2, span=34) + >>> DVweight[2] + 34 + >>> DVweight[0] # default edge weight is 1 + 1 + >>> sum(span for n, span in DVweight) # sum weighted degrees + 70 + + >>> DVnbunch = G.degree(nbunch=(1, 2)) + >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only + """ + + def __init__(self, G, nbunch=None, weight=None): + self._graph = G + self._succ = G._succ if hasattr(G, "_succ") else G._adj + self._pred = G._pred if hasattr(G, "_pred") else G._adj + self._nodes = self._succ if nbunch is None else list(G.nbunch_iter(nbunch)) + self._weight = weight + + def __call__(self, nbunch=None, weight=None): + if nbunch is None: + if weight == self._weight: + return self + return self.__class__(self._graph, None, weight) + try: + if nbunch in self._nodes: + if weight == self._weight: + return self[nbunch] + return self.__class__(self._graph, None, weight)[nbunch] + except TypeError: + pass + return self.__class__(self._graph, nbunch, weight) + + def __getitem__(self, n): + weight = self._weight + succs = self._succ[n] + preds = self._pred[n] + if weight is None: + return len(succs) + len(preds) + return sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + succs = self._succ[n] + preds = self._pred[n] + yield (n, len(succs) + len(preds)) + else: + for n in self._nodes: + succs = self._succ[n] + preds = self._pred[n] + deg = sum(dd.get(weight, 1) for dd in succs.values()) + sum( + dd.get(weight, 1) for dd in preds.values() + ) + yield (n, deg) + + def __len__(self): + return len(self._nodes) + + def __str__(self): + return str(list(self)) + + def __repr__(self): + return f"{self.__class__.__name__}({dict(self)})" + + +class DegreeView(DiDegreeView): + """A DegreeView class to act as G.degree for a NetworkX Graph + + Typical usage focuses on iteration over `(node, degree)` pairs. + The degree is by default the number of edges incident to the node. + Optional argument `weight` enables weighted degree using the edge + attribute named in the `weight` argument. Reporting and iteration + can also be restricted to a subset of nodes using `nbunch`. + + Additional functionality include node lookup so that `G.degree[n]` + reported the (possibly weighted) degree of node `n`. Calling the + view creates a view with different arguments `nbunch` or `weight`. + + Parameters + ========== + graph : NetworkX graph-like class + nbunch : node, container of nodes, or None meaning all nodes (default=None) + weight : string or None (default=None) + + Notes + ----- + DegreeView can still lookup any node even if nbunch is specified. 
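+
+    For instance (a minimal sketch of the nbunch lookup behavior):
+
+    >>> G = nx.path_graph(3)
+    >>> D = G.degree(nbunch=[0])
+    >>> D[2]  # lookup works even though node 2 is not in nbunch
+    1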
+ + Examples + -------- + >>> G = nx.path_graph(3) + >>> DV = G.degree() + >>> assert DV[2] == 1 + >>> assert G.degree[2] == 1 + >>> assert sum(deg for n, deg in DV) == 4 + + >>> DVweight = G.degree(weight="span") + >>> G.add_edge(1, 2, span=34) + >>> DVweight[2] + 34 + >>> DVweight[0] # default edge weight is 1 + 1 + >>> sum(span for n, span in DVweight) # sum weighted degrees + 70 + + >>> DVnbunch = G.degree(nbunch=(1, 2)) + >>> assert len(list(DVnbunch)) == 2 # iteration over nbunch only + """ + + def __getitem__(self, n): + weight = self._weight + nbrs = self._succ[n] + if weight is None: + return len(nbrs) + (n in nbrs) + return sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + nbrs = self._succ[n] + yield (n, len(nbrs) + (n in nbrs)) + else: + for n in self._nodes: + nbrs = self._succ[n] + deg = sum(dd.get(weight, 1) for dd in nbrs.values()) + ( + n in nbrs and nbrs[n].get(weight, 1) + ) + yield (n, deg) + + +class OutDegreeView(DiDegreeView): + """A DegreeView class to report out_degree for a DiGraph; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + nbrs = self._succ[n] + if self._weight is None: + return len(nbrs) + return sum(dd.get(self._weight, 1) for dd in nbrs.values()) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + succs = self._succ[n] + yield (n, len(succs)) + else: + for n in self._nodes: + succs = self._succ[n] + deg = sum(dd.get(weight, 1) for dd in succs.values()) + yield (n, deg) + + +class InDegreeView(DiDegreeView): + """A DegreeView class to report in_degree for a DiGraph; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + nbrs = self._pred[n] + if weight is None: + return len(nbrs) + return sum(dd.get(weight, 1) for dd in nbrs.values()) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + preds = self._pred[n] + yield (n, len(preds)) + else: + for n in self._nodes: + preds = self._pred[n] + deg = sum(dd.get(weight, 1) for dd in preds.values()) + yield (n, deg) + + +class MultiDegreeView(DiDegreeView): + """A DegreeView class for undirected multigraphs; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + nbrs = self._succ[n] + if weight is None: + return sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) + # edge weighted graph - degree is sum of nbr edge weights + deg = sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) + if n in nbrs: + deg += sum(d.get(weight, 1) for d in nbrs[n].values()) + return deg + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + nbrs = self._succ[n] + deg = sum(len(keys) for keys in nbrs.values()) + ( + n in nbrs and len(nbrs[n]) + ) + yield (n, deg) + else: + for n in self._nodes: + nbrs = self._succ[n] + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) + if n in nbrs: + deg += sum(d.get(weight, 1) for d in nbrs[n].values()) + yield (n, deg) + + +class DiMultiDegreeView(DiDegreeView): + """A DegreeView class for MultiDiGraph; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + succs = self._succ[n] + preds = self._pred[n] + if weight is None: + return sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) + # edge weighted graph - degree 
is sum of nbr edge weights + deg = sum( + d.get(weight, 1) for key_dict in succs.values() for d in key_dict.values() + ) + sum( + d.get(weight, 1) for key_dict in preds.values() for d in key_dict.values() + ) + return deg + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + succs = self._succ[n] + preds = self._pred[n] + deg = sum(len(keys) for keys in succs.values()) + sum( + len(keys) for keys in preds.values() + ) + yield (n, deg) + else: + for n in self._nodes: + succs = self._succ[n] + preds = self._pred[n] + deg = sum( + d.get(weight, 1) + for key_dict in succs.values() + for d in key_dict.values() + ) + sum( + d.get(weight, 1) + for key_dict in preds.values() + for d in key_dict.values() + ) + yield (n, deg) + + +class InMultiDegreeView(DiDegreeView): + """A DegreeView class for inward degree of MultiDiGraph; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + nbrs = self._pred[n] + if weight is None: + return sum(len(data) for data in nbrs.values()) + # edge weighted graph - degree is sum of nbr edge weights + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + nbrs = self._pred[n] + deg = sum(len(data) for data in nbrs.values()) + yield (n, deg) + else: + for n in self._nodes: + nbrs = self._pred[n] + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) + yield (n, deg) + + +class OutMultiDegreeView(DiDegreeView): + """A DegreeView class for outward degree of MultiDiGraph; See DegreeView""" + + def __getitem__(self, n): + weight = self._weight + nbrs = self._succ[n] + if weight is None: + return sum(len(data) for data in nbrs.values()) + # edge weighted graph - degree is sum of nbr edge weights + return sum( + d.get(weight, 1) for key_dict in nbrs.values() for d in key_dict.values() + ) + + def __iter__(self): + weight = self._weight + if weight is None: + for n in self._nodes: + nbrs = self._succ[n] + deg = sum(len(data) for data in nbrs.values()) + yield (n, deg) + else: + for n in self._nodes: + nbrs = self._succ[n] + deg = sum( + d.get(weight, 1) + for key_dict in nbrs.values() + for d in key_dict.values() + ) + yield (n, deg) + + +# A base class for all edge views. 
Ensures all edge view and edge data view +# objects/classes are captured by `isinstance(obj, EdgeViewABC)` and +# `issubclass(cls, EdgeViewABC)` respectively +class EdgeViewABC(ABC): + pass + + +# EdgeDataViews +class OutEdgeDataView(EdgeViewABC): + """EdgeDataView for outward edges of DiGraph; See EdgeDataView""" + + __slots__ = ( + "_viewer", + "_nbunch", + "_data", + "_default", + "_adjdict", + "_nodes_nbrs", + "_report", + ) + + def __getstate__(self): + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "data": self._data, + "default": self._default, + } + + def __setstate__(self, state): + self.__init__(**state) + + def __init__(self, viewer, nbunch=None, data=False, *, default=None): + self._viewer = viewer + adjdict = self._adjdict = viewer._adjdict + if nbunch is None: + self._nodes_nbrs = adjdict.items + else: + # dict retains order of nodes but acts like a set + nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch)) + self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch] + self._nbunch = nbunch + self._data = data + self._default = default + # Set _report based on data and default + if data is True: + self._report = lambda n, nbr, dd: (n, nbr, dd) + elif data is False: + self._report = lambda n, nbr, dd: (n, nbr) + else: # data is attribute name + self._report = ( + lambda n, nbr, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) + + def __len__(self): + return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) + + def __iter__(self): + return ( + self._report(n, nbr, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch: + return False # this edge doesn't start in nbunch + try: + ddict = self._adjdict[u][v] + except KeyError: + return False + return e == self._report(u, v, ddict) + + def __str__(self): + return str(list(self)) + + def __repr__(self): + return f"{self.__class__.__name__}({list(self)})" + + +class EdgeDataView(OutEdgeDataView): + """A EdgeDataView class for edges of Graph + + This view is primarily used to iterate over the edges reporting + edges as node-tuples with edge data optionally reported. The + argument `nbunch` allows restriction to edges incident to nodes + in that container/singleton. The default (nbunch=None) + reports all edges. The arguments `data` and `default` control + what edge data is reported. The default `data is False` reports + only node-tuples for each edge. If `data is True` the entire edge + data dict is returned. Otherwise `data` is assumed to hold the name + of the edge attribute to report with default `default` if that + edge attribute is not present. 
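+
+    A compact sketch of the three reporting forms described above (the
+    attribute name ``"w"`` is arbitrary):
+
+    >>> G = nx.Graph([(0, 1, {"w": 2})])
+    >>> list(G.edges())  # data=False: 2-tuples
+    [(0, 1)]
+    >>> list(G.edges(data=True))  # data=True: full data dicts
+    [(0, 1, {'w': 2})]
+    >>> list(G.edges(data="w", default=1))  # named attribute with default
+    [(0, 1, 2)]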
+ + Parameters + ---------- + nbunch : container of nodes, node or None (default None) + data : False, True or string (default False) + default : default value (default None) + + Examples + -------- + >>> G = nx.path_graph(3) + >>> G.add_edge(1, 2, foo="bar") + >>> list(G.edges(data="foo", default="biz")) + [(0, 1, 'biz'), (1, 2, 'bar')] + >>> assert (0, 1, "biz") in G.edges(data="foo", default="biz") + """ + + __slots__ = () + + def __len__(self): + return sum(1 for e in self) + + def __iter__(self): + seen = {} + for n, nbrs in self._nodes_nbrs(): + for nbr, dd in nbrs.items(): + if nbr not in seen: + yield self._report(n, nbr, dd) + seen[n] = 1 + del seen + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch: + return False # this edge doesn't start and it doesn't end in nbunch + try: + ddict = self._adjdict[u][v] + except KeyError: + return False + return e == self._report(u, v, ddict) + + +class InEdgeDataView(OutEdgeDataView): + """An EdgeDataView class for outward edges of DiGraph; See EdgeDataView""" + + __slots__ = () + + def __iter__(self): + return ( + self._report(nbr, n, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, dd in nbrs.items() + ) + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and v not in self._nbunch: + return False # this edge doesn't end in nbunch + try: + ddict = self._adjdict[v][u] + except KeyError: + return False + return e == self._report(u, v, ddict) + + +class OutMultiEdgeDataView(OutEdgeDataView): + """An EdgeDataView for outward edges of MultiDiGraph; See EdgeDataView""" + + __slots__ = ("keys",) + + def __getstate__(self): + return { + "viewer": self._viewer, + "nbunch": self._nbunch, + "keys": self.keys, + "data": self._data, + "default": self._default, + } + + def __setstate__(self, state): + self.__init__(**state) + + def __init__(self, viewer, nbunch=None, data=False, *, default=None, keys=False): + self._viewer = viewer + adjdict = self._adjdict = viewer._adjdict + self.keys = keys + if nbunch is None: + self._nodes_nbrs = adjdict.items + else: + # dict retains order of nodes but acts like a set + nbunch = dict.fromkeys(viewer._graph.nbunch_iter(nbunch)) + self._nodes_nbrs = lambda: [(n, adjdict[n]) for n in nbunch] + self._nbunch = nbunch + self._data = data + self._default = default + # Set _report based on data and default + if data is True: + if keys is True: + self._report = lambda n, nbr, k, dd: (n, nbr, k, dd) + else: + self._report = lambda n, nbr, k, dd: (n, nbr, dd) + elif data is False: + if keys is True: + self._report = lambda n, nbr, k, dd: (n, nbr, k) + else: + self._report = lambda n, nbr, k, dd: (n, nbr) + else: # data is attribute name + if keys is True: + self._report = ( + lambda n, nbr, k, dd: (n, nbr, k, dd[data]) + if data in dd + else (n, nbr, k, default) + ) + else: + self._report = ( + lambda n, nbr, k, dd: (n, nbr, dd[data]) + if data in dd + else (n, nbr, default) + ) + + def __len__(self): + return sum(1 for e in self) + + def __iter__(self): + return ( + self._report(n, nbr, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch: + return False # this edge doesn't start in nbunch + try: + kdict = self._adjdict[u][v] + except KeyError: + return False + if self.keys is True: + k = e[2] + try: + dd = kdict[k] + except KeyError: + return False + return e == 
self._report(u, v, k, dd) + return any(e == self._report(u, v, k, dd) for k, dd in kdict.items()) + + +class MultiEdgeDataView(OutMultiEdgeDataView): + """An EdgeDataView class for edges of MultiGraph; See EdgeDataView""" + + __slots__ = () + + def __iter__(self): + seen = {} + for n, nbrs in self._nodes_nbrs(): + for nbr, kd in nbrs.items(): + if nbr not in seen: + for k, dd in kd.items(): + yield self._report(n, nbr, k, dd) + seen[n] = 1 + del seen + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and u not in self._nbunch and v not in self._nbunch: + return False # this edge doesn't start and doesn't end in nbunch + try: + kdict = self._adjdict[u][v] + except KeyError: + try: + kdict = self._adjdict[v][u] + except KeyError: + return False + if self.keys is True: + k = e[2] + try: + dd = kdict[k] + except KeyError: + return False + return e == self._report(u, v, k, dd) + return any(e == self._report(u, v, k, dd) for k, dd in kdict.items()) + + +class InMultiEdgeDataView(OutMultiEdgeDataView): + """An EdgeDataView for inward edges of MultiDiGraph; See EdgeDataView""" + + __slots__ = () + + def __iter__(self): + return ( + self._report(nbr, n, k, dd) + for n, nbrs in self._nodes_nbrs() + for nbr, kd in nbrs.items() + for k, dd in kd.items() + ) + + def __contains__(self, e): + u, v = e[:2] + if self._nbunch is not None and v not in self._nbunch: + return False # this edge doesn't end in nbunch + try: + kdict = self._adjdict[v][u] + except KeyError: + return False + if self.keys is True: + k = e[2] + dd = kdict[k] + return e == self._report(u, v, k, dd) + return any(e == self._report(u, v, k, dd) for k, dd in kdict.items()) + + +# EdgeViews have set operations and no data reported +class OutEdgeView(Set, Mapping, EdgeViewABC): + """A EdgeView class for outward edges of a DiGraph""" + + __slots__ = ("_adjdict", "_graph", "_nodes_nbrs") + + def __getstate__(self): + return {"_graph": self._graph, "_adjdict": self._adjdict} + + def __setstate__(self, state): + self._graph = state["_graph"] + self._adjdict = state["_adjdict"] + self._nodes_nbrs = self._adjdict.items + + @classmethod + def _from_iterable(cls, it): + return set(it) + + dataview = OutEdgeDataView + + def __init__(self, G): + self._graph = G + self._adjdict = G._succ if hasattr(G, "succ") else G._adj + self._nodes_nbrs = self._adjdict.items + + # Set methods + def __len__(self): + return sum(len(nbrs) for n, nbrs in self._nodes_nbrs()) + + def __iter__(self): + for n, nbrs in self._nodes_nbrs(): + for nbr in nbrs: + yield (n, nbr) + + def __contains__(self, e): + try: + u, v = e + return v in self._adjdict[u] + except KeyError: + return False + + # Mapping Methods + def __getitem__(self, e): + if isinstance(e, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.edges)[{e.start}:{e.stop}:{e.step}]" + ) + u, v = e + try: + return self._adjdict[u][v] + except KeyError as ex: # Customize msg to indicate exception origin + raise KeyError(f"The edge {e} is not in the graph.") + + # EdgeDataView methods + def __call__(self, nbunch=None, data=False, *, default=None): + if nbunch is None and data is False: + return self + return self.dataview(self, nbunch, data, default=default) + + def data(self, data=True, default=None, nbunch=None): + """ + Return a read-only view of edge data. + + Parameters + ---------- + data : bool or edge attribute key + If ``data=True``, then the data view maps each edge to a dictionary + containing all of its attributes. 
If `data` is a key in the edge + dictionary, then the data view maps each edge to its value for + the keyed attribute. In this case, if the edge doesn't have the + attribute, the `default` value is returned. + default : object, default=None + The value used when an edge does not have a specific attribute + nbunch : container of nodes, optional (default=None) + Allows restriction to edges only involving certain nodes. All edges + are considered by default. + + Returns + ------- + dataview + Returns an `EdgeDataView` for undirected Graphs, `OutEdgeDataView` + for DiGraphs, `MultiEdgeDataView` for MultiGraphs and + `OutMultiEdgeDataView` for MultiDiGraphs. + + Notes + ----- + If ``data=False``, returns an `EdgeView` without any edge data. + + See Also + -------- + EdgeDataView + OutEdgeDataView + MultiEdgeDataView + OutMultiEdgeDataView + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edges_from( + ... [ + ... (0, 1, {"dist": 3, "capacity": 20}), + ... (1, 2, {"dist": 4}), + ... (2, 0, {"dist": 5}), + ... ] + ... ) + + Accessing edge data with ``data=True`` (the default) returns an + edge data view object listing each edge with all of its attributes: + + >>> G.edges.data() + EdgeDataView([(0, 1, {'dist': 3, 'capacity': 20}), (0, 2, {'dist': 5}), (1, 2, {'dist': 4})]) + + If `data` represents a key in the edge attribute dict, a dataview listing + each edge with its value for that specific key is returned: + + >>> G.edges.data("dist") + EdgeDataView([(0, 1, 3), (0, 2, 5), (1, 2, 4)]) + + `nbunch` can be used to limit the edges: + + >>> G.edges.data("dist", nbunch=[0]) + EdgeDataView([(0, 1, 3), (0, 2, 5)]) + + If a specific key is not found in an edge attribute dict, the value + specified by `default` is used: + + >>> G.edges.data("capacity") + EdgeDataView([(0, 1, 20), (0, 2, None), (1, 2, None)]) + + Note that there is no check that the `data` key is present in any of + the edge attribute dictionaries: + + >>> G.edges.data("speed") + EdgeDataView([(0, 1, None), (0, 2, None), (1, 2, None)]) + """ + if nbunch is None and data is False: + return self + return self.dataview(self, nbunch, data, default=default) + + # String Methods + def __str__(self): + return str(list(self)) + + def __repr__(self): + return f"{self.__class__.__name__}({list(self)})" + + +class EdgeView(OutEdgeView): + """A EdgeView class for edges of a Graph + + This densely packed View allows iteration over edges, data lookup + like a dict and set operations on edges represented by node-tuples. + In addition, edge data can be controlled by calling this object + possibly creating an EdgeDataView. Typically edges are iterated over + and reported as `(u, v)` node tuples or `(u, v, key)` node/key tuples + for multigraphs. Those edge representations can also be using to + lookup the data dict for any edge. Set operations also are available + where those tuples are the elements of the set. + Calling this object with optional arguments `data`, `default` and `keys` + controls the form of the tuple (see EdgeDataView). Optional argument + `nbunch` allows restriction to edges only involving certain nodes. + + If `data is False` (the default) then iterate over 2-tuples `(u, v)`. + If `data is True` iterate over 3-tuples `(u, v, datadict)`. + Otherwise iterate over `(u, v, datadict.get(data, default))`. + For Multigraphs, if `keys is True`, replace `u, v` with `u, v, key` above. 
+ + Parameters + ========== + graph : NetworkX graph-like class + nbunch : (default= all nodes in graph) only report edges with these nodes + keys : (only for MultiGraph. default=False) report edge key in tuple + data : bool or string (default=False) see above + default : object (default=None) + + Examples + ======== + >>> G = nx.path_graph(4) + >>> EV = G.edges() + >>> (2, 3) in EV + True + >>> for u, v in EV: + ... print((u, v)) + (0, 1) + (1, 2) + (2, 3) + >>> assert EV & {(1, 2), (3, 4)} == {(1, 2)} + + >>> EVdata = G.edges(data="color", default="aqua") + >>> G.add_edge(2, 3, color="blue") + >>> assert (2, 3, "blue") in EVdata + >>> for u, v, c in EVdata: + ... print(f"({u}, {v}) has color: {c}") + (0, 1) has color: aqua + (1, 2) has color: aqua + (2, 3) has color: blue + + >>> EVnbunch = G.edges(nbunch=2) + >>> assert (2, 3) in EVnbunch + >>> assert (0, 1) not in EVnbunch + >>> for u, v in EVnbunch: + ... assert u == 2 or v == 2 + + >>> MG = nx.path_graph(4, create_using=nx.MultiGraph) + >>> EVmulti = MG.edges(keys=True) + >>> (2, 3, 0) in EVmulti + True + >>> (2, 3) in EVmulti # 2-tuples work even when keys is True + True + >>> key = MG.add_edge(2, 3) + >>> for u, v, k in EVmulti: + ... print((u, v, k)) + (0, 1, 0) + (1, 2, 0) + (2, 3, 0) + (2, 3, 1) + """ + + __slots__ = () + + dataview = EdgeDataView + + def __len__(self): + num_nbrs = (len(nbrs) + (n in nbrs) for n, nbrs in self._nodes_nbrs()) + return sum(num_nbrs) // 2 + + def __iter__(self): + seen = {} + for n, nbrs in self._nodes_nbrs(): + for nbr in list(nbrs): + if nbr not in seen: + yield (n, nbr) + seen[n] = 1 + del seen + + def __contains__(self, e): + try: + u, v = e[:2] + return v in self._adjdict[u] or u in self._adjdict[v] + except (KeyError, ValueError): + return False + + +class InEdgeView(OutEdgeView): + """A EdgeView class for inward edges of a DiGraph""" + + __slots__ = () + + def __setstate__(self, state): + self._graph = state["_graph"] + self._adjdict = state["_adjdict"] + self._nodes_nbrs = self._adjdict.items + + dataview = InEdgeDataView + + def __init__(self, G): + self._graph = G + self._adjdict = G._pred if hasattr(G, "pred") else G._adj + self._nodes_nbrs = self._adjdict.items + + def __iter__(self): + for n, nbrs in self._nodes_nbrs(): + for nbr in nbrs: + yield (nbr, n) + + def __contains__(self, e): + try: + u, v = e + return u in self._adjdict[v] + except KeyError: + return False + + def __getitem__(self, e): + if isinstance(e, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.in_edges)[{e.start}:{e.stop}:{e.step}]" + ) + u, v = e + return self._adjdict[v][u] + + +class OutMultiEdgeView(OutEdgeView): + """A EdgeView class for outward edges of a MultiDiGraph""" + + __slots__ = () + + dataview = OutMultiEdgeDataView + + def __len__(self): + return sum( + len(kdict) for n, nbrs in self._nodes_nbrs() for nbr, kdict in nbrs.items() + ) + + def __iter__(self): + for n, nbrs in self._nodes_nbrs(): + for nbr, kdict in nbrs.items(): + for key in kdict: + yield (n, nbr, key) + + def __contains__(self, e): + N = len(e) + if N == 3: + u, v, k = e + elif N == 2: + u, v = e + k = 0 + else: + raise ValueError("MultiEdge must have length 2 or 3") + try: + return k in self._adjdict[u][v] + except KeyError: + return False + + def __getitem__(self, e): + if isinstance(e, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.edges)[{e.start}:{e.stop}:{e.step}]" + ) + u, v, k = e + return self._adjdict[u][v][k] + + def 
__call__(self, nbunch=None, data=False, *, default=None, keys=False): + if nbunch is None and data is False and keys is True: + return self + return self.dataview(self, nbunch, data, default=default, keys=keys) + + def data(self, data=True, default=None, nbunch=None, keys=False): + if nbunch is None and data is False and keys is True: + return self + return self.dataview(self, nbunch, data, default=default, keys=keys) + + +class MultiEdgeView(OutMultiEdgeView): + """A EdgeView class for edges of a MultiGraph""" + + __slots__ = () + + dataview = MultiEdgeDataView + + def __len__(self): + return sum(1 for e in self) + + def __iter__(self): + seen = {} + for n, nbrs in self._nodes_nbrs(): + for nbr, kd in nbrs.items(): + if nbr not in seen: + for k, dd in kd.items(): + yield (n, nbr, k) + seen[n] = 1 + del seen + + +class InMultiEdgeView(OutMultiEdgeView): + """A EdgeView class for inward edges of a MultiDiGraph""" + + __slots__ = () + + def __setstate__(self, state): + self._graph = state["_graph"] + self._adjdict = state["_adjdict"] + self._nodes_nbrs = self._adjdict.items + + dataview = InMultiEdgeDataView + + def __init__(self, G): + self._graph = G + self._adjdict = G._pred if hasattr(G, "pred") else G._adj + self._nodes_nbrs = self._adjdict.items + + def __iter__(self): + for n, nbrs in self._nodes_nbrs(): + for nbr, kdict in nbrs.items(): + for key in kdict: + yield (nbr, n, key) + + def __contains__(self, e): + N = len(e) + if N == 3: + u, v, k = e + elif N == 2: + u, v = e + k = 0 + else: + raise ValueError("MultiEdge must have length 2 or 3") + try: + return k in self._adjdict[v][u] + except KeyError: + return False + + def __getitem__(self, e): + if isinstance(e, slice): + raise nx.NetworkXError( + f"{type(self).__name__} does not support slicing, " + f"try list(G.in_edges)[{e.start}:{e.stop}:{e.step}]" + ) + u, v, k = e + return self._adjdict[v][u][k] diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3b9aa5e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-312.pyc new file mode 100644 index 0000000..9e4ca72 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/dispatch_interface.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-312.pyc new file mode 100644 index 0000000..58fd56f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/historical_tests.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-312.pyc new file mode 100644 index 0000000..2821f0c Binary 
files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_coreviews.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-312.pyc new file mode 100644 index 0000000..42f5ab4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-312.pyc new file mode 100644 index 0000000..58f2811 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_digraph_historical.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-312.pyc new file mode 100644 index 0000000..2315a71 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_filters.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-312.pyc new file mode 100644 index 0000000..4b0e358 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_function.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-312.pyc new file mode 100644 index 0000000..c4ae68f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-312.pyc new file mode 100644 index 0000000..e7c8b28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graph_historical.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-312.pyc new file mode 100644 index 0000000..dc14a49 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_graphviews.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-312.pyc new file mode 100644 index 0000000..761e2c6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multidigraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-312.pyc new file mode 100644 index 0000000..59df1a8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_multigraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-312.pyc new file mode 100644 index 0000000..18f6794 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_reportviews.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-312.pyc new file mode 100644 index 0000000..a434a2e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_special.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-312.pyc new file mode 100644 index 0000000..edee9c8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/classes/tests/__pycache__/test_subgraphviews.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/dispatch_interface.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/dispatch_interface.py new file mode 100644 index 0000000..5cc908d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/dispatch_interface.py @@ -0,0 +1,185 @@ +# This file contains utilities for testing the dispatching feature + +# A full test of all dispatchable algorithms is performed by +# modifying the pytest invocation and setting an environment variable +# NETWORKX_TEST_BACKEND=nx_loopback pytest +# This is comprehensive, but only tests the `test_override_dispatch` +# function in networkx.classes.backends. + +# To test the `_dispatchable` function directly, several tests scattered throughout +# NetworkX have been augmented to test normal and dispatch mode. +# Searching for `dispatch_interface` should locate the specific tests. 
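+ +# For example (a sketch, assuming the "nx_loopback" backend defined below is +# registered under NetworkX's backend entry point, as the test suite arranges), +# a dispatchable call can also be routed through the backend explicitly: +# +# import networkx as nx +# G = nx.path_graph(4) +# nx.betweenness_centrality(G, backend="nx_loopback") +# +# The loopback backend converts the graph and then calls the original +# NetworkX implementation, so such a call should reproduce the default result.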
+ +import networkx as nx +from networkx import DiGraph, Graph, MultiDiGraph, MultiGraph, PlanarEmbedding +from networkx.classes.reportviews import NodeView + + +class LoopbackGraph(Graph): + __networkx_backend__ = "nx_loopback" + + +class LoopbackDiGraph(DiGraph): + __networkx_backend__ = "nx_loopback" + + +class LoopbackMultiGraph(MultiGraph): + __networkx_backend__ = "nx_loopback" + + +class LoopbackMultiDiGraph(MultiDiGraph): + __networkx_backend__ = "nx_loopback" + + +class LoopbackPlanarEmbedding(PlanarEmbedding): + __networkx_backend__ = "nx_loopback" + + +def convert(graph): + if isinstance(graph, PlanarEmbedding): + return LoopbackPlanarEmbedding(graph) + if isinstance(graph, MultiDiGraph): + return LoopbackMultiDiGraph(graph) + if isinstance(graph, MultiGraph): + return LoopbackMultiGraph(graph) + if isinstance(graph, DiGraph): + return LoopbackDiGraph(graph) + if isinstance(graph, Graph): + return LoopbackGraph(graph) + raise TypeError(f"Unsupported type of graph: {type(graph)}") + + +class LoopbackBackendInterface: + def __getattr__(self, item): + try: + return nx.utils.backends._registered_algorithms[item].orig_func + except KeyError: + raise AttributeError(item) from None + + @staticmethod + def convert_from_nx( + graph, + *, + edge_attrs=None, + node_attrs=None, + preserve_edge_attrs=None, + preserve_node_attrs=None, + preserve_graph_attrs=None, + name=None, + graph_name=None, + ): + if name in { + # Raise if input graph changes. See test_dag.py::test_topological_sort6 + "lexicographical_topological_sort", + "topological_generations", + "topological_sort", + # Would be nice to some day avoid these cutoffs of full testing + }: + return graph + if isinstance(graph, NodeView): + # Convert to a Graph with only nodes (no edges) + new_graph = Graph() + new_graph.add_nodes_from(graph.items()) + graph = new_graph + G = LoopbackGraph() + elif not isinstance(graph, Graph): + raise TypeError( + f"Bad type for graph argument {graph_name} in {name}: {type(graph)}" + ) + elif graph.__class__ in {Graph, LoopbackGraph}: + G = LoopbackGraph() + elif graph.__class__ in {DiGraph, LoopbackDiGraph}: + G = LoopbackDiGraph() + elif graph.__class__ in {MultiGraph, LoopbackMultiGraph}: + G = LoopbackMultiGraph() + elif graph.__class__ in {MultiDiGraph, LoopbackMultiDiGraph}: + G = LoopbackMultiDiGraph() + elif graph.__class__ in {PlanarEmbedding, LoopbackPlanarEmbedding}: + G = LoopbackDiGraph() # or LoopbackPlanarEmbedding + else: + # Would be nice to handle these better some day + # nx.algorithms.approximation.kcomponents._AntiGraph + # nx.classes.tests.test_multidigraph.MultiDiGraphSubClass + # nx.classes.tests.test_multigraph.MultiGraphSubClass + G = graph.__class__() + + if preserve_graph_attrs: + G.graph.update(graph.graph) + + # add nodes + G.add_nodes_from(graph) + if preserve_node_attrs: + for n, dd in G._node.items(): + dd.update(graph.nodes[n]) + elif node_attrs: + for n, dd in G._node.items(): + dd.update( + (attr, graph._node[n].get(attr, default)) + for attr, default in node_attrs.items() + if default is not None or attr in graph._node[n] + ) + + # tools to build datadict and keydict + if preserve_edge_attrs: + + def G_new_datadict(old_dd): + return G.edge_attr_dict_factory(old_dd) + elif edge_attrs: + + def G_new_datadict(old_dd): + return G.edge_attr_dict_factory( + (attr, old_dd.get(attr, default)) + for attr, default in edge_attrs.items() + if default is not None or attr in old_dd + ) + else: + + def G_new_datadict(old_dd): + return G.edge_attr_dict_factory() + + if 
G.is_multigraph(): + + def G_new_inner(keydict): + kd = G.adjlist_inner_dict_factory( + (k, G_new_datadict(dd)) for k, dd in keydict.items() + ) + return kd + else: + G_new_inner = G_new_datadict + + # add edges keeping the same order in _adj and _pred + G_adj = G._adj + if G.is_directed(): + for n, nbrs in graph._adj.items(): + G_adj[n].update((nbr, G_new_inner(dd)) for nbr, dd in nbrs.items()) + # ensure same datadict for pred and adj; and pred order of graph._pred + G_pred = G._pred + for n, nbrs in graph._pred.items(): + G_pred[n].update((nbr, G_adj[nbr][n]) for nbr in nbrs) + else: # undirected + for n, nbrs in graph._adj.items(): + # ensure same datadict for both ways; and adj order of graph._adj + G_adj[n].update( + (nbr, G_adj[nbr][n] if n in G_adj[nbr] else G_new_inner(dd)) + for nbr, dd in nbrs.items() + ) + + return G + + @staticmethod + def convert_to_nx(obj, *, name=None): + return obj + + @staticmethod + def on_start_tests(items): + # Verify that items can be xfailed + for item in items: + assert hasattr(item, "add_marker") + + def can_run(self, name, args, kwargs): + # It is unnecessary to define this function if algorithms are fully supported. + # We include it for illustration purposes. + return hasattr(self, name) + + +backend_interface = LoopbackBackendInterface() diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/historical_tests.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/historical_tests.py new file mode 100644 index 0000000..9dad24e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/historical_tests.py @@ -0,0 +1,475 @@ +"""Original NetworkX graph tests""" + +import pytest + +import networkx as nx +from networkx import convert_node_labels_to_integers as cnlti +from networkx.utils import edges_equal, nodes_equal + + +class HistoricalTests: + @classmethod + def setup_class(cls): + cls.null = nx.null_graph() + cls.P1 = cnlti(nx.path_graph(1), first_label=1) + cls.P3 = cnlti(nx.path_graph(3), first_label=1) + cls.P10 = cnlti(nx.path_graph(10), first_label=1) + cls.K1 = cnlti(nx.complete_graph(1), first_label=1) + cls.K3 = cnlti(nx.complete_graph(3), first_label=1) + cls.K4 = cnlti(nx.complete_graph(4), first_label=1) + cls.K5 = cnlti(nx.complete_graph(5), first_label=1) + cls.K10 = cnlti(nx.complete_graph(10), first_label=1) + cls.G = nx.Graph + + def test_name(self): + G = self.G(name="test") + assert G.name == "test" + H = self.G() + assert H.name == "" + + # Nodes + + def test_add_remove_node(self): + G = self.G() + G.add_node("A") + assert G.has_node("A") + G.remove_node("A") + assert not G.has_node("A") + + def test_nonhashable_node(self): + # Test if a non-hashable object is in the Graph. A python dict will + # raise a TypeError, but for a Graph class a simple False should be + # returned (see Graph __contains__). If it cannot be a node then it is + # not a node. 
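+ # For example, ["A"] in {} raises "TypeError: unhashable type: 'list'", + # while ["A"] in nx.Graph() simply evaluates to False.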
+ G = self.G() + assert not G.has_node(["A"]) + assert not G.has_node({"A": 1}) + + def test_add_nodes_from(self): + G = self.G() + G.add_nodes_from(list("ABCDEFGHIJKL")) + assert G.has_node("L") + G.remove_nodes_from(["H", "I", "J", "K", "L"]) + G.add_nodes_from([1, 2, 3, 4]) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + 4, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + ] + # test __iter__ + assert sorted(G, key=str) == [1, 2, 3, 4, "A", "B", "C", "D", "E", "F", "G"] + + def test_contains(self): + G = self.G() + G.add_node("A") + assert "A" in G + assert [] not in G # never raise a Key or TypeError in this test + assert {1: 1} not in G + + def test_add_remove(self): + # Test add_node and remove_node acting for various nbunch + G = self.G() + G.add_node("m") + assert G.has_node("m") + G.add_node("m") # no complaints + pytest.raises(nx.NetworkXError, G.remove_node, "j") + G.remove_node("m") + assert list(G) == [] + + def test_nbunch_is_list(self): + G = self.G() + G.add_nodes_from(list("ABCD")) + G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph) + assert sorted(G.nodes(), key=str) == [1, 2, 3, "A", "B", "C", "D"] + G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D"] + + def test_nbunch_is_set(self): + G = self.G() + nbunch = set("ABCDEFGHIJKL") + G.add_nodes_from(nbunch) + assert G.has_node("L") + + def test_nbunch_dict(self): + # nbunch is a dict with nodes as keys + G = self.G() + nbunch = set("ABCDEFGHIJKL") + G.add_nodes_from(nbunch) + nbunch = {"I": "foo", "J": 2, "K": True, "L": "spam"} + G.remove_nodes_from(nbunch) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"] + + def test_nbunch_iterator(self): + G = self.G() + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) + n_iter = self.P3.nodes() + G.add_nodes_from(n_iter) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] + n_iter = self.P3.nodes() # rebuild same iterator + G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator) + assert sorted(G.nodes(), key=str) == ["A", "B", "C", "D", "E", "F", "G", "H"] + + def test_nbunch_graph(self): + G = self.G() + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) + nbunch = self.K3 + G.add_nodes_from(nbunch) + assert sorted(G.nodes(), key=str) == [ + 1, + 2, + 3, + "A", + "B", + "C", + "D", + "E", + "F", + "G", + "H", + ] + + # Edges + + def test_add_edge(self): + G = self.G() + pytest.raises(TypeError, G.add_edge, "A") + + G.add_edge("A", "B") # testing add_edge() + G.add_edge("A", "B") # adding the same edge again is a silent no-op + assert G.has_edge("A", "B") + assert not G.has_edge("A", "C") + assert G.has_edge(*("A", "B")) + if G.is_directed(): + assert not G.has_edge("B", "A") + else: + # G is undirected, so B->A is an edge + assert G.has_edge("B", "A") + + G.add_edge("A", "C") # test directedness + G.add_edge("C", "A") + G.remove_edge("C", "A") + if G.is_directed(): + assert G.has_edge("A", "C") + else: + assert not G.has_edge("A", "C") + assert not G.has_edge("C", "A") + + def test_self_loop(self): + G = self.G() + G.add_edge("A", "A") # test self loops + assert G.has_edge("A", "A") + G.remove_edge("A", "A") + G.add_edge("X", "X") + assert G.has_node("X") + G.remove_node("X") + G.add_edge("A", "Z") # should add the node silently + assert G.has_node("Z") + + def test_add_edges_from(self): + G = self.G() + G.add_edges_from([("B", "C")]) # test add_edges_from() + assert G.has_edge("B", 
"C") + if G.is_directed(): + assert not G.has_edge("C", "B") + else: + assert G.has_edge("C", "B") # undirected + + G.add_edges_from([("D", "F"), ("B", "D")]) + assert G.has_edge("D", "F") + assert G.has_edge("B", "D") + + if G.is_directed(): + assert not G.has_edge("D", "B") + else: + assert G.has_edge("D", "B") # undirected + + def test_add_edges_from2(self): + G = self.G() + # after failing silently, should add 2nd edge + G.add_edges_from([tuple("IJ"), list("KK"), tuple("JK")]) + assert G.has_edge(*("I", "J")) + assert G.has_edge(*("K", "K")) + assert G.has_edge(*("J", "K")) + if G.is_directed(): + assert not G.has_edge(*("K", "J")) + else: + assert G.has_edge(*("K", "J")) + + def test_add_edges_from3(self): + G = self.G() + G.add_edges_from(zip(list("ACD"), list("CDE"))) + assert G.has_edge("D", "E") + assert not G.has_edge("E", "C") + + def test_remove_edge(self): + G = self.G() + G.add_nodes_from([1, 2, 3, "A", "B", "C", "D", "E", "F", "G", "H"]) + + G.add_edges_from(zip(list("MNOP"), list("NOPM"))) + assert G.has_edge("O", "P") + assert G.has_edge("P", "M") + G.remove_node("P") # tests remove_node()'s handling of edges. + assert not G.has_edge("P", "M") + pytest.raises(TypeError, G.remove_edge, "M") + + G.add_edge("N", "M") + assert G.has_edge("M", "N") + G.remove_edge("M", "N") + assert not G.has_edge("M", "N") + + # self loop fails silently + G.remove_edges_from([list("HI"), list("DF"), tuple("KK"), tuple("JK")]) + assert not G.has_edge("H", "I") + assert not G.has_edge("J", "K") + G.remove_edges_from([list("IJ"), list("KK"), list("JK")]) + assert not G.has_edge("I", "J") + G.remove_nodes_from(set("ZEFHIMNO")) + G.add_edge("J", "K") + + def test_edges_nbunch(self): + # Test G.edges(nbunch) with various forms of nbunch + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + # node not in nbunch should be quietly ignored + pytest.raises(nx.NetworkXError, G.edges, 6) + assert list(G.edges("Z")) == [] # iterable non-node + # nbunch can be an empty list + assert list(G.edges([])) == [] + if G.is_directed(): + elist = [("A", "B"), ("A", "C"), ("B", "D")] + else: + elist = [("A", "B"), ("A", "C"), ("B", "C"), ("B", "D")] + # nbunch can be a list + assert edges_equal(list(G.edges(["A", "B"])), elist) + # nbunch can be a set + assert edges_equal(G.edges({"A", "B"}), elist) + # nbunch can be a graph + G1 = self.G() + G1.add_nodes_from("AB") + assert edges_equal(G.edges(G1), elist) + # nbunch can be a dict with nodes as keys + ndict = {"A": "thing1", "B": "thing2"} + assert edges_equal(G.edges(ndict), elist) + # nbunch can be a single node + assert edges_equal(list(G.edges("A")), [("A", "B"), ("A", "C")]) + assert nodes_equal(sorted(G), ["A", "B", "C", "D"]) + + # nbunch can be nothing (whole graph) + assert edges_equal( + list(G.edges()), + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")], + ) + + def test_degree(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.degree("A") == 2 + + # degree of single node in iterable container must return dict + assert list(G.degree(["A"])) == [("A", 2)] + assert sorted(d for n, d in G.degree(["A", "B"])) == [2, 3] + assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3] + + def test_degree2(self): + H = self.G() + H.add_edges_from([(1, 24), (1, 2)]) + assert sorted(d for n, d in H.degree([1, 24])) == [1, 2] + + def test_degree_graph(self): + P3 = nx.path_graph(3) + P5 = nx.path_graph(5) + # silently ignore nodes not in P3 + assert dict(d 
for n, d in P3.degree(["A", "B"])) == {} + # nbunch can be a graph + assert sorted(d for n, d in P5.degree(P3)) == [1, 2, 2] + # nbunch can be a graph that's way too big + assert sorted(d for n, d in P3.degree(P5)) == [1, 1, 2] + assert list(P5.degree([])) == [] + assert dict(P5.degree([])) == {} + + def test_null(self): + null = nx.null_graph() + assert list(null.degree()) == [] + assert dict(null.degree()) == {} + + def test_order_size(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + assert G.order() == 4 + assert G.size() == 5 + assert G.number_of_edges() == 5 + assert G.number_of_edges("A", "B") == 1 + assert G.number_of_edges("A", "D") == 0 + + def test_copy(self): + G = self.G() + H = G.copy() # copy + assert H.adj == G.adj + assert H.name == G.name + assert H is not G + + def test_subgraph(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + SG = G.subgraph(["A", "B", "D"]) + assert nodes_equal(list(SG), ["A", "B", "D"]) + assert edges_equal(list(SG.edges()), [("A", "B"), ("B", "D")]) + + def test_to_directed(self): + G = self.G() + if not G.is_directed(): + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + DG = G.to_directed() + assert DG is not G # directed copy or copy + + assert DG.is_directed() + assert DG.name == G.name + assert DG.adj == G.adj + assert sorted(DG.out_edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "A"), + ("B", "C"), + ("B", "D"), + ] + DG.remove_edge("A", "B") + assert DG.has_edge("B", "A") # this removes B-A but not A-B + assert not DG.has_edge("A", "B") + + def test_to_undirected(self): + G = self.G() + if G.is_directed(): + G.add_edges_from( + [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + UG = G.to_undirected() # to_undirected + assert UG is not G + assert not UG.is_directed() + assert G.is_directed() + assert UG.name == G.name + assert UG.adj != G.adj + assert sorted(UG.edges(list("AB"))) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + assert sorted(UG.edges(["A", "B"])) == [ + ("A", "B"), + ("A", "C"), + ("B", "C"), + ("B", "D"), + ] + UG.remove_edge("A", "B") + assert not UG.has_edge("B", "A") + assert not UG.has_edge("A", "B") + + def test_neighbors(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G["A"]) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("A")) == ["B", "C"] + assert sorted(G.neighbors("G")) == [] + pytest.raises(nx.NetworkXError, G.neighbors, "j") + + def test_iterators(self): + G = self.G() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")]) + G.add_nodes_from("GJK") + assert sorted(G.nodes()) == ["A", "B", "C", "D", "G", "J", "K"] + assert edges_equal( + G.edges(), [("A", "B"), ("A", "C"), ("B", "D"), ("C", "B"), ("C", "D")] + ) + + assert sorted(v for k, v in G.degree()) == [0, 0, 0, 2, 2, 3, 3] + assert sorted(G.degree(), key=str) == [ + ("A", 2), + ("B", 3), + ("C", 3), + ("D", 2), + ("G", 0), + ("J", 0), + ("K", 0), + ] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "X") + G.clear() + assert nx.number_of_nodes(G) == 0 + assert nx.number_of_edges(G) == 0 + + def test_null_subgraph(self): + # Subgraph of a null graph is a null graph + nullgraph = nx.null_graph() + G = nx.null_graph() + H = G.subgraph([]) + assert 
nx.is_isomorphic(H, nullgraph) + + def test_empty_subgraph(self): + # Subgraph of an empty graph is an empty graph. test 1 + nullgraph = nx.null_graph() + E5 = nx.empty_graph(5) + E10 = nx.empty_graph(10) + H = E10.subgraph([]) + assert nx.is_isomorphic(H, nullgraph) + H = E10.subgraph([1, 2, 3, 4, 5]) + assert nx.is_isomorphic(H, E5) + + def test_complete_subgraph(self): + # Subgraph of a complete graph is a complete graph + K1 = nx.complete_graph(1) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + H = K5.subgraph([1, 2, 3]) + assert nx.is_isomorphic(H, K3) + + def test_subgraph_nbunch(self): + nullgraph = nx.null_graph() + K1 = nx.complete_graph(1) + K3 = nx.complete_graph(3) + K5 = nx.complete_graph(5) + # Test G.subgraph(nbunch), where nbunch is a single node + H = K5.subgraph(1) + assert nx.is_isomorphic(H, K1) + # Test G.subgraph(nbunch), where nbunch is a set + H = K5.subgraph({1}) + assert nx.is_isomorphic(H, K1) + # Test G.subgraph(nbunch), where nbunch is an iterator + H = K5.subgraph(iter(K3)) + assert nx.is_isomorphic(H, K3) + # Test G.subgraph(nbunch), where nbunch is another graph + H = K5.subgraph(K3) + assert nx.is_isomorphic(H, K3) + H = K5.subgraph([9]) + assert nx.is_isomorphic(H, nullgraph) + + def test_node_tuple_issue(self): + H = self.G() + # Test error handling of tuple as a node + pytest.raises(nx.NetworkXError, H.remove_node, (1, 2)) + H.remove_nodes_from([(1, 2)]) # no error + pytest.raises(nx.NetworkXError, H.neighbors, (1, 2)) diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_coreviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_coreviews.py new file mode 100644 index 0000000..24de7f2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_coreviews.py @@ -0,0 +1,362 @@ +import pickle + +import pytest + +import networkx as nx + + +class TestAtlasView: + # node->data + def setup_method(self): + self.d = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.av = nx.classes.coreviews.AtlasView(self.d) + + def test_pickle(self): + view = self.av + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + pview = pickle.loads(pickle.dumps(view)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.av) == len(self.d) + + def test_iter(self): + assert list(self.av) == list(self.d) + + def test_getitem(self): + assert self.av[1] is self.d[1] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av.__getitem__, 3) + + def test_copy(self): + avcopy = self.av.copy() + assert avcopy[0] == self.av[0] + assert avcopy == self.av + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av + avcopy[5] = {} + assert avcopy != self.av + + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] + + assert not hasattr(self.av, "__setitem__") + + def test_items(self): + assert sorted(self.av.items()) == sorted(self.d.items()) + + def test_str(self): + out = str(self.d) + assert str(self.av) == out + + def test_repr(self): + out = "AtlasView(" + str(self.d) + ")" + assert repr(self.av) == out + + +class TestAdjacencyView: + # node->nbr->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.adj = {3: self.nd, 0: {3: dd}, 1: {}, 2: {3: {"color": 1}}} + self.adjview = nx.classes.coreviews.AdjacencyView(self.adj) + + def 
test_pickle(self): + view = self.adjview + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.adjview) == len(self.adj) + + def test_iter(self): + assert list(self.adjview) == list(self.adj) + + def test_getitem(self): + assert self.adjview[1] is not self.adj[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + def test_items(self): + view_items = sorted((n, dict(d)) for n, d in self.adjview.items()) + assert view_items == sorted(self.adj.items()) + + def test_str(self): + out = str(dict(self.adj)) + assert str(self.adjview) == out + + def test_repr(self): + out = self.adjview.__class__.__name__ + "(" + str(self.adj) + ")" + assert repr(self.adjview) == out + + +class TestMultiAdjacencyView(TestAdjacencyView): + # node->nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {0: dd, 1: {}, 2: {"color": 1}} + self.nd = {3: self.kd, 0: {3: dd}, 1: {0: {}}, 2: {3: {"color": 1}}} + self.adj = {3: self.nd, 0: {3: {3: dd}}, 1: {}, 2: {3: {8: {}}}} + self.adjview = nx.classes.coreviews.MultiAdjacencyView(self.adj) + + def test_getitem(self): + assert self.adjview[1] is not self.adj[1] + assert self.adjview[3][0][3] is self.adjview[0][3][3] + assert self.adjview[3][2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + +class TestUnionAtlas: + # node->data + def setup_method(self): + self.s = {0: {"color": "blue", "weight": 1.2}, 1: {}, 2: {"color": 1}} + self.p = {3: {"color": "blue", "weight": 1.2}, 4: {}, 2: {"watch": 2}} + self.av = nx.classes.coreviews.UnionAtlas(self.s, self.p) + + def test_pickle(self): + view = self.av + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.av) == len(self.s.keys() | self.p.keys()) == 5 + + def test_iter(self): + assert set(self.av) == set(self.s) | set(self.p) + + def test_getitem(self): + assert self.av[0] is self.s[0] + assert self.av[4] is self.p[4] + assert self.av[2]["color"] == 1 + pytest.raises(KeyError, self.av[2].__getitem__, "watch") + pytest.raises(KeyError, self.av.__getitem__, 8) + + def test_copy(self): + avcopy = self.av.copy() + assert avcopy[0] == self.av[0] + assert avcopy[0] is not self.av[0] + assert avcopy is not self.av + avcopy[5] = {} + assert avcopy != self.av + + avcopy[0]["ht"] = 4 + assert avcopy[0] != self.av[0] + self.av[0]["ht"] = 4 + assert avcopy[0] == self.av[0] + del self.av[0]["ht"] + + assert not hasattr(self.av, "__setitem__") + + def test_items(self): + expected = dict(self.p.items()) + expected.update(self.s) + assert sorted(self.av.items()) == 
sorted(expected.items()) + + def test_str(self): + out = str(dict(self.av)) + assert str(self.av) == out + + def test_repr(self): + out = f"{self.av.__class__.__name__}({self.s}, {self.p})" + assert repr(self.av) == out + + +class TestUnionAdjacency: + # node->nbr->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.nd = {0: dd, 1: {}, 2: {"color": 1}} + self.s = {3: self.nd, 0: {}, 1: {}, 2: {3: {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {0: {}}, 2: {1: {"color": 1}}} + self.adjview = nx.classes.coreviews.UnionAdjacency(self.s, self.p) + + def test_pickle(self): + view = self.adjview + pview = pickle.loads(pickle.dumps(view, -1)) + assert view == pview + assert view.__slots__ == pview.__slots__ + + def test_len(self): + assert len(self.adjview) == len(self.s) + + def test_iter(self): + assert sorted(self.adjview) == sorted(self.s) + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0] is self.adjview[0][3] + assert self.adjview[2][3]["color"] == 1 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + + def test_str(self): + out = str(dict(self.adjview)) + assert str(self.adjview) == out + + def test_repr(self): + clsname = self.adjview.__class__.__name__ + out = f"{clsname}({self.s}, {self.p})" + assert repr(self.adjview) == out + + +class TestUnionMultiInner(TestUnionAdjacency): + # nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, "ekey": {}, 9: {"color": 1}} + self.s = {3: self.kd, 0: {7: dd}, 1: {}, 2: {"key": {"color": 1}}} + self.p = {3: {}, 0: {3: dd}, 1: {}, 2: {1: {"span": 2}}} + self.adjview = nx.classes.coreviews.UnionMultiInner(self.s, self.p) + + def test_len(self): + assert len(self.adjview) == len(self.s.keys() | self.p.keys()) == 4 + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[0][7] is self.adjview[0][3] + assert self.adjview[2]["key"]["color"] == 1 + assert self.adjview[2][1]["span"] == 2 + pytest.raises(KeyError, self.adjview.__getitem__, 4) + pytest.raises(KeyError, self.adjview[1].__getitem__, "key") + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][1]["width"] = 8 + assert avcopy[2] != self.adjview[2] + self.adjview[2][1]["width"] = 8 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][1]["width"] + + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") + + +class TestUnionMultiAdjacency(TestUnionAdjacency): + # node->nbr->key->data + def setup_method(self): + dd = {"color": "blue", "weight": 1.2} + self.kd = {7: {}, 8: {}, 9: {"color": 1}} + self.nd = {3: self.kd, 0: {9: dd}, 1: {8: {}}, 2: {9: {"color": 1}}} + self.s = {3: self.nd, 0: {3: {7: dd}}, 1: {}, 2: {3: {8: {}}}} + self.p = {3: {}, 0: {3: {9: dd}}, 1: {}, 2: {1: {8: {}}}} + self.adjview = nx.classes.coreviews.UnionMultiAdjacency(self.s, self.p) + + def test_getitem(self): + assert self.adjview[1] is not self.s[1] + assert self.adjview[3][0][9] is self.adjview[0][3][9] + assert self.adjview[3][2][9]["color"] == 1 + pytest.raises(KeyError, 
self.adjview.__getitem__, 4) + + def test_copy(self): + avcopy = self.adjview.copy() + assert avcopy[0] == self.adjview[0] + assert avcopy[0] is not self.adjview[0] + + avcopy[2][3][8]["ht"] = 4 + assert avcopy[2] != self.adjview[2] + self.adjview[2][3][8]["ht"] = 4 + assert avcopy[2] == self.adjview[2] + del self.adjview[2][3][8]["ht"] + + assert not hasattr(self.adjview, "__setitem__") + assert hasattr(avcopy, "__setitem__") + + +class TestFilteredGraphs: + def setup_method(self): + self.Graphs = [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] + + def test_hide_show_nodes(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + assert SG.nodes == RG.nodes + assert SG.edges == RG.edges + SGC = SG.copy() + RGC = RG.copy() + assert SGC.nodes == RGC.nodes + assert SGC.edges == RGC.edges + + def test_str_repr(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + str(SG.adj) + str(RG.adj) + repr(SG.adj) + repr(RG.adj) + str(SG.adj[2]) + str(RG.adj[2]) + repr(SG.adj[2]) + repr(RG.adj[2]) + + def test_copy(self): + SubGraph = nx.subgraph_view + for Graph in self.Graphs: + G = nx.path_graph(4, Graph) + SG = G.subgraph([2, 3]) + RG = SubGraph(G, filter_node=nx.filters.hide_nodes([0, 1])) + RsG = SubGraph(G, filter_node=nx.filters.show_nodes([2, 3])) + assert G.adj.copy() == G.adj + assert G.adj[2].copy() == G.adj[2] + assert SG.adj.copy() == SG.adj + assert SG.adj[2].copy() == SG.adj[2] + assert RG.adj.copy() == RG.adj + assert RG.adj[2].copy() == RG.adj[2] + assert RsG.adj.copy() == RsG.adj + assert RsG.adj[2].copy() == RsG.adj[2] diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph.py new file mode 100644 index 0000000..b9972f9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph.py @@ -0,0 +1,331 @@ +import pytest + +import networkx as nx +from networkx.utils import nodes_equal + +from .test_graph import BaseAttrGraphTester, BaseGraphTester +from .test_graph import TestEdgeSubgraph as _TestGraphEdgeSubgraph +from .test_graph import TestGraph as _TestGraph + + +class BaseDiGraphTester(BaseGraphTester): + def test_has_successor(self): + G = self.K3 + assert G.has_successor(0, 1) + assert not G.has_successor(0, -1) + + def test_successors(self): + G = self.K3 + assert sorted(G.successors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.successors(-1) + + def test_has_predecessor(self): + G = self.K3 + assert G.has_predecessor(0, 1) + assert not G.has_predecessor(0, -1) + + def test_predecessors(self): + G = self.K3 + assert sorted(G.predecessors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.predecessors(-1) + + def test_edges(self): + G = self.K3 + assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.edges(0)) == [(0, 1), (0, 2)] + assert sorted(G.edges([0, 1])) == [(0, 1), (0, 2), (1, 0), (1, 2)] + with pytest.raises(nx.NetworkXError): + G.edges(-1) + + def test_out_edges(self): + G = self.K3 + assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)] + with pytest.raises(nx.NetworkXError): + G.out_edges(-1) + + def test_out_edges_dir(self): + G = self.P3 + 
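# self.P3 (built in the test class's setup_method) is the directed path + # 0 -> 1 -> 2, so node 0 has a single out-edge and node 2 has none. +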
assert sorted(G.out_edges()) == [(0, 1), (1, 2)] + assert sorted(G.out_edges(0)) == [(0, 1)] + assert sorted(G.out_edges(2)) == [] + + def test_out_edges_data(self): + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.out_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.out_edges(0, data=True)) == [(0, 1, {"data": 0})] + assert sorted(G.out_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.out_edges(0, data="data")) == [(0, 1, 0)] + + def test_in_edges_dir(self): + G = self.P3 + assert sorted(G.in_edges()) == [(0, 1), (1, 2)] + assert sorted(G.in_edges(0)) == [] + assert sorted(G.in_edges(2)) == [(1, 2)] + + def test_in_edges_data(self): + G = nx.DiGraph([(0, 1, {"data": 0}), (1, 0, {})]) + assert sorted(G.in_edges(data=True)) == [(0, 1, {"data": 0}), (1, 0, {})] + assert sorted(G.in_edges(1, data=True)) == [(0, 1, {"data": 0})] + assert sorted(G.in_edges(data="data")) == [(0, 1, 0), (1, 0, None)] + assert sorted(G.in_edges(1, data="data")) == [(0, 1, 0)] + + def test_degree(self): + G = self.K3 + assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)] + assert dict(G.degree()) == {0: 4, 1: 4, 2: 4} + assert G.degree(0) == 4 + assert list(G.degree(iter([0]))) == [(0, 4)] # run through iterator + + def test_in_degree(self): + G = self.K3 + assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2} + assert G.in_degree(0) == 2 + assert list(G.in_degree(iter([0]))) == [(0, 2)] # run through iterator + + def test_out_degree(self): + G = self.K3 + assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2} + assert G.out_degree(0) == 2 + assert list(G.out_degree(iter([0]))) == [(0, 2)] + + def test_size(self): + G = self.K3 + assert G.size() == 6 + assert G.number_of_edges() == 6 + + def test_to_undirected_reciprocal(self): + G = self.Graph() + G.add_edge(1, 2) + assert G.to_undirected().has_edge(1, 2) + assert not G.to_undirected(reciprocal=True).has_edge(1, 2) + G.add_edge(2, 1) + assert G.to_undirected(reciprocal=True).has_edge(1, 2) + + def test_reverse_copy(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + R = G.reverse() + assert sorted(R.edges()) == [(1, 0), (2, 1)] + R.remove_edge(1, 0) + assert sorted(R.edges()) == [(2, 1)] + assert sorted(G.edges()) == [(0, 1), (1, 2)] + + def test_reverse_nocopy(self): + G = nx.DiGraph([(0, 1), (1, 2)]) + R = G.reverse(copy=False) + assert sorted(R.edges()) == [(1, 0), (2, 1)] + with pytest.raises(nx.NetworkXError): + R.remove_edge(1, 0) + + def test_reverse_hashable(self): + class Foo: + pass + + x = Foo() + y = Foo() + G = nx.DiGraph() + G.add_edge(x, y) + assert nodes_equal(G.nodes(), G.reverse().nodes()) + assert [(y, x)] == list(G.reverse().edges()) + + def test_di_cache_reset(self): + G = self.K3.copy() + old_succ = G.succ + assert id(G.succ) == id(old_succ) + old_adj = G.adj + assert id(G.adj) == id(old_adj) + + G._succ = {} + assert id(G.succ) != id(old_succ) + assert id(G.adj) != id(old_adj) + + old_pred = G.pred + assert id(G.pred) == id(old_pred) + G._pred = {} + assert id(G.pred) != id(old_pred) + + def test_di_attributes_cached(self): + G = self.K3.copy() + assert id(G.in_edges) == id(G.in_edges) + assert id(G.out_edges) == id(G.out_edges) + assert id(G.in_degree) == id(G.in_degree) + assert id(G.out_degree) == id(G.out_degree) + assert id(G.succ) == id(G.succ) + assert id(G.pred) == id(G.pred) + + +class BaseAttrDiGraphTester(BaseDiGraphTester, BaseAttrGraphTester): + def 
test_edges_data(self): + G = self.K3 + all_edges = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + ] + assert sorted(G.edges(data=True)) == all_edges + assert sorted(G.edges(0, data=True)) == all_edges[:2] + assert sorted(G.edges([0, 1], data=True)) == all_edges[:4] + with pytest.raises(nx.NetworkXError): + G.edges(-1, True) + + def test_in_degree_weighted(self): + G = self.K3.copy() + G.add_edge(0, 1, weight=0.3, other=1.2) + assert sorted(G.in_degree(weight="weight")) == [(0, 2), (1, 1.3), (2, 2)] + assert dict(G.in_degree(weight="weight")) == {0: 2, 1: 1.3, 2: 2} + assert G.in_degree(1, weight="weight") == 1.3 + assert sorted(G.in_degree(weight="other")) == [(0, 2), (1, 2.2), (2, 2)] + assert dict(G.in_degree(weight="other")) == {0: 2, 1: 2.2, 2: 2} + assert G.in_degree(1, weight="other") == 2.2 + assert list(G.in_degree(iter([1]), weight="other")) == [(1, 2.2)] + + def test_out_degree_weighted(self): + G = self.K3.copy() + G.add_edge(0, 1, weight=0.3, other=1.2) + assert sorted(G.out_degree(weight="weight")) == [(0, 1.3), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="weight")) == {0: 1.3, 1: 2, 2: 2} + assert G.out_degree(0, weight="weight") == 1.3 + assert sorted(G.out_degree(weight="other")) == [(0, 2.2), (1, 2), (2, 2)] + assert dict(G.out_degree(weight="other")) == {0: 2.2, 1: 2, 2: 2} + assert G.out_degree(0, weight="other") == 2.2 + assert list(G.out_degree(iter([0]), weight="other")) == [(0, 2.2)] + + +class TestDiGraph(BaseAttrDiGraphTester, _TestGraph): + """Tests specific to dict-of-dict-of-dict digraph data structure""" + + def setup_method(self): + self.Graph = nx.DiGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3, ed4, ed5, ed6 = ({}, {}, {}, {}, {}, {}) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = self.k3adj # K3._adj is synced with K3._succ + self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}} + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + ed1, ed2 = ({}, {}) + self.P3 = self.Graph() + self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}} + self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}} + # P3._adj is synced with P3._succ + self.P3._node = {} + self.P3._node[0] = {} + self.P3._node[1] = {} + self.P3._node[2] = {} + + def test_data_input(self): + G = self.Graph({1: [2], 2: [1]}, name="test") + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] + assert sorted(G.succ.items()) == [(1, {2: {}}), (2, {1: {}})] + assert sorted(G.pred.items()) == [(1, {2: {}}), (2, {1: {}})] + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G.adj == {0: {1: {}}, 1: {}} + assert G.succ == {0: {1: {}}, 1: {}} + assert G.pred == {0: {}, 1: {0: {}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G.adj == {0: {1: {}}, 1: {}} + assert G.succ == {0: {1: {}}, 1: {}} + assert G.pred == {0: {}, 1: {0: {}}} + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edge(None, 3) + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"data": 3})], data=2) + assert G.adj == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert G.succ == {0: {1: {"data": 2}, 2: {"data": 3}}, 1: {}, 2: {}} + assert G.pred == {0: {}, 1: {0: {"data": 2}}, 2: {0: {"data": 3}}} + + with pytest.raises(nx.NetworkXError): + 
G.add_edges_from([(0,)]) # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) # not a tuple + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edges_from([(None, 3), (3, 2)]) + + def test_remove_edge(self): + G = self.K3.copy() + G.remove_edge(0, 1) + assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} + assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + + def test_remove_edges_from(self): + G = self.K3.copy() + G.remove_edges_from([(0, 1)]) + assert G.succ == {0: {2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}} + assert G.pred == {0: {1: {}, 2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + G.remove_edges_from([(0, 0)]) # silent fail + + def test_clear(self): + G = self.K3 + G.graph["name"] = "K3" + G.clear() + assert list(G.nodes) == [] + assert G.succ == {} + assert G.pred == {} + assert G.graph == {} + + def test_clear_edges(self): + G = self.K3 + G.graph["name"] = "K3" + nodes = list(G.nodes) + G.clear_edges() + assert list(G.nodes) == nodes + expected = {0: {}, 1: {}, 2: {}} + assert G.succ == expected + assert G.pred == expected + assert list(G.edges) == [] + assert G.graph["name"] == "K3" + + +class TestEdgeSubgraph(_TestGraphEdgeSubgraph): + """Unit tests for the :meth:`DiGraph.edge_subgraph` method.""" + + def setup_method(self): + # Create a doubly-linked path graph on five nodes. + G = nx.DiGraph(nx.path_graph(5)) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1), (3, 4)]) + + def test_pred_succ(self): + """Test that nodes are added to predecessors and successors. + + For more information, see GitHub issue #2370. 
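+ + In particular, ``edge_subgraph`` must add each endpoint of a retained + edge to the subgraph's predecessor and successor mappings, which the + assertions below verify through the public API.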
+ + """ + G = nx.DiGraph() + G.add_edge(0, 1) + H = G.edge_subgraph([(0, 1)]) + assert list(H.predecessors(0)) == [] + assert list(H.successors(0)) == [1] + assert list(H.predecessors(1)) == [0] + assert list(H.successors(1)) == [] diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph_historical.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph_historical.py new file mode 100644 index 0000000..ff9f9e9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_digraph_historical.py @@ -0,0 +1,110 @@ +"""Original NetworkX graph tests""" + +import pytest + +import networkx as nx + +from .historical_tests import HistoricalTests + + +class TestDiGraphHistorical(HistoricalTests): + @classmethod + def setup_class(cls): + HistoricalTests.setup_class() + cls.G = nx.DiGraph + + def test_in_degree(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + + assert sorted(d for n, d in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2] + assert dict(G.in_degree()) == { + "A": 0, + "C": 2, + "B": 1, + "D": 2, + "G": 0, + "K": 0, + "J": 0, + } + + def test_out_degree(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(v for k, v in G.in_degree()) == [0, 0, 0, 0, 1, 2, 2] + assert dict(G.out_degree()) == { + "A": 2, + "C": 1, + "B": 2, + "D": 0, + "G": 0, + "K": 0, + "J": 0, + } + + def test_degree_digraph(self): + H = nx.DiGraph() + H.add_edges_from([(1, 24), (1, 2)]) + assert sorted(d for n, d in H.in_degree([1, 24])) == [0, 1] + assert sorted(d for n, d in H.out_degree([1, 24])) == [0, 2] + assert sorted(d for n, d in H.degree([1, 24])) == [1, 2] + + def test_neighbors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + + assert sorted(G.neighbors("C")) == ["D"] + assert sorted(G["C"]) == ["D"] + assert sorted(G.neighbors("A")) == ["B", "C"] + pytest.raises(nx.NetworkXError, G.neighbors, "j") + pytest.raises(nx.NetworkXError, G.neighbors, "j") + + def test_successors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("A")) == ["B", "C"] + assert sorted(G.successors("G")) == [] + assert sorted(G.successors("D")) == [] + assert sorted(G.successors("G")) == [] + pytest.raises(nx.NetworkXError, G.successors, "j") + pytest.raises(nx.NetworkXError, G.successors, "j") + + def test_predecessors(self): + G = self.G() + G.add_nodes_from("GJK") + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "D"), ("B", "C"), ("C", "D")]) + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("C")) == ["A", "B"] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.predecessors("G")) == [] + assert sorted(G.predecessors("A")) == [] + assert sorted(G.successors("D")) == [] + + pytest.raises(nx.NetworkXError, G.predecessors, "j") + pytest.raises(nx.NetworkXError, G.predecessors, "j") + + def test_reverse(self): + G = nx.complete_graph(10) + H = G.to_directed() + HR = H.reverse() + assert nx.is_isomorphic(H, HR) + assert sorted(H.edges()) == sorted(HR.edges()) + + def test_reverse2(self): + H = nx.DiGraph() + foo = [H.add_edge(u, u + 1) for u in range(5)] + HR = H.reverse() + for u in range(5): 
+ assert HR.has_edge(u + 1, u) + + def test_reverse3(self): + H = nx.DiGraph() + H.add_nodes_from([1, 2, 3, 4]) + HR = H.reverse() + assert sorted(HR.nodes()) == [1, 2, 3, 4] diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_filters.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_filters.py new file mode 100644 index 0000000..2da5911 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_filters.py @@ -0,0 +1,177 @@ +import pytest + +import networkx as nx + + +class TestFilterFactory: + def test_no_filter(self): + nf = nx.filters.no_filter + assert nf() + assert nf(1) + assert nf(2, 1) + + def test_hide_nodes(self): + f = nx.classes.filters.hide_nodes([1, 2, 3]) + assert not f(1) + assert not f(2) + assert not f(3) + assert f(4) + assert f(0) + assert f("a") + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f) + + def test_show_nodes(self): + f = nx.classes.filters.show_nodes([1, 2, 3]) + assert f(1) + assert f(2) + assert f(3) + assert not f(4) + assert not f(0) + assert not f("a") + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f) + + def test_hide_edges(self): + factory = nx.classes.filters.hide_edges + f = factory([(1, 2), (3, 4)]) + assert not f(1, 2) + assert not f(3, 4) + assert not f(4, 3) + assert f(2, 3) + assert f(0, -1) + assert f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_show_edges(self): + factory = nx.classes.filters.show_edges + f = factory([(1, 2), (3, 4)]) + assert f(1, 2) + assert f(3, 4) + assert f(4, 3) + assert not f(2, 3) + assert not f(0, -1) + assert not f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_hide_diedges(self): + factory = nx.classes.filters.hide_diedges + f = factory([(1, 2), (3, 4)]) + assert not f(1, 2) + assert not f(3, 4) + assert f(4, 3) + assert f(2, 3) + assert f(0, -1) + assert f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_show_diedges(self): + factory = nx.classes.filters.show_diedges + f = factory([(1, 2), (3, 4)]) + assert f(1, 2) + assert f(3, 4) + assert not f(4, 3) + assert not f(2, 3) + assert not f(0, -1) + assert not f("a", "b") + pytest.raises(TypeError, f, 1, 2, 3) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2, 3)]) + + def test_hide_multiedges(self): + factory = nx.classes.filters.hide_multiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert not f(1, 2, 0) + assert not f(1, 2, 1) + assert f(1, 2, 2) + assert f(3, 4, 0) + assert not f(3, 4, 1) + assert not f(4, 3, 1) + assert f(4, 3, 0) + assert f(2, 3, 0) + assert f(0, -1, 0) + assert f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_show_multiedges(self): + factory = 
nx.classes.filters.show_multiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert f(1, 2, 0) + assert f(1, 2, 1) + assert not f(1, 2, 2) + assert not f(3, 4, 0) + assert f(3, 4, 1) + assert f(4, 3, 1) + assert not f(4, 3, 0) + assert not f(2, 3, 0) + assert not f(0, -1, 0) + assert not f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_hide_multidiedges(self): + factory = nx.classes.filters.hide_multidiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert not f(1, 2, 0) + assert not f(1, 2, 1) + assert f(1, 2, 2) + assert f(3, 4, 0) + assert not f(3, 4, 1) + assert f(4, 3, 1) + assert f(4, 3, 0) + assert f(2, 3, 0) + assert f(0, -1, 0) + assert f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) + + def test_show_multidiedges(self): + factory = nx.classes.filters.show_multidiedges + f = factory([(1, 2, 0), (3, 4, 1), (1, 2, 1)]) + assert f(1, 2, 0) + assert f(1, 2, 1) + assert not f(1, 2, 2) + assert not f(3, 4, 0) + assert f(3, 4, 1) + assert not f(4, 3, 1) + assert not f(4, 3, 0) + assert not f(2, 3, 0) + assert not f(0, -1, 0) + assert not f("a", "b", 0) + pytest.raises(TypeError, f, 1, 2, 3, 4) + pytest.raises(TypeError, f, 1, 2) + pytest.raises(TypeError, f, 1) + pytest.raises(TypeError, f) + pytest.raises(TypeError, factory, [1, 2, 3]) + pytest.raises(ValueError, factory, [(1, 2)]) + pytest.raises(ValueError, factory, [(1, 2, 3, 4)]) diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_function.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_function.py new file mode 100644 index 0000000..9ace420 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_function.py @@ -0,0 +1,1035 @@ +import random + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +def test_degree_histogram_empty(): + G = nx.Graph() + assert nx.degree_histogram(G) == [] + + +class TestFunction: + def setup_method(self): + self.G = nx.Graph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}, name="Test") + self.Gdegree = {0: 3, 1: 2, 2: 2, 3: 1, 4: 0} + self.Gnodes = list(range(5)) + self.Gedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] + self.DG = nx.DiGraph({0: [1, 2, 3], 1: [1, 2, 0], 4: []}) + self.DGin_degree = {0: 1, 1: 2, 2: 2, 3: 1, 4: 0} + self.DGout_degree = {0: 3, 1: 3, 2: 0, 3: 0, 4: 0} + self.DGnodes = list(range(5)) + self.DGedges = [(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)] + + def test_nodes(self): + assert nodes_equal(self.G.nodes(), list(nx.nodes(self.G))) + assert nodes_equal(self.DG.nodes(), list(nx.nodes(self.DG))) + + def test_edges(self): + assert edges_equal(self.G.edges(), list(nx.edges(self.G))) + assert sorted(self.DG.edges()) == sorted(nx.edges(self.DG)) + assert edges_equal( + self.G.edges(nbunch=[0, 1, 3]), list(nx.edges(self.G, nbunch=[0, 1, 3])) + ) + assert sorted(self.DG.edges(nbunch=[0, 1, 3])) == sorted( + nx.edges(self.DG, nbunch=[0, 1, 3]) + ) + + def test_degree(self): + assert edges_equal(self.G.degree(), list(nx.degree(self.G))) + 
assert sorted(self.DG.degree()) == sorted(nx.degree(self.DG)) + assert edges_equal( + self.G.degree(nbunch=[0, 1]), list(nx.degree(self.G, nbunch=[0, 1])) + ) + assert sorted(self.DG.degree(nbunch=[0, 1])) == sorted( + nx.degree(self.DG, nbunch=[0, 1]) + ) + assert edges_equal( + self.G.degree(weight="weight"), list(nx.degree(self.G, weight="weight")) + ) + assert sorted(self.DG.degree(weight="weight")) == sorted( + nx.degree(self.DG, weight="weight") + ) + + def test_neighbors(self): + assert list(self.G.neighbors(1)) == list(nx.neighbors(self.G, 1)) + assert list(self.DG.neighbors(1)) == list(nx.neighbors(self.DG, 1)) + + def test_number_of_nodes(self): + assert self.G.number_of_nodes() == nx.number_of_nodes(self.G) + assert self.DG.number_of_nodes() == nx.number_of_nodes(self.DG) + + def test_number_of_edges(self): + assert self.G.number_of_edges() == nx.number_of_edges(self.G) + assert self.DG.number_of_edges() == nx.number_of_edges(self.DG) + + def test_is_directed(self): + assert self.G.is_directed() == nx.is_directed(self.G) + assert self.DG.is_directed() == nx.is_directed(self.DG) + + def test_add_star(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + nx.add_star(G, nlist) + assert edges_equal(G.edges(nlist), [(12, 13), (12, 14), (12, 15)]) + + G = self.G.copy() + nx.add_star(G, nlist, weight=2.0) + assert edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (12, 14, {"weight": 2.0}), + (12, 15, {"weight": 2.0}), + ], + ) + + G = self.G.copy() + nlist = [12] + nx.add_star(G, nlist) + assert nodes_equal(G, list(self.G) + nlist) + + G = self.G.copy() + nlist = [] + nx.add_star(G, nlist) + assert nodes_equal(G.nodes, self.Gnodes) + assert edges_equal(G.edges, self.G.edges) + + def test_add_path(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), [(12, 13), (13, 14), (14, 15)]) + G = self.G.copy() + nx.add_path(G, nlist, weight=2.0) + assert edges_equal( + G.edges(nlist, data=True), + [ + (12, 13, {"weight": 2.0}), + (13, 14, {"weight": 2.0}), + (14, 15, {"weight": 2.0}), + ], + ) + + G = self.G.copy() + nlist = ["node"] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), []) + assert nodes_equal(G, list(self.G) + ["node"]) + + G = self.G.copy() + nlist = iter(["node"]) + nx.add_path(G, nlist) + assert edges_equal(G.edges(["node"]), []) + assert nodes_equal(G, list(self.G) + ["node"]) + + G = self.G.copy() + nlist = [12] + nx.add_path(G, nlist) + assert edges_equal(G.edges(nlist), []) + assert nodes_equal(G, list(self.G) + [12]) + + G = self.G.copy() + nlist = iter([12]) + nx.add_path(G, nlist) + assert edges_equal(G.edges([12]), []) + assert nodes_equal(G, list(self.G) + [12]) + + G = self.G.copy() + nlist = [] + nx.add_path(G, nlist) + assert edges_equal(G.edges, self.G.edges) + assert nodes_equal(G, list(self.G)) + + G = self.G.copy() + nlist = iter([]) + nx.add_path(G, nlist) + assert edges_equal(G.edges, self.G.edges) + assert nodes_equal(G, list(self.G)) + + def test_add_cycle(self): + G = self.G.copy() + nlist = [12, 13, 14, 15] + oklists = [ + [(12, 13), (12, 15), (13, 14), (14, 15)], + [(12, 13), (13, 14), (14, 15), (15, 12)], + ] + nx.add_cycle(G, nlist) + assert sorted(G.edges(nlist)) in oklists + G = self.G.copy() + oklists = [ + [ + (12, 13, {"weight": 1.0}), + (12, 15, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + ], + [ + (12, 13, {"weight": 1.0}), + (13, 14, {"weight": 1.0}), + (14, 15, {"weight": 1.0}), + (15, 12, {"weight": 1.0}), + ], 
+ ] + nx.add_cycle(G, nlist, weight=1.0) + assert sorted(G.edges(nlist, data=True)) in oklists + + G = self.G.copy() + nlist = [12] + nx.add_cycle(G, nlist) + assert nodes_equal(G, list(self.G) + nlist) + + G = self.G.copy() + nlist = [] + nx.add_cycle(G, nlist) + assert nodes_equal(G.nodes, self.Gnodes) + assert edges_equal(G.edges, self.G.edges) + + def test_subgraph(self): + assert ( + self.G.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj == nx.subgraph(self.DG, [0, 1, 2, 4]).adj + ) + assert ( + self.G.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.G, [0, 1, 2, 4]).adj + ) + assert ( + self.DG.subgraph([0, 1, 2, 4]).adj + == nx.induced_subgraph(self.DG, [0, 1, 2, 4]).adj + ) + # subgraph-subgraph chain is allowed in function interface + H = nx.induced_subgraph(self.G.subgraph([0, 1, 2, 4]), [0, 1, 4]) + assert H._graph is not self.G + assert H.adj == self.G.subgraph([0, 1, 4]).adj + + def test_edge_subgraph(self): + assert ( + self.G.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.G, [(1, 2), (0, 3)]).adj + ) + assert ( + self.DG.edge_subgraph([(1, 2), (0, 3)]).adj + == nx.edge_subgraph(self.DG, [(1, 2), (0, 3)]).adj + ) + + def test_create_empty_copy(self): + G = nx.create_empty_copy(self.G, with_data=False) + assert nodes_equal(G, list(self.G)) + assert G.graph == {} + assert G._node == {}.fromkeys(self.G.nodes(), {}) + assert G._adj == {}.fromkeys(self.G.nodes(), {}) + G = nx.create_empty_copy(self.G) + assert nodes_equal(G, list(self.G)) + assert G.graph == self.G.graph + assert G._node == self.G._node + assert G._adj == {}.fromkeys(self.G.nodes(), {}) + + def test_degree_histogram(self): + assert nx.degree_histogram(self.G) == [1, 1, 1, 1, 1] + + def test_density(self): + assert nx.density(self.G) == 0.5 + assert nx.density(self.DG) == 0.3 + G = nx.Graph() + G.add_node(1) + assert nx.density(G) == 0.0 + + def test_density_selfloop(self): + G = nx.Graph() + G.add_edge(1, 1) + assert nx.density(G) == 0.0 + G.add_edge(1, 2) + assert nx.density(G) == 2.0 + + def test_freeze(self): + G = nx.freeze(self.G) + assert G.frozen + pytest.raises(nx.NetworkXError, G.add_node, 1) + pytest.raises(nx.NetworkXError, G.add_nodes_from, [1]) + pytest.raises(nx.NetworkXError, G.remove_node, 1) + pytest.raises(nx.NetworkXError, G.remove_nodes_from, [1]) + pytest.raises(nx.NetworkXError, G.add_edge, 1, 2) + pytest.raises(nx.NetworkXError, G.add_edges_from, [(1, 2)]) + pytest.raises(nx.NetworkXError, G.remove_edge, 1, 2) + pytest.raises(nx.NetworkXError, G.remove_edges_from, [(1, 2)]) + pytest.raises(nx.NetworkXError, G.clear_edges) + pytest.raises(nx.NetworkXError, G.clear) + + def test_is_frozen(self): + assert not nx.is_frozen(self.G) + G = nx.freeze(self.G) + assert G.frozen == nx.is_frozen(self.G) + assert G.frozen + + def test_node_attributes_are_still_mutable_on_frozen_graph(self): + G = nx.freeze(nx.path_graph(3)) + node = G.nodes[0] + node["node_attribute"] = True + assert node["node_attribute"] is True + + def test_edge_attributes_are_still_mutable_on_frozen_graph(self): + G = nx.freeze(nx.path_graph(3)) + edge = G.edges[(0, 1)] + edge["edge_attribute"] = True + assert edge["edge_attribute"] is True + + def test_neighbors_complete_graph(self): + graph = nx.complete_graph(100) + pop = random.sample(list(graph), 1) + nbors = list(nx.neighbors(graph, pop[0])) + # should be all the other vertices in the graph + assert len(nbors) == len(graph) - 1 + + graph = nx.path_graph(100) + node = 
random.sample(list(graph), 1)[0]
+        nbors = list(nx.neighbors(graph, node))
+        # interior nodes of a path have two neighbors, the endpoints one
+        if node != 0 and node != 99:
+            assert len(nbors) == 2
+        else:
+            assert len(nbors) == 1
+
+        # create a star graph with 99 outer nodes
+        graph = nx.star_graph(99)
+        nbors = list(nx.neighbors(graph, 0))
+        assert len(nbors) == 99
+
+    def test_non_neighbors(self):
+        graph = nx.complete_graph(100)
+        pop = random.sample(list(graph), 1)
+        nbors = nx.non_neighbors(graph, pop[0])
+        # a complete graph has no non-neighbors
+        assert len(nbors) == 0
+
+        graph = nx.path_graph(100)
+        node = random.sample(list(graph), 1)[0]
+        nbors = nx.non_neighbors(graph, node)
+        # everything except the node itself and its one or two neighbors
+        if node != 0 and node != 99:
+            assert len(nbors) == 97
+        else:
+            assert len(nbors) == 98
+
+        # create a star graph with 99 outer nodes
+        graph = nx.star_graph(99)
+        nbors = nx.non_neighbors(graph, 0)
+        assert len(nbors) == 0
+
+        # disconnected graph
+        graph = nx.Graph()
+        graph.add_nodes_from(range(10))
+        nbors = nx.non_neighbors(graph, 0)
+        assert len(nbors) == 9
+
+    def test_non_edges(self):
+        # All possible edges exist
+        graph = nx.complete_graph(5)
+        nedges = list(nx.non_edges(graph))
+        assert len(nedges) == 0
+
+        graph = nx.path_graph(4)
+        expected = [(0, 2), (0, 3), (1, 3)]
+        nedges = list(nx.non_edges(graph))
+        for u, v in expected:
+            assert (u, v) in nedges or (v, u) in nedges
+
+        graph = nx.star_graph(4)
+        expected = [(1, 2), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]
+        nedges = list(nx.non_edges(graph))
+        for u, v in expected:
+            assert (u, v) in nedges or (v, u) in nedges
+
+        # Directed graphs
+        graph = nx.DiGraph()
+        graph.add_edges_from([(0, 2), (2, 0), (2, 1)])
+        expected = [(0, 1), (1, 0), (1, 2)]
+        nedges = list(nx.non_edges(graph))
+        for e in expected:
+            assert e in nedges
+
+    def test_is_weighted(self):
+        G = nx.Graph()
+        assert not nx.is_weighted(G)
+
+        G = nx.path_graph(4)
+        assert not nx.is_weighted(G)
+        assert not nx.is_weighted(G, (2, 3))
+
+        G.add_node(4)
+        G.add_edge(3, 4, weight=4)
+        assert not nx.is_weighted(G)
+        assert nx.is_weighted(G, (3, 4))
+
+        G = nx.DiGraph()
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -5),
+                ("0", "2", 2),
+                ("1", "2", 4),
+                ("2", "3", 1),
+            ]
+        )
+        assert nx.is_weighted(G)
+        assert nx.is_weighted(G, ("1", "0"))
+
+        G = G.to_undirected()
+        assert nx.is_weighted(G)
+        assert nx.is_weighted(G, ("1", "0"))
+
+        pytest.raises(nx.NetworkXError, nx.is_weighted, G, (1, 2))
+
+    def test_is_negatively_weighted(self):
+        G = nx.Graph()
+        assert not nx.is_negatively_weighted(G)
+
+        G.add_node(1)
+        G.add_nodes_from([2, 3, 4, 5])
+        assert not nx.is_negatively_weighted(G)
+
+        G.add_edge(1, 2, weight=4)
+        assert not nx.is_negatively_weighted(G, (1, 2))
+
+        G.add_edges_from([(1, 3), (2, 4), (2, 6)])
+        G[1][3]["color"] = "blue"
+        assert not nx.is_negatively_weighted(G)
+        assert not nx.is_negatively_weighted(G, (1, 3))
+
+        G[2][4]["weight"] = -2
+        assert nx.is_negatively_weighted(G, (2, 4))
+        assert nx.is_negatively_weighted(G)
+
+        G = nx.DiGraph()
+        G.add_weighted_edges_from(
+            [
+                ("0", "3", 3),
+                ("0", "1", -5),
+                ("1", "0", -2),
+                ("0", "2", 2),
+                ("1", "2", -3),
+                ("2", "3", 1),
+            ]
+        )
+        assert nx.is_negatively_weighted(G)
+        assert not nx.is_negatively_weighted(G, ("0", "3"))
+        assert nx.is_negatively_weighted(G, ("1", "0"))
+
+        pytest.raises(nx.NetworkXError, nx.is_negatively_weighted, G, (1, 4))
+
+
+class TestCommonNeighbors:
+    @classmethod
+    def setup_class(cls):
+        cls.func =
staticmethod(nx.common_neighbors) + + def test_func(G, u, v, expected): + result = sorted(cls.func(G, u, v)) + assert result == expected + + cls.test = staticmethod(test_func) + + def test_K5(self): + G = nx.complete_graph(5) + self.test(G, 0, 1, [2, 3, 4]) + + def test_P3(self): + G = nx.path_graph(3) + self.test(G, 0, 2, [1]) + + def test_S4(self): + G = nx.star_graph(4) + self.test(G, 1, 2, [0]) + + def test_digraph(self): + with pytest.raises(nx.NetworkXNotImplemented): + G = nx.DiGraph() + G.add_edges_from([(0, 1), (1, 2)]) + self.func(G, 0, 2) + + def test_nonexistent_nodes(self): + G = nx.complete_graph(5) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 4) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 4, 5) + pytest.raises(nx.NetworkXError, nx.common_neighbors, G, 5, 6) + + def test_custom1(self): + """Case of no common neighbors.""" + G = nx.Graph() + G.add_nodes_from([0, 1]) + self.test(G, 0, 1, []) + + def test_custom2(self): + """Case of equal nodes.""" + G = nx.complete_graph(4) + self.test(G, 0, 0, [1, 2, 3]) + + +@pytest.mark.parametrize( + "graph_type", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) +) +def test_set_node_attributes(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + vals = 100 + attr = "hello" + nx.set_node_attributes(G, vals, attr) + assert G.nodes[0][attr] == vals + assert G.nodes[1][attr] == vals + assert G.nodes[2][attr] == vals + + # Test dictionary + G = nx.path_graph(3, create_using=graph_type) + vals = dict(zip(sorted(G.nodes()), range(len(G)))) + attr = "hi" + nx.set_node_attributes(G, vals, attr) + assert G.nodes[0][attr] == 0 + assert G.nodes[1][attr] == 1 + assert G.nodes[2][attr] == 2 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + vals = dict.fromkeys(G.nodes(), d) + vals.pop(0) + nx.set_node_attributes(G, vals) + assert G.nodes[0] == {} + assert G.nodes[1]["hi"] == 0 + assert G.nodes[2]["hello"] == 200 + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({0: "red", 1: "blue"}, "color"), # values dictionary + ({0: {"color": "red"}, 1: {"color": "blue"}}, None), # dict-of-dict + ), +) +def test_set_node_attributes_ignores_extra_nodes(values, name): + """ + When `values` is a dict or dict-of-dict keyed by nodes, ensure that keys + that correspond to nodes not in G are ignored. 
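+
+    A minimal sketch of the mechanism this behavior suggests (an assumption
+    for illustration only; the test checks the observable behavior, not the
+    implementation): each assignment is wrapped in ``try``/``except
+    KeyError``, so keys naming unknown nodes are skipped silently::
+
+        for node, value in values.items():
+            try:
+                G.nodes[node][name] = value
+            except KeyError:
+                pass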
+ """ + G = nx.Graph() + G.add_node(0) + nx.set_node_attributes(G, values, name) + assert G.nodes[0]["color"] == "red" + assert 1 not in G.nodes + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_set_edge_attributes(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 3 + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][attr] == vals + assert G[1][2][attr] == vals + + # Test multiple values + G = nx.path_graph(3, create_using=graph_type) + attr = "hi" + edges = [(0, 1), (1, 2)] + vals = dict(zip(edges, range(len(edges)))) + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][attr] == 0 + assert G[1][2][attr] == 1 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + edges = [(0, 1)] + vals = dict.fromkeys(edges, d) + nx.set_edge_attributes(G, vals) + assert G[0][1]["hi"] == 0 + assert G[0][1]["hello"] == 200 + assert G[1][2] == {} + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({(0, 1): 1.0, (0, 2): 2.0}, "weight"), # values dict + ({(0, 1): {"weight": 1.0}, (0, 2): {"weight": 2.0}}, None), # values dod + ), +) +def test_set_edge_attributes_ignores_extra_edges(values, name): + """If `values` is a dict or dict-of-dicts containing edges that are not in + G, data associate with these edges should be ignored. + """ + G = nx.Graph([(0, 1)]) + nx.set_edge_attributes(G, values, name) + assert G[0][1]["weight"] == 1.0 + assert (0, 2) not in G.edges + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph)) +def test_set_edge_attributes_multi(graph_type): + # Test single value + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 3 + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][0][attr] == vals + assert G[1][2][0][attr] == vals + + # Test multiple values + G = nx.path_graph(3, create_using=graph_type) + attr = "hi" + edges = [(0, 1, 0), (1, 2, 0)] + vals = dict(zip(edges, range(len(edges)))) + nx.set_edge_attributes(G, vals, attr) + assert G[0][1][0][attr] == 0 + assert G[1][2][0][attr] == 1 + + # Test dictionary of dictionaries + G = nx.path_graph(3, create_using=graph_type) + d = {"hi": 0, "hello": 200} + edges = [(0, 1, 0)] + vals = dict.fromkeys(edges, d) + nx.set_edge_attributes(G, vals) + assert G[0][1][0]["hi"] == 0 + assert G[0][1][0]["hello"] == 200 + assert G[1][2][0] == {} + + +@pytest.mark.parametrize( + ("values", "name"), + ( + ({(0, 1, 0): 1.0, (0, 2, 0): 2.0}, "weight"), # values dict + ({(0, 1, 0): {"weight": 1.0}, (0, 2, 0): {"weight": 2.0}}, None), # values dod + ), +) +def test_set_edge_attributes_multi_ignores_extra_edges(values, name): + """If `values` is a dict or dict-of-dicts containing edges that are not in + G, data associate with these edges should be ignored. 
+ """ + G = nx.MultiGraph([(0, 1, 0), (0, 1, 1)]) + nx.set_edge_attributes(G, values, name) + assert G[0][1][0]["weight"] == 1.0 + assert G[0][1][1] == {} + assert (0, 2) not in G.edges() + + +def test_get_node_attributes(): + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + for G in graphs: + G = nx.path_graph(3, create_using=G) + attr = "hello" + vals = 100 + nx.set_node_attributes(G, vals, attr) + attrs = nx.get_node_attributes(G, attr) + assert attrs[0] == vals + assert attrs[1] == vals + assert attrs[2] == vals + default_val = 1 + G.add_node(4) + attrs = nx.get_node_attributes(G, attr, default=default_val) + assert attrs[4] == default_val + + +def test_get_edge_attributes(): + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + for G in graphs: + G = nx.path_graph(3, create_using=G) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + attrs = nx.get_edge_attributes(G, attr) + assert len(attrs) == 2 + + for edge in G.edges: + assert attrs[edge] == vals + + default_val = vals + G.add_edge(4, 5) + deafult_attrs = nx.get_edge_attributes(G, attr, default=default_val) + assert len(deafult_attrs) == 3 + + for edge in G.edges: + assert deafult_attrs[edge] == vals + + +@pytest.mark.parametrize( + "graph_type", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) +) +def test_remove_node_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + vals = 100 + attr = "hello" + nx.set_node_attributes(G, vals, attr) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + assert attr not in G.nodes[1] + assert attr not in G.nodes[2] + + # Test removing single attribute when multiple present + G = nx.path_graph(3, create_using=graph_type) + other_vals = 200 + other_attr = "other" + nx.set_node_attributes(G, vals, attr) + nx.set_node_attributes(G, other_vals, other_attr) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + assert G.nodes[0][other_attr] == other_vals + assert attr not in G.nodes[1] + assert G.nodes[1][other_attr] == other_vals + assert attr not in G.nodes[2] + assert G.nodes[2][other_attr] == other_vals + + # Test removing multiple attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes(G, vals, attr) + nx.set_node_attributes(G, other_vals, other_attr) + nx.remove_node_attributes(G, attr, other_attr) + assert attr not in G.nodes[0] and other_attr not in G.nodes[0] + assert attr not in G.nodes[1] and other_attr not in G.nodes[1] + assert attr not in G.nodes[2] and other_attr not in G.nodes[2] + + # Test removing multiple (but not all) attributes + G = nx.path_graph(3, create_using=graph_type) + third_vals = 300 + third_attr = "three" + nx.set_node_attributes( + G, + { + n: {attr: vals, other_attr: other_vals, third_attr: third_vals} + for n in G.nodes() + }, + ) + nx.remove_node_attributes(G, other_attr, third_attr) + assert other_attr not in G.nodes[0] and third_attr not in G.nodes[0] + assert other_attr not in G.nodes[1] and third_attr not in G.nodes[1] + assert other_attr not in G.nodes[2] and third_attr not in G.nodes[2] + assert G.nodes[0][attr] == vals + assert G.nodes[1][attr] == vals + assert G.nodes[2][attr] == vals + + # Test incomplete node attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes( + G, + { + 1: {attr: vals, other_attr: other_vals}, + 2: {attr: vals, other_attr: other_vals}, + }, + ) + nx.remove_node_attributes(G, attr) + assert attr not in G.nodes[0] + 
assert attr not in G.nodes[1] + assert attr not in G.nodes[2] + assert G.nodes[1][other_attr] == other_vals + assert G.nodes[2][other_attr] == other_vals + + # Test removing on a subset of nodes + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes( + G, + { + n: {attr: vals, other_attr: other_vals, third_attr: third_vals} + for n in G.nodes() + }, + ) + nx.remove_node_attributes(G, attr, other_attr, nbunch=[0, 1]) + assert attr not in G.nodes[0] and other_attr not in G.nodes[0] + assert attr not in G.nodes[1] and other_attr not in G.nodes[1] + assert attr in G.nodes[2] and other_attr in G.nodes[2] + assert third_attr in G.nodes[0] and G.nodes[0][third_attr] == third_vals + assert third_attr in G.nodes[1] and G.nodes[1][third_attr] == third_vals + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_remove_edge_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + nx.remove_edge_attributes(G, attr) + assert len(nx.get_edge_attributes(G, attr)) == 0 + + # Test removing only some attributes + G = nx.path_graph(3, create_using=graph_type) + other_attr = "other" + other_vals = 200 + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr) + + assert attr not in G[0][1] + assert attr not in G[1][2] + assert G[0][1][other_attr] == 200 + assert G[1][2][other_attr] == 200 + + # Test removing multiple attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, vals, attr) + nx.set_edge_attributes(G, other_vals, other_attr) + nx.remove_edge_attributes(G, attr, other_attr) + assert attr not in G[0][1] and other_attr not in G[0][1] + assert attr not in G[1][2] and other_attr not in G[1][2] + + # Test removing multiple (not all) attributes + G = nx.path_graph(3, create_using=graph_type) + third_attr = "third" + third_vals = 300 + nx.set_edge_attributes( + G, + { + (u, v): {attr: vals, other_attr: other_vals, third_attr: third_vals} + for u, v in G.edges() + }, + ) + nx.remove_edge_attributes(G, other_attr, third_attr) + assert other_attr not in G[0][1] and third_attr not in G[0][1] + assert other_attr not in G[1][2] and third_attr not in G[1][2] + assert G[0][1][attr] == vals + assert G[1][2][attr] == vals + + # Test removing incomplete edge attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): {attr: vals, other_attr: other_vals}}) + nx.remove_edge_attributes(G, other_attr) + assert other_attr not in G[0][1] and G[0][1][attr] == vals + assert other_attr not in G[1][2] + + # Test removing subset of edge attributes + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes( + G, + { + (u, v): {attr: vals, other_attr: other_vals, third_attr: third_vals} + for u, v in G.edges() + }, + ) + nx.remove_edge_attributes(G, other_attr, third_attr, ebunch=[(0, 1)]) + assert other_attr not in G[0][1] and third_attr not in G[0][1] + assert other_attr in G[1][2] and third_attr in G[1][2] + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph)) +def test_remove_multi_edge_attributes(graph_type): + # Test removing single attribute + G = nx.path_graph(3, create_using=graph_type) + G.add_edge(1, 2) + attr = "hello" + vals = 100 + nx.set_edge_attributes(G, vals, attr) + nx.remove_edge_attributes(G, attr) + assert attr not in G[0][1][0] + assert attr not in G[1][2][0] + assert attr not 
in G[1][2][1]
+
+    # Test removing only some attributes
+    G = nx.path_graph(3, create_using=graph_type)
+    G.add_edge(1, 2)
+    other_attr = "other"
+    other_vals = 200
+    nx.set_edge_attributes(G, vals, attr)
+    nx.set_edge_attributes(G, other_vals, other_attr)
+    nx.remove_edge_attributes(G, attr)
+    assert attr not in G[0][1][0]
+    assert attr not in G[1][2][0]
+    assert attr not in G[1][2][1]
+    assert G[0][1][0][other_attr] == other_vals
+    assert G[1][2][0][other_attr] == other_vals
+    assert G[1][2][1][other_attr] == other_vals
+
+    # Test removing multiple attributes
+    G = nx.path_graph(3, create_using=graph_type)
+    G.add_edge(1, 2)
+    nx.set_edge_attributes(G, vals, attr)
+    nx.set_edge_attributes(G, other_vals, other_attr)
+    nx.remove_edge_attributes(G, attr, other_attr)
+    assert attr not in G[0][1][0] and other_attr not in G[0][1][0]
+    assert attr not in G[1][2][0] and other_attr not in G[1][2][0]
+    assert attr not in G[1][2][1] and other_attr not in G[1][2][1]
+
+    # Test removing multiple (not all) attributes
+    G = nx.path_graph(3, create_using=graph_type)
+    G.add_edge(1, 2)
+    third_attr = "third"
+    third_vals = 300
+    nx.set_edge_attributes(
+        G,
+        {
+            (u, v, k): {attr: vals, other_attr: other_vals, third_attr: third_vals}
+            for u, v, k in G.edges(keys=True)
+        },
+    )
+    nx.remove_edge_attributes(G, other_attr, third_attr)
+    assert other_attr not in G[0][1][0] and third_attr not in G[0][1][0]
+    assert other_attr not in G[1][2][0] and third_attr not in G[1][2][0]
+    assert other_attr not in G[1][2][1] and third_attr not in G[1][2][1]
+    assert G[0][1][0][attr] == vals
+    assert G[1][2][0][attr] == vals
+    assert G[1][2][1][attr] == vals
+
+    # Test removing incomplete edge attributes
+    G = nx.path_graph(3, create_using=graph_type)
+    G.add_edge(1, 2)
+    nx.set_edge_attributes(
+        G,
+        {
+            (0, 1, 0): {attr: vals, other_attr: other_vals},
+            (1, 2, 1): {attr: vals, other_attr: other_vals},
+        },
+    )
+    nx.remove_edge_attributes(G, other_attr)
+    assert other_attr not in G[0][1][0] and G[0][1][0][attr] == vals
+    assert other_attr not in G[1][2][0]
+    assert other_attr not in G[1][2][1]
+
+    # Test removing subset of edge attributes
+    G = nx.path_graph(3, create_using=graph_type)
+    G.add_edge(1, 2)
+    nx.set_edge_attributes(
+        G,
+        {
+            (0, 1, 0): {attr: vals, other_attr: other_vals},
+            (1, 2, 0): {attr: vals, other_attr: other_vals},
+            (1, 2, 1): {attr: vals, other_attr: other_vals},
+        },
+    )
+    nx.remove_edge_attributes(G, attr, ebunch=[(0, 1, 0), (1, 2, 0)])
+    assert attr not in G[0][1][0] and other_attr in G[0][1][0]
+    assert attr not in G[1][2][0] and other_attr in G[1][2][0]
+    assert attr in G[1][2][1] and other_attr in G[1][2][1]
+
+
+def test_is_empty():
+    graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()]
+    for G in graphs:
+        assert nx.is_empty(G)
+        G.add_nodes_from(range(5))
+        assert nx.is_empty(G)
+        G.add_edges_from([(1, 2), (3, 4)])
+        assert not nx.is_empty(G)
+
+
+@pytest.mark.parametrize(
+    "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
+)
+def test_selfloops(graph_type):
+    G = nx.complete_graph(3, create_using=graph_type)
+    G.add_edge(0, 0)
+    assert nodes_equal(nx.nodes_with_selfloops(G), [0])
+    assert edges_equal(nx.selfloop_edges(G), [(0, 0)])
+    assert edges_equal(nx.selfloop_edges(G, data=True), [(0, 0, {})])
+    assert nx.number_of_selfloops(G) == 1
+
+
+@pytest.mark.parametrize(
+    "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph]
+)
+def test_selfloop_edges_attr(graph_type):
+    G = nx.complete_graph(3, create_using=graph_type)
+
G.add_edge(0, 0) + G.add_edge(1, 1, weight=2) + assert edges_equal( + nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] + ) + assert edges_equal(nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)]) + + +def test_selfloop_edges_multi_with_data_and_keys(): + G = nx.complete_graph(3, create_using=nx.MultiGraph) + G.add_edge(0, 0, weight=10) + G.add_edge(0, 0, weight=100) + assert edges_equal( + nx.selfloop_edges(G, data="weight", keys=True), [(0, 0, 0, 10), (0, 0, 1, 100)] + ) + + +@pytest.mark.parametrize("graph_type", [nx.Graph, nx.DiGraph]) +def test_selfloops_removal(graph_type): + G = nx.complete_graph(3, create_using=graph_type) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, keys=True)) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, data=True)) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G, keys=True, data=True)) + + +@pytest.mark.parametrize("graph_type", [nx.MultiGraph, nx.MultiDiGraph]) +def test_selfloops_removal_multi(graph_type): + """test removing selfloops behavior vis-a-vis altering a dict while iterating. + cf. gh-4068""" + G = nx.complete_graph(3, create_using=graph_type) + # Defaults - see gh-4080 + G.add_edge(0, 0) + G.add_edge(0, 0) + G.remove_edges_from(nx.selfloop_edges(G)) + assert (0, 0) not in G.edges() + # With keys + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(RuntimeError): + G.remove_edges_from(nx.selfloop_edges(G, keys=True)) + # With data + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(TypeError): + G.remove_edges_from(nx.selfloop_edges(G, data=True)) + # With keys and data + G.add_edge(0, 0) + G.add_edge(0, 0) + with pytest.raises(RuntimeError): + G.remove_edges_from(nx.selfloop_edges(G, data=True, keys=True)) + + +def test_pathweight(): + valid_path = [1, 2, 3] + invalid_path = [1, 3, 2] + graphs = [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()] + edges = [ + (1, 2, {"cost": 5, "dist": 6}), + (2, 3, {"cost": 3, "dist": 4}), + (1, 2, {"cost": 1, "dist": 2}), + ] + for graph in graphs: + graph.add_edges_from(edges) + assert nx.path_weight(graph, valid_path, "cost") == 4 + assert nx.path_weight(graph, valid_path, "dist") == 6 + pytest.raises(nx.NetworkXNoPath, nx.path_weight, graph, invalid_path, "cost") + + +@pytest.mark.parametrize( + "G", (nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()) +) +def test_ispath(G): + G.add_edges_from([(1, 2), (2, 3), (1, 2), (3, 4)]) + valid_path = [1, 2, 3, 4] + invalid_path = [1, 2, 4, 3] # wrong node order + another_invalid_path = [1, 2, 3, 4, 5] # contains node not in G + assert nx.is_path(G, valid_path) + assert not nx.is_path(G, invalid_path) + assert not nx.is_path(G, another_invalid_path) + + +@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) +def test_restricted_view(G): + G.add_edges_from([(0, 1), (0, 2), (0, 3), (1, 0), (1, 1), (1, 2)]) + G.add_node(4) + H = nx.restricted_view(G, [0, 2, 5], [(1, 2), (3, 4)]) + assert set(H.nodes()) == {1, 3, 4} + assert set(H.edges()) == {(1, 1)} + + +@pytest.mark.parametrize("G", (nx.MultiGraph(), nx.MultiDiGraph())) +def test_restricted_view_multi(G): + G.add_edges_from( + [(0, 1, 0), (0, 2, 0), (0, 3, 0), (0, 1, 1), (1, 0, 0), (1, 1, 0), (1, 2, 0)] + ) + G.add_node(4) + H = nx.restricted_view(G, [0, 2, 5], [(1, 2, 0), (3, 4, 0)]) + assert set(H.nodes()) == {1, 3, 4} + assert set(H.edges()) == {(1, 1)} diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph.py 
b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph.py new file mode 100644 index 0000000..2b4ffe5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph.py @@ -0,0 +1,927 @@ +import gc +import pickle +import platform +import weakref + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + + +def test_degree_node_not_found_exception_message(): + """See gh-7740""" + G = nx.path_graph(5) + with pytest.raises(nx.NetworkXError, match="Node.*is not in the graph"): + G.degree(100) + + +class BaseGraphTester: + """Tests for data-structure independent graph class features.""" + + def test_contains(self): + G = self.K3 + assert 1 in G + assert 4 not in G + assert "b" not in G + assert [] not in G # no exception for nonhashable + assert {1: 1} not in G # no exception for nonhashable + + def test_order(self): + G = self.K3 + assert len(G) == 3 + assert G.order() == 3 + assert G.number_of_nodes() == 3 + + def test_nodes(self): + G = self.K3 + assert isinstance(G._node, G.node_dict_factory) + assert isinstance(G._adj, G.adjlist_outer_dict_factory) + assert all( + isinstance(adj, G.adjlist_inner_dict_factory) for adj in G._adj.values() + ) + assert sorted(G.nodes()) == self.k3nodes + assert sorted(G.nodes(data=True)) == [(0, {}), (1, {}), (2, {})] + + def test_none_node(self): + G = self.Graph() + with pytest.raises(ValueError): + G.add_node(None) + with pytest.raises(ValueError): + G.add_nodes_from([None]) + with pytest.raises(ValueError): + G.add_edge(0, None) + with pytest.raises(ValueError): + G.add_edges_from([(0, None)]) + + def test_has_node(self): + G = self.K3 + assert G.has_node(1) + assert not G.has_node(4) + assert not G.has_node([]) # no exception for nonhashable + assert not G.has_node({1: 1}) # no exception for nonhashable + + def test_has_edge(self): + G = self.K3 + assert G.has_edge(0, 1) + assert not G.has_edge(0, -1) + + def test_neighbors(self): + G = self.K3 + assert sorted(G.neighbors(0)) == [1, 2] + with pytest.raises(nx.NetworkXError): + G.neighbors(-1) + + @pytest.mark.skipif( + platform.python_implementation() == "PyPy", reason="PyPy gc is different" + ) + def test_memory_leak(self): + G = self.Graph() + + def count_objects_of_type(_type): + # Iterating over all objects tracked by gc can include weak references + # whose weakly-referenced objects may no longer exist. Calling `isinstance` + # on such a weak reference will raise ReferenceError. There are at least + # three workarounds for this: one is to compare type names instead of using + # `isinstance` such as `type(obj).__name__ == typename`, another is to use + # `type(obj) == _type`, and the last is to ignore ProxyTypes as we do below. + # NOTE: even if this safeguard is deemed unnecessary to pass NetworkX tests, + # we should still keep it for maximum safety for other NetworkX backends. 
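+            # For reference, the name-comparison alternative mentioned above
+            # would look roughly like (hypothetical, not used here):
+            #     type(obj).__name__ == _type.__name__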
+ return sum( + 1 + for obj in gc.get_objects() + if not isinstance(obj, weakref.ProxyTypes) and isinstance(obj, _type) + ) + + gc.collect() + before = count_objects_of_type(self.Graph) + G.copy() + gc.collect() + after = count_objects_of_type(self.Graph) + assert before == after + + # test a subgraph of the base class + class MyGraph(self.Graph): + pass + + gc.collect() + G = MyGraph() + before = count_objects_of_type(MyGraph) + G.copy() + gc.collect() + after = count_objects_of_type(MyGraph) + assert before == after + + def test_edges(self): + G = self.K3 + assert isinstance(G._adj, G.adjlist_outer_dict_factory) + assert edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)]) + assert edges_equal(G.edges(0), [(0, 1), (0, 2)]) + assert edges_equal(G.edges([0, 1]), [(0, 1), (0, 2), (1, 2)]) + with pytest.raises(nx.NetworkXError): + G.edges(-1) + + def test_degree(self): + G = self.K3 + assert sorted(G.degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.degree()) == {0: 2, 1: 2, 2: 2} + assert G.degree(0) == 2 + with pytest.raises(nx.NetworkXError): + G.degree(-1) # node not in graph + + def test_size(self): + G = self.K3 + assert G.size() == 3 + assert G.number_of_edges() == 3 + + def test_nbunch_iter(self): + G = self.K3 + assert nodes_equal(G.nbunch_iter(), self.k3nodes) # all nodes + assert nodes_equal(G.nbunch_iter(0), [0]) # single node + assert nodes_equal(G.nbunch_iter([0, 1]), [0, 1]) # sequence + # sequence with none in graph + assert nodes_equal(G.nbunch_iter([-1]), []) + # string sequence with none in graph + assert nodes_equal(G.nbunch_iter("foo"), []) + # node not in graph doesn't get caught upon creation of iterator + bunch = G.nbunch_iter(-1) + # but gets caught when iterator used + with pytest.raises(nx.NetworkXError, match="is not in the graph"): + list(bunch) + # unhashable doesn't get caught upon creation of iterator + bunch = G.nbunch_iter([0, 1, 2, {}]) + # but gets caught when iterator hits the unhashable + with pytest.raises( + nx.NetworkXError, match="in sequence nbunch is not a valid node" + ): + list(bunch) + + def test_nbunch_iter_node_format_raise(self): + # Tests that a node that would have failed string formatting + # doesn't cause an error when attempting to raise a + # :exc:`nx.NetworkXError`. + + # For more information, see pull request #1813. 
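+        # ("x", set()) is itself unhashable (it contains a set), so
+        # nbunch_iter must reject it; the point is that formatting that
+        # node into the error message must not raise a second error.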
+ G = self.Graph() + nbunch = [("x", set())] + with pytest.raises(nx.NetworkXError): + list(G.nbunch_iter(nbunch)) + + def test_selfloop_degree(self): + G = self.Graph() + G.add_edge(1, 1) + assert sorted(G.degree()) == [(1, 2)] + assert dict(G.degree()) == {1: 2} + assert G.degree(1) == 2 + assert sorted(G.degree([1])) == [(1, 2)] + assert G.degree(1, weight="weight") == 2 + + def test_selfloops(self): + G = self.K3.copy() + G.add_edge(0, 0) + assert nodes_equal(nx.nodes_with_selfloops(G), [0]) + assert edges_equal(nx.selfloop_edges(G), [(0, 0)]) + assert nx.number_of_selfloops(G) == 1 + G.remove_edge(0, 0) + G.add_edge(0, 0) + G.remove_edges_from([(0, 0)]) + G.add_edge(1, 1) + G.remove_node(1) + G.add_edge(0, 0) + G.add_edge(1, 1) + G.remove_nodes_from([0, 1]) + + def test_cache_reset(self): + G = self.K3.copy() + old_adj = G.adj + assert id(G.adj) == id(old_adj) + G._adj = {} + assert id(G.adj) != id(old_adj) + + old_nodes = G.nodes + assert id(G.nodes) == id(old_nodes) + G._node = {} + assert id(G.nodes) != id(old_nodes) + + def test_attributes_cached(self): + G = self.K3.copy() + assert id(G.nodes) == id(G.nodes) + assert id(G.edges) == id(G.edges) + assert id(G.degree) == id(G.degree) + assert id(G.adj) == id(G.adj) + + +class BaseAttrGraphTester(BaseGraphTester): + """Tests of graph class attribute features.""" + + def test_weighted_degree(self): + G = self.Graph() + G.add_edge(1, 2, weight=2, other=3) + G.add_edge(2, 3, weight=3, other=4) + assert sorted(d for n, d in G.degree(weight="weight")) == [2, 3, 5] + assert dict(G.degree(weight="weight")) == {1: 2, 2: 5, 3: 3} + assert G.degree(1, weight="weight") == 2 + assert nodes_equal((G.degree([1], weight="weight")), [(1, 2)]) + + assert nodes_equal((d for n, d in G.degree(weight="other")), [3, 7, 4]) + assert dict(G.degree(weight="other")) == {1: 3, 2: 7, 3: 4} + assert G.degree(1, weight="other") == 3 + assert edges_equal((G.degree([1], weight="other")), [(1, 3)]) + + def add_attributes(self, G): + G.graph["foo"] = [] + G.nodes[0]["foo"] = [] + G.remove_edge(1, 2) + ll = [] + G.add_edge(1, 2, foo=ll) + G.add_edge(2, 1, foo=ll) + + def test_name(self): + G = self.Graph(name="") + assert G.name == "" + G = self.Graph(name="test") + assert G.name == "test" + + def test_str_unnamed(self): + G = self.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + assert str(G) == f"{type(G).__name__} with 3 nodes and 2 edges" + + def test_str_named(self): + G = self.Graph(name="foo") + G.add_edges_from([(1, 2), (2, 3)]) + assert str(G) == f"{type(G).__name__} named 'foo' with 3 nodes and 2 edges" + + def test_graph_chain(self): + G = self.Graph([(0, 1), (1, 2)]) + DG = G.to_directed(as_view=True) + SDG = DG.subgraph([0, 1]) + RSDG = SDG.reverse(copy=False) + assert G is DG._graph + assert DG is SDG._graph + assert SDG is RSDG._graph + + def test_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy edge datadict but any container attr are same + H = G.copy() + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + def test_class_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy edge datadict but any container attr are same + H = G.__class__(G) + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + def test_fresh_copy(self): + G = self.Graph() + G.add_node(0) + G.add_edge(1, 2) + self.add_attributes(G) + # copy graph structure but use fresh datadict + H = G.__class__() + 
H.add_nodes_from(G) + H.add_edges_from(G.edges()) + assert len(G.nodes[0]) == 1 + ddict = G.adj[1][2][0] if G.is_multigraph() else G.adj[1][2] + assert len(ddict) == 1 + assert len(H.nodes[0]) == 0 + ddict = H.adj[1][2][0] if H.is_multigraph() else H.adj[1][2] + assert len(ddict) == 0 + + def is_deepcopy(self, H, G): + self.graphs_equal(H, G) + self.different_attrdict(H, G) + self.deep_copy_attrdict(H, G) + + def deep_copy_attrdict(self, H, G): + self.deepcopy_graph_attr(H, G) + self.deepcopy_node_attr(H, G) + self.deepcopy_edge_attr(H, G) + + def deepcopy_graph_attr(self, H, G): + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] != H.graph["foo"] + + def deepcopy_node_attr(self, H, G): + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] != H.nodes[0]["foo"] + + def deepcopy_edge_attr(self, H, G): + assert G[1][2]["foo"] == H[1][2]["foo"] + G[1][2]["foo"].append(1) + assert G[1][2]["foo"] != H[1][2]["foo"] + + def is_shallow_copy(self, H, G): + self.graphs_equal(H, G) + self.shallow_copy_attrdict(H, G) + + def shallow_copy_attrdict(self, H, G): + self.shallow_copy_graph_attr(H, G) + self.shallow_copy_node_attr(H, G) + self.shallow_copy_edge_attr(H, G) + + def shallow_copy_graph_attr(self, H, G): + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] == H.graph["foo"] + + def shallow_copy_node_attr(self, H, G): + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + + def shallow_copy_edge_attr(self, H, G): + assert G[1][2]["foo"] == H[1][2]["foo"] + G[1][2]["foo"].append(1) + assert G[1][2]["foo"] == H[1][2]["foo"] + + def same_attrdict(self, H, G): + old_foo = H[1][2]["foo"] + H.adj[1][2]["foo"] = "baz" + assert G.edges == H.edges + H.adj[1][2]["foo"] = old_foo + assert G.edges == H.edges + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G.nodes == H.nodes + H.nodes[0]["foo"] = old_foo + assert G.nodes == H.nodes + + def different_attrdict(self, H, G): + old_foo = H[1][2]["foo"] + H.adj[1][2]["foo"] = "baz" + assert G._adj != H._adj + H.adj[1][2]["foo"] = old_foo + assert G._adj == H._adj + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G._node != H._node + H.nodes[0]["foo"] = old_foo + assert G._node == H._node + + def graphs_equal(self, H, G): + assert G._adj == H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name + if not G.is_directed() and not H.is_directed(): + assert H._adj[1][2] is H._adj[2][1] + assert G._adj[1][2] is G._adj[2][1] + else: # at least one is directed + if not G.is_directed(): + G._pred = G._adj + G._succ = G._adj + if not H.is_directed(): + H._pred = H._adj + H._succ = H._adj + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2] is H._pred[2][1] + assert G._succ[1][2] is G._pred[2][1] + + def test_graph_attr(self): + G = self.K3.copy() + G.graph["foo"] = "bar" + assert isinstance(G.graph, G.graph_attr_dict_factory) + assert G.graph["foo"] == "bar" + del G.graph["foo"] + assert G.graph == {} + H = self.Graph(foo="bar") + assert H.graph["foo"] == "bar" + + def test_node_attr(self): + G = self.K3.copy() + G.add_node(1, foo="bar") + assert all( + isinstance(d, G.node_attr_dict_factory) for u, d in G.nodes(data=True) + ) + assert nodes_equal(G.nodes(), [0, 1, 2]) + assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "bar"}), (2, {})]) + G.nodes[1]["foo"] = "baz" 
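+        # G.nodes[1] returns the live attribute dict, so the assignment
+        # above writes straight through to the graph's node store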
+ assert nodes_equal(G.nodes(data=True), [(0, {}), (1, {"foo": "baz"}), (2, {})]) + assert nodes_equal(G.nodes(data="foo"), [(0, None), (1, "baz"), (2, None)]) + assert nodes_equal( + G.nodes(data="foo", default="bar"), [(0, "bar"), (1, "baz"), (2, "bar")] + ) + + def test_node_attr2(self): + G = self.K3.copy() + a = {"foo": "bar"} + G.add_node(3, **a) + assert nodes_equal(G.nodes(), [0, 1, 2, 3]) + assert nodes_equal( + G.nodes(data=True), [(0, {}), (1, {}), (2, {}), (3, {"foo": "bar"})] + ) + + def test_edge_lookup(self): + G = self.Graph() + G.add_edge(1, 2, foo="bar") + assert edges_equal(G.edges[1, 2], {"foo": "bar"}) + + def test_edge_attr(self): + G = self.Graph() + G.add_edge(1, 2, foo="bar") + assert all( + isinstance(d, G.edge_attr_dict_factory) for u, v, d in G.edges(data=True) + ) + assert edges_equal(G.edges(data=True), [(1, 2, {"foo": "bar"})]) + assert edges_equal(G.edges(data="foo"), [(1, 2, "bar")]) + + def test_edge_attr2(self): + G = self.Graph() + G.add_edges_from([(1, 2), (3, 4)], foo="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"foo": "foo"}), (3, 4, {"foo": "foo"})] + ) + assert edges_equal(G.edges(data="foo"), [(1, 2, "foo"), (3, 4, "foo")]) + + def test_edge_attr3(self): + G = self.Graph() + G.add_edges_from([(1, 2, {"weight": 32}), (3, 4, {"weight": 64})], foo="foo") + assert edges_equal( + G.edges(data=True), + [ + (1, 2, {"foo": "foo", "weight": 32}), + (3, 4, {"foo": "foo", "weight": 64}), + ], + ) + + G.remove_edges_from([(1, 2), (3, 4)]) + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + + def test_edge_attr4(self): + G = self.Graph() + G.add_edge(1, 2, data=7, spam="bar", bar="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + G[1][2]["data"] = 10 # OK to set data like this + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})] + ) + + G.adj[1][2]["data"] = 20 + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})] + ) + G.edges[1, 2]["data"] = 21 # another spelling, "edge" + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})] + ) + G.adj[1][2]["listdata"] = [20, 200] + G.adj[1][2]["weight"] = 20 + dd = { + "data": 21, + "spam": "bar", + "bar": "foo", + "listdata": [20, 200], + "weight": 20, + } + assert edges_equal(G.edges(data=True), [(1, 2, dd)]) + + def test_to_undirected(self): + G = self.K3 + self.add_attributes(G) + H = nx.Graph(G) + self.is_shallow_copy(H, G) + self.different_attrdict(H, G) + H = G.to_undirected() + self.is_deepcopy(H, G) + + def test_to_directed_as_view(self): + H = nx.path_graph(2, create_using=self.Graph) + H2 = H.to_directed(as_view=True) + assert H is H2._graph + assert H2.has_edge(0, 1) + assert H2.has_edge(1, 0) or H.is_directed() + pytest.raises(nx.NetworkXError, H2.add_node, -1) + pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2) + H.add_edge(1, 2) + assert H2.has_edge(1, 2) + assert H2.has_edge(2, 1) or H.is_directed() + + def test_to_undirected_as_view(self): + H = nx.path_graph(2, create_using=self.Graph) + H2 = H.to_undirected(as_view=True) + assert H is H2._graph + assert H2.has_edge(0, 1) + assert H2.has_edge(1, 0) + pytest.raises(nx.NetworkXError, H2.add_node, -1) + pytest.raises(nx.NetworkXError, H2.add_edge, 1, 2) + H.add_edge(1, 2) + assert H2.has_edge(1, 2) + assert H2.has_edge(2, 1) + + def test_directed_class(self): + G = 
self.Graph() + + class newGraph(G.to_undirected_class()): + def to_directed_class(self): + return newDiGraph + + def to_undirected_class(self): + return newGraph + + class newDiGraph(G.to_directed_class()): + def to_directed_class(self): + return newDiGraph + + def to_undirected_class(self): + return newGraph + + G = newDiGraph() if G.is_directed() else newGraph() + H = G.to_directed() + assert isinstance(H, newDiGraph) + H = G.to_undirected() + assert isinstance(H, newGraph) + + def test_to_directed(self): + G = self.K3 + self.add_attributes(G) + H = nx.DiGraph(G) + self.is_shallow_copy(H, G) + self.different_attrdict(H, G) + H = G.to_directed() + self.is_deepcopy(H, G) + + def test_subgraph(self): + G = self.K3 + self.add_attributes(G) + H = G.subgraph([0, 1, 2, 5]) + self.graphs_equal(H, G) + self.same_attrdict(H, G) + self.shallow_copy_attrdict(H, G) + + H = G.subgraph(0) + assert H.adj == {0: {}} + H = G.subgraph([]) + assert H.adj == {} + assert G.adj != {} + + def test_selfloops_attr(self): + G = self.K3.copy() + G.add_edge(0, 0) + G.add_edge(1, 1, weight=2) + assert edges_equal( + nx.selfloop_edges(G, data=True), [(0, 0, {}), (1, 1, {"weight": 2})] + ) + assert edges_equal( + nx.selfloop_edges(G, data="weight"), [(0, 0, None), (1, 1, 2)] + ) + + +class TestGraph(BaseAttrGraphTester): + """Tests specific to dict-of-dict-of-dict graph data structure""" + + def setup_method(self): + self.Graph = nx.Graph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = ({}, {}, {}) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.k3adj + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + def test_pickle(self): + G = self.K3 + pg = pickle.loads(pickle.dumps(G, -1)) + self.graphs_equal(pg, G) + pg = pickle.loads(pickle.dumps(G)) + self.graphs_equal(pg, G) + + def test_data_input(self): + G = self.Graph({1: [2], 2: [1]}, name="test") + assert G.name == "test" + assert sorted(G.adj.items()) == [(1, {2: {}}), (2, {1: {}})] + + def test_adjacency(self): + G = self.K3 + assert dict(G.adjacency()) == { + 0: {1: {}, 2: {}}, + 1: {0: {}, 2: {}}, + 2: {0: {}, 1: {}}, + } + + def test_getitem(self): + G = self.K3 + assert G.adj[0] == {1: {}, 2: {}} + assert G[0] == {1: {}, 2: {}} + with pytest.raises(KeyError): + G.__getitem__("j") + with pytest.raises(TypeError): + G.__getitem__(["A"]) + + def test_add_node(self): + G = self.Graph() + G.add_node(0) + assert G.adj == {0: {}} + # test add attributes + G.add_node(1, c="red") + G.add_node(2, c="blue") + G.add_node(3, c="red") + assert G.nodes[1]["c"] == "red" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[3]["c"] == "red" + # test updating attributes + G.add_node(1, c="blue") + G.add_node(2, c="red") + G.add_node(3, c="blue") + assert G.nodes[1]["c"] == "blue" + assert G.nodes[2]["c"] == "red" + assert G.nodes[3]["c"] == "blue" + + def test_add_nodes_from(self): + G = self.Graph() + G.add_nodes_from([0, 1, 2]) + assert G.adj == {0: {}, 1: {}, 2: {}} + # test add attributes + G.add_nodes_from([0, 1, 2], c="red") + assert G.nodes[0]["c"] == "red" + assert G.nodes[2]["c"] == "red" + # test that attribute dicts are not the same + assert G.nodes[0] is not G.nodes[1] + # test updating attributes + G.add_nodes_from([0, 1, 2], c="blue") + assert G.nodes[0]["c"] == "blue" + assert G.nodes[2]["c"] == "blue" + assert G.nodes[0] is not G.nodes[1] + # test tuple input + H = 
self.Graph() + H.add_nodes_from(G.nodes(data=True)) + assert H.nodes[0]["c"] == "blue" + assert H.nodes[2]["c"] == "blue" + assert H.nodes[0] is not H.nodes[1] + # specific overrides general + H.add_nodes_from([0, (1, {"c": "green"}), (3, {"c": "cyan"})], c="red") + assert H.nodes[0]["c"] == "red" + assert H.nodes[1]["c"] == "green" + assert H.nodes[2]["c"] == "blue" + assert H.nodes[3]["c"] == "cyan" + + def test_remove_node(self): + G = self.K3.copy() + G.remove_node(0) + assert G.adj == {1: {2: {}}, 2: {1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_node(-1) + + # generator here to implement list,set,string... + + def test_remove_nodes_from(self): + G = self.K3.copy() + G.remove_nodes_from([0, 1]) + assert G.adj == {2: {}} + G.remove_nodes_from([-1]) # silent fail + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G.adj == {0: {1: {}}, 1: {0: {}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G.adj == {0: {1: {}}, 1: {0: {}}} + G = self.Graph() + with pytest.raises(ValueError): + G.add_edge(None, "anything") + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"weight": 3})]) + assert G.adj == { + 0: {1: {}, 2: {"weight": 3}}, + 1: {0: {}}, + 2: {0: {"weight": 3}}, + } + G = self.Graph() + G.add_edges_from([(0, 1), (0, 2, {"weight": 3}), (1, 2, {"data": 4})], data=2) + assert G.adj == { + 0: {1: {"data": 2}, 2: {"weight": 3, "data": 2}}, + 1: {0: {"data": 2}, 2: {"data": 4}}, + 2: {0: {"weight": 3, "data": 2}, 1: {"data": 4}}, + } + + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3)]) # too many in tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) # not a tuple + with pytest.raises(ValueError): + G.add_edges_from([(None, 3), (3, 2)]) # None cannot be a node + + def test_remove_edge(self): + G = self.K3.copy() + G.remove_edge(0, 1) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + + def test_remove_edges_from(self): + G = self.K3.copy() + G.remove_edges_from([(0, 1)]) + assert G.adj == {0: {2: {}}, 1: {2: {}}, 2: {0: {}, 1: {}}} + G.remove_edges_from([(0, 0)]) # silent fail + + def test_clear(self): + G = self.K3.copy() + G.graph["name"] = "K3" + G.clear() + assert list(G.nodes) == [] + assert G.adj == {} + assert G.graph == {} + + def test_clear_edges(self): + G = self.K3.copy() + G.graph["name"] = "K3" + nodes = list(G.nodes) + G.clear_edges() + assert list(G.nodes) == nodes + assert G.adj == {0: {}, 1: {}, 2: {}} + assert list(G.edges) == [] + assert G.graph["name"] == "K3" + + def test_edges_data(self): + G = self.K3 + all_edges = [(0, 1, {}), (0, 2, {}), (1, 2, {})] + assert edges_equal(G.edges(data=True), all_edges) + assert edges_equal(G.edges(0, data=True), [(0, 1, {}), (0, 2, {})]) + assert edges_equal(G.edges([0, 1], data=True), all_edges) + with pytest.raises(nx.NetworkXError): + G.edges(-1, True) + + def test_get_edge_data(self): + G = self.K3.copy() + assert G.get_edge_data(0, 1) == {} + assert G[0][1] == {} + assert G.get_edge_data(10, 20) is None + assert G.get_edge_data(-1, 0) is None + assert G.get_edge_data(-1, 0, default=1) == 1 + + def test_update(self): + # specify both edges and nodes + G = self.K3.copy() + G.update(nodes=[3, (4, {"size": 2})], edges=[(4, 5), (6, 7, {"weight": 2})]) + nlist = [ + (0, {}), + (1, {}), + (2, {}), + (3, {}), + (4, {"size": 2}), + (5, {}), + (6, {}), + (7, {}), + ] + 
assert sorted(G.nodes.data()) == nlist + if G.is_directed(): + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + else: + elist = [ + (0, 1, {}), + (0, 2, {}), + (1, 2, {}), + (4, 5, {}), + (6, 7, {"weight": 2}), + ] + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # no keywords -- order is edges, nodes + G = self.K3.copy() + G.update([(4, 5), (6, 7, {"weight": 2})], [3, (4, {"size": 2})]) + assert sorted(G.nodes.data()) == nlist + assert sorted(G.edges.data()) == elist + assert G.graph == {} + + # update using only a graph + G = self.Graph() + G.graph["foo"] = "bar" + G.add_node(2, data=4) + G.add_edge(0, 1, weight=0.5) + GG = G.copy() + H = self.Graph() + GG.update(H) + assert graphs_equal(G, GG) + H.update(G) + assert graphs_equal(H, G) + + # update nodes only + H = self.Graph() + H.update(nodes=[3, 4]) + assert H.nodes ^ {3, 4} == set() + assert H.size() == 0 + + # update edges only + H = self.Graph() + H.update(edges=[(3, 4)]) + assert sorted(H.edges.data()) == [(3, 4, {})] + assert H.size() == 1 + + # No inputs -> exception + with pytest.raises(nx.NetworkXError): + nx.Graph().update() + + +class TestEdgeSubgraph: + """Unit tests for the :meth:`Graph.edge_subgraph` method.""" + + def setup_method(self): + # Create a path graph on five nodes. + G = nx.path_graph(5) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1), (3, 4)]) + + def test_correct_nodes(self): + """Tests that the subgraph has the correct nodes.""" + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_correct_edges(self): + """Tests that the subgraph has the correct edges.""" + assert [(0, 1, "edge01"), (3, 4, "edge34")] == sorted(self.H.edges(data="name")) + + def test_add_node(self): + """Tests that adding a node to the original graph does not + affect the nodes of the subgraph. + + """ + self.G.add_node(5) + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_remove_node(self): + """Tests that removing a node in the original graph does + affect the nodes of the subgraph. + + """ + self.G.remove_node(0) + assert [1, 3, 4] == sorted(self.H.nodes()) + + def test_node_attr_dict(self): + """Tests that the node attribute dictionary of the two graphs is + the same object. + + """ + for v in self.H: + assert self.G.nodes[v] == self.H.nodes[v] + # Making a change to G should make a change in H and vice versa. + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] + + def test_edge_attr_dict(self): + """Tests that the edge attribute dictionary of the two graphs is + the same object. + + """ + for u, v in self.H.edges(): + assert self.G.edges[u, v] == self.H.edges[u, v] + # Making a change to G should make a change in H and vice versa. + self.G.edges[0, 1]["name"] = "foo" + assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"] + self.H.edges[3, 4]["name"] = "bar" + assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"] + + def test_graph_attr_dict(self): + """Tests that the graph attribute dictionary of the two graphs + is the same object. 
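+
+        An illustrative doctest-style sketch of the sharing (assuming the
+        usual ``import networkx as nx``):
+
+        >>> G = nx.path_graph(3)
+        >>> H = G.edge_subgraph([(0, 1)])
+        >>> G.graph["title"] = "shared"
+        >>> H.graph["title"]
+        'shared'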
+ + """ + assert self.G.graph is self.H.graph diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph_historical.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph_historical.py new file mode 100644 index 0000000..6e80878 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graph_historical.py @@ -0,0 +1,12 @@ +"""Original NetworkX graph tests""" + +import networkx as nx + +from .historical_tests import HistoricalTests + + +class TestGraphHistorical(HistoricalTests): + @classmethod + def setup_class(cls): + HistoricalTests.setup_class() + cls.G = nx.Graph diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graphviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graphviews.py new file mode 100644 index 0000000..c8f0e63 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_graphviews.py @@ -0,0 +1,349 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + +# Note: SubGraph views are not tested here. They have their own testing file + + +class TestReverseView: + def setup_method(self): + self.G = nx.path_graph(9, create_using=nx.DiGraph()) + self.rv = nx.reverse_view(self.G) + + def test_pickle(self): + import pickle + + rv = self.rv + prv = pickle.loads(pickle.dumps(rv, -1)) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph + + def test_contains(self): + assert (2, 3) in self.G.edges + assert (3, 2) not in self.G.edges + assert (2, 3) not in self.rv.edges + assert (3, 2) in self.rv.edges + + def test_iter(self): + expected = sorted(tuple(reversed(e)) for e in self.G.edges) + assert sorted(self.rv.edges) == expected + + def test_exceptions(self): + G = nx.Graph() + pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, G) + + def test_subclass(self): + class MyGraph(nx.DiGraph): + def my_method(self): + return "me" + + def to_directed_class(self): + return MyGraph() + + M = MyGraph() + M.add_edge(1, 2) + RM = nx.reverse_view(M) + assert RM.__class__ == MyGraph + RMC = RM.copy() + assert RMC.__class__ == MyGraph + assert RMC.has_edge(2, 1) + assert RMC.my_method() == "me" + + +class TestMultiReverseView: + def setup_method(self): + self.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + self.G.add_edge(4, 5) + self.rv = nx.reverse_view(self.G) + + def test_pickle(self): + import pickle + + rv = self.rv + prv = pickle.loads(pickle.dumps(rv, -1)) + assert rv._node == prv._node + assert rv._adj == prv._adj + assert rv.graph == prv.graph + + def test_contains(self): + assert (2, 3, 0) in self.G.edges + assert (3, 2, 0) not in self.G.edges + assert (2, 3, 0) not in self.rv.edges + assert (3, 2, 0) in self.rv.edges + assert (5, 4, 1) in self.rv.edges + assert (4, 5, 1) not in self.rv.edges + + def test_iter(self): + expected = sorted((v, u, k) for u, v, k in self.G.edges) + assert sorted(self.rv.edges) == expected + + def test_exceptions(self): + MG = nx.MultiGraph(self.G) + pytest.raises(nx.NetworkXNotImplemented, nx.reverse_view, MG) + + +def test_generic_multitype(): + nxg = nx.graphviews + G = nx.DiGraph([(1, 2)]) + with pytest.raises(nx.NetworkXError): + nxg.generic_graph_view(G, create_using=nx.MultiGraph) + G = nx.MultiDiGraph([(1, 2)]) + with pytest.raises(nx.NetworkXError): + nxg.generic_graph_view(G, create_using=nx.DiGraph) + + +class TestToDirected: + def setup_method(self): + self.G = nx.path_graph(9) + self.dv = nx.to_directed(self.G) + self.MG 
= nx.path_graph(9, create_using=nx.MultiGraph()) + self.Mdv = nx.to_directed(self.MG) + + def test_directed(self): + assert not self.G.is_directed() + assert self.dv.is_directed() + + def test_already_directed(self): + dd = nx.to_directed(self.dv) + Mdd = nx.to_directed(self.Mdv) + assert edges_equal(dd.edges, self.dv.edges) + assert edges_equal(Mdd.edges, self.Mdv.edges) + + def test_pickle(self): + import pickle + + dv = self.dv + pdv = pickle.loads(pickle.dumps(dv, -1)) + assert dv._node == pdv._node + assert dv._succ == pdv._succ + assert dv._pred == pdv._pred + assert dv.graph == pdv.graph + + def test_contains(self): + assert (2, 3) in self.G.edges + assert (3, 2) in self.G.edges + assert (2, 3) in self.dv.edges + assert (3, 2) in self.dv.edges + + def test_iter(self): + revd = [tuple(reversed(e)) for e in self.G.edges] + expected = sorted(list(self.G.edges) + revd) + assert sorted(self.dv.edges) == expected + + +class TestToUndirected: + def setup_method(self): + self.DG = nx.path_graph(9, create_using=nx.DiGraph()) + self.uv = nx.to_undirected(self.DG) + self.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) + self.Muv = nx.to_undirected(self.MDG) + + def test_directed(self): + assert self.DG.is_directed() + assert not self.uv.is_directed() + + def test_already_directed(self): + uu = nx.to_undirected(self.uv) + Muu = nx.to_undirected(self.Muv) + assert edges_equal(uu.edges, self.uv.edges) + assert edges_equal(Muu.edges, self.Muv.edges) + + def test_pickle(self): + import pickle + + uv = self.uv + puv = pickle.loads(pickle.dumps(uv, -1)) + assert uv._node == puv._node + assert uv._adj == puv._adj + assert uv.graph == puv.graph + assert hasattr(uv, "_graph") + + def test_contains(self): + assert (2, 3) in self.DG.edges + assert (3, 2) not in self.DG.edges + assert (2, 3) in self.uv.edges + assert (3, 2) in self.uv.edges + + def test_iter(self): + expected = sorted(self.DG.edges) + assert sorted(self.uv.edges) == expected + + +class TestChainsOfViews: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.DG = nx.path_graph(9, create_using=nx.DiGraph()) + cls.MG = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.MDG = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.Gv = nx.to_undirected(cls.DG) + cls.DGv = nx.to_directed(cls.G) + cls.MGv = nx.to_undirected(cls.MDG) + cls.MDGv = nx.to_directed(cls.MG) + cls.Rv = cls.DG.reverse() + cls.MRv = cls.MDG.reverse() + cls.graphs = [ + cls.G, + cls.DG, + cls.MG, + cls.MDG, + cls.Gv, + cls.DGv, + cls.MGv, + cls.MDGv, + cls.Rv, + cls.MRv, + ] + for G in cls.graphs: + G.edges, G.nodes, G.degree + + def test_pickle(self): + import pickle + + for G in self.graphs: + H = pickle.loads(pickle.dumps(G, -1)) + assert edges_equal(H.edges, G.edges) + assert nodes_equal(H.nodes, G.nodes) + + def test_subgraph_of_subgraph(self): + SGv = nx.subgraph(self.G, range(3, 7)) + SDGv = nx.subgraph(self.DG, range(3, 7)) + SMGv = nx.subgraph(self.MG, range(3, 7)) + SMDGv = nx.subgraph(self.MDG, range(3, 7)) + for G in self.graphs + [SGv, SDGv, SMGv, SMDGv]: + SG = nx.induced_subgraph(G, [4, 5, 6]) + assert list(SG) == [4, 5, 6] + SSG = SG.subgraph([6, 7]) + assert list(SSG) == [6] + # subgraph-subgraph chain is short-cut in base class method + assert SSG._graph is G + + def test_restricted_induced_subgraph_chains(self): + """Test subgraph chains that both restrict and show nodes/edges. 
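+
+        An illustrative doctest-style sketch of ``restricted_view`` itself,
+        which hides the given nodes and edges behind a read-only view:
+
+        >>> G = nx.path_graph(5)
+        >>> R = nx.restricted_view(G, [0], [(3, 4)])
+        >>> sorted(R.nodes)
+        [1, 2, 3, 4]
+        >>> sorted(R.edges)
+        [(1, 2), (2, 3)]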
+
+        A restricted_view subgraph should allow induced subgraphs via
+        G.subgraph that automatically short-circuit the chain, meaning the
+        result is a subgraph view of the original graph, not a
+        subgraph-of-subgraph.
+        """
+        hide_nodes = [3, 4, 5]
+        hide_edges = [(6, 7)]
+        RG = nx.restricted_view(self.G, hide_nodes, hide_edges)
+        nodes = [4, 5, 6, 7, 8]
+        SG = nx.induced_subgraph(RG, nodes)
+        SSG = RG.subgraph(nodes)
+        assert RG._graph is self.G
+        assert SSG._graph is self.G
+        assert SG._graph is RG
+        assert edges_equal(SG.edges, SSG.edges)
+        # should be same as morphing the graph
+        CG = self.G.copy()
+        CG.remove_nodes_from(hide_nodes)
+        CG.remove_edges_from(hide_edges)
+        assert edges_equal(CG.edges(nodes), SSG.edges)
+        CG.remove_nodes_from([0, 1, 2, 3])
+        assert edges_equal(CG.edges, SSG.edges)
+        # switch order: subgraph first, then restricted view
+        SSSG = self.G.subgraph(nodes)
+        RSG = nx.restricted_view(SSSG, hide_nodes, hide_edges)
+        assert RSG._graph is not self.G
+        assert edges_equal(RSG.edges, CG.edges)
+
+    def test_subgraph_copy(self):
+        for origG in self.graphs:
+            G = nx.Graph(origG)
+            SG = G.subgraph([4, 5, 6])
+            H = SG.copy()
+            assert type(G) is type(H)
+
+    def test_subgraph_todirected(self):
+        SG = nx.induced_subgraph(self.G, [4, 5, 6])
+        SSG = SG.to_directed()
+        assert sorted(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 4), (5, 6), (6, 5)]
+
+    def test_subgraph_toundirected(self):
+        SG = nx.induced_subgraph(self.G, [4, 5, 6])
+        SSG = SG.to_undirected()
+        assert list(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 6)]
+
+    def test_reverse_subgraph_toundirected(self):
+        G = self.DG.reverse(copy=False)
+        SG = G.subgraph([4, 5, 6])
+        SSG = SG.to_undirected()
+        assert list(SSG) == [4, 5, 6]
+        assert sorted(SSG.edges) == [(4, 5), (5, 6)]
+
+    def test_reverse_reverse_copy(self):
+        G = self.DG.reverse(copy=False)
+        H = G.reverse(copy=True)
+        assert H.nodes == self.DG.nodes
+        assert H.edges == self.DG.edges
+        G = self.MDG.reverse(copy=False)
+        H = G.reverse(copy=True)
+        assert H.nodes == self.MDG.nodes
+        assert H.edges == self.MDG.edges
+
+    def test_subgraph_edgesubgraph_toundirected(self):
+        G = self.G.copy()
+        SG = G.subgraph([4, 5, 6])
+        SSG = SG.edge_subgraph([(4, 5), (5, 4)])
+        USSG = SSG.to_undirected()
+        assert list(USSG) == [4, 5]
+        assert sorted(USSG.edges) == [(4, 5)]
+
+    def test_copy_subgraph(self):
+        G = self.G.copy()
+        SG = G.subgraph([4, 5, 6])
+        CSG = SG.copy(as_view=True)
+        DCSG = SG.copy(as_view=False)
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view
+
+    def test_copy_disubgraph(self):
+        G = self.DG.copy()
+        SG = G.subgraph([4, 5, 6])
+        CSG = SG.copy(as_view=True)
+        DCSG = SG.copy(as_view=False)
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view
+
+    def test_copy_multidisubgraph(self):
+        G = self.MDG.copy()
+        SG = G.subgraph([4, 5, 6])
+        CSG = SG.copy(as_view=True)
+        DCSG = SG.copy(as_view=False)
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view
+
+    def test_copy_multisubgraph(self):
+        G = self.MG.copy()
+        SG = G.subgraph([4, 5, 6])
+        CSG = SG.copy(as_view=True)
+        DCSG = SG.copy(as_view=False)
+        assert hasattr(CSG, "_graph")  # is a view
+        assert not hasattr(DCSG, "_graph")  # not a view
+
+    def test_copy_of_view(self):
+        G = nx.MultiGraph(self.MGv)
+        assert G.__class__.__name__ == "MultiGraph"
+        G = G.copy(as_view=True)
+        assert G.__class__.__name__ == "MultiGraph"
+
+    def test_subclass(self):
+        class
MyGraph(nx.DiGraph): + def my_method(self): + return "me" + + def to_directed_class(self): + return MyGraph() + + for origG in self.graphs: + G = MyGraph(origG) + SG = G.subgraph([4, 5, 6]) + H = SG.copy() + assert SG.my_method() == "me" + assert H.my_method() == "me" + assert 3 not in H or 3 in SG diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multidigraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multidigraph.py new file mode 100644 index 0000000..fc0bd54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multidigraph.py @@ -0,0 +1,459 @@ +from collections import UserDict + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + +from .test_multigraph import BaseMultiGraphTester +from .test_multigraph import TestEdgeSubgraph as _TestMultiGraphEdgeSubgraph +from .test_multigraph import TestMultiGraph as _TestMultiGraph + + +class BaseMultiDiGraphTester(BaseMultiGraphTester): + def test_edges(self): + G = self.K3 + edges = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.edges()) == edges + assert sorted(G.edges(0)) == [(0, 1), (0, 2)] + pytest.raises((KeyError, nx.NetworkXError), G.edges, -1) + + def test_edges_data(self): + G = self.K3 + edges = [(0, 1, {}), (0, 2, {}), (1, 0, {}), (1, 2, {}), (2, 0, {}), (2, 1, {})] + assert sorted(G.edges(data=True)) == edges + assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})] + pytest.raises((KeyError, nx.NetworkXError), G.neighbors, -1) + + def test_edges_multi(self): + G = self.K3 + assert sorted(G.edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.edges(0)) == [(0, 1), (0, 2)] + G.add_edge(0, 1) + assert sorted(G.edges()) == [ + (0, 1), + (0, 1), + (0, 2), + (1, 0), + (1, 2), + (2, 0), + (2, 1), + ] + + def test_out_edges(self): + G = self.K3 + assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)] + pytest.raises((KeyError, nx.NetworkXError), G.out_edges, -1) + assert sorted(G.out_edges(0, keys=True)) == [(0, 1, 0), (0, 2, 0)] + + def test_out_edges_multi(self): + G = self.K3 + assert sorted(G.out_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.out_edges(0)) == [(0, 1), (0, 2)] + G.add_edge(0, 1, 2) + assert sorted(G.out_edges()) == [ + (0, 1), + (0, 1), + (0, 2), + (1, 0), + (1, 2), + (2, 0), + (2, 1), + ] + + def test_out_edges_data(self): + G = self.K3 + assert sorted(G.edges(0, data=True)) == [(0, 1, {}), (0, 2, {})] + G.remove_edge(0, 1) + G.add_edge(0, 1, data=1) + assert sorted(G.edges(0, data=True)) == [(0, 1, {"data": 1}), (0, 2, {})] + assert sorted(G.edges(0, data="data")) == [(0, 1, 1), (0, 2, None)] + assert sorted(G.edges(0, data="data", default=-1)) == [(0, 1, 1), (0, 2, -1)] + + def test_in_edges(self): + G = self.K3 + assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)] + pytest.raises((KeyError, nx.NetworkXError), G.in_edges, -1) + G.add_edge(0, 1, 2) + assert sorted(G.in_edges()) == [ + (0, 1), + (0, 1), + (0, 2), + (1, 0), + (1, 2), + (2, 0), + (2, 1), + ] + assert sorted(G.in_edges(0, keys=True)) == [(1, 0, 0), (2, 0, 0)] + + def test_in_edges_no_keys(self): + G = self.K3 + assert sorted(G.in_edges()) == [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)] + assert sorted(G.in_edges(0)) == [(1, 0), (2, 0)] + G.add_edge(0, 1, 2) + assert sorted(G.in_edges()) == [ + (0, 1), + 
(0, 1), + (0, 2), + (1, 0), + (1, 2), + (2, 0), + (2, 1), + ] + + assert sorted(G.in_edges(data=True, keys=False)) == [ + (0, 1, {}), + (0, 1, {}), + (0, 2, {}), + (1, 0, {}), + (1, 2, {}), + (2, 0, {}), + (2, 1, {}), + ] + + def test_in_edges_data(self): + G = self.K3 + assert sorted(G.in_edges(0, data=True)) == [(1, 0, {}), (2, 0, {})] + G.remove_edge(1, 0) + G.add_edge(1, 0, data=1) + assert sorted(G.in_edges(0, data=True)) == [(1, 0, {"data": 1}), (2, 0, {})] + assert sorted(G.in_edges(0, data="data")) == [(1, 0, 1), (2, 0, None)] + assert sorted(G.in_edges(0, data="data", default=-1)) == [(1, 0, 1), (2, 0, -1)] + + def is_shallow(self, H, G): + # graph + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] == H.graph["foo"] + # node + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + # edge + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + G[1][2][0]["foo"].append(1) + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + + def is_deep(self, H, G): + # graph + assert G.graph["foo"] == H.graph["foo"] + G.graph["foo"].append(1) + assert G.graph["foo"] != H.graph["foo"] + # node + assert G.nodes[0]["foo"] == H.nodes[0]["foo"] + G.nodes[0]["foo"].append(1) + assert G.nodes[0]["foo"] != H.nodes[0]["foo"] + # edge + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + G[1][2][0]["foo"].append(1) + assert G[1][2][0]["foo"] != H[1][2][0]["foo"] + + def test_to_undirected(self): + # MultiDiGraph -> MultiGraph changes number of edges so it is + # not a copy operation... use is_shallow, not is_shallow_copy + G = self.K3 + self.add_attributes(G) + H = nx.MultiGraph(G) + # self.is_shallow(H,G) + # the result is traversal order dependent so we + # can't use the is_shallow() test here. 
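+        # Aside: why the edge count may shrink -- converting MultiDiGraph to
+        # MultiGraph maps each directed edge (u, v, k) onto the undirected
+        # slot (u, v, k), so a reciprocal pair with the same key collapses.
+        # Illustrative sketch: nx.MultiGraph(nx.MultiDiGraph([(0, 1), (1, 0)]))
+        # has a single edge, because both directed edges share key 0.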
+ try: + assert edges_equal(H.edges(), [(0, 1), (1, 2), (2, 0)]) + except AssertionError: + assert edges_equal(H.edges(), [(0, 1), (1, 2), (1, 2), (2, 0)]) + H = G.to_undirected() + self.is_deep(H, G) + + def test_has_successor(self): + G = self.K3 + assert G.has_successor(0, 1) + assert not G.has_successor(0, -1) + + def test_successors(self): + G = self.K3 + assert sorted(G.successors(0)) == [1, 2] + pytest.raises((KeyError, nx.NetworkXError), G.successors, -1) + + def test_has_predecessor(self): + G = self.K3 + assert G.has_predecessor(0, 1) + assert not G.has_predecessor(0, -1) + + def test_predecessors(self): + G = self.K3 + assert sorted(G.predecessors(0)) == [1, 2] + pytest.raises((KeyError, nx.NetworkXError), G.predecessors, -1) + + def test_degree(self): + G = self.K3 + assert sorted(G.degree()) == [(0, 4), (1, 4), (2, 4)] + assert dict(G.degree()) == {0: 4, 1: 4, 2: 4} + assert G.degree(0) == 4 + assert list(G.degree(iter([0]))) == [(0, 4)] + G.add_edge(0, 1, weight=0.3, other=1.2) + assert sorted(G.degree(weight="weight")) == [(0, 4.3), (1, 4.3), (2, 4)] + assert sorted(G.degree(weight="other")) == [(0, 5.2), (1, 5.2), (2, 4)] + + def test_in_degree(self): + G = self.K3 + assert sorted(G.in_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.in_degree()) == {0: 2, 1: 2, 2: 2} + assert G.in_degree(0) == 2 + assert list(G.in_degree(iter([0]))) == [(0, 2)] + assert G.in_degree(0, weight="weight") == 2 + + def test_out_degree(self): + G = self.K3 + assert sorted(G.out_degree()) == [(0, 2), (1, 2), (2, 2)] + assert dict(G.out_degree()) == {0: 2, 1: 2, 2: 2} + assert G.out_degree(0) == 2 + assert list(G.out_degree(iter([0]))) == [(0, 2)] + assert G.out_degree(0, weight="weight") == 2 + + def test_size(self): + G = self.K3 + assert G.size() == 6 + assert G.number_of_edges() == 6 + G.add_edge(0, 1, weight=0.3, other=1.2) + assert round(G.size(weight="weight"), 2) == 6.3 + assert round(G.size(weight="other"), 2) == 7.2 + + def test_to_undirected_reciprocal(self): + G = self.Graph() + G.add_edge(1, 2) + assert G.to_undirected().has_edge(1, 2) + assert not G.to_undirected(reciprocal=True).has_edge(1, 2) + G.add_edge(2, 1) + assert G.to_undirected(reciprocal=True).has_edge(1, 2) + + def test_reverse_copy(self): + G = nx.MultiDiGraph([(0, 1), (0, 1)]) + R = G.reverse() + assert sorted(R.edges()) == [(1, 0), (1, 0)] + R.remove_edge(1, 0) + assert sorted(R.edges()) == [(1, 0)] + assert sorted(G.edges()) == [(0, 1), (0, 1)] + + def test_reverse_nocopy(self): + G = nx.MultiDiGraph([(0, 1), (0, 1)]) + R = G.reverse(copy=False) + assert sorted(R.edges()) == [(1, 0), (1, 0)] + pytest.raises(nx.NetworkXError, R.remove_edge, 1, 0) + + def test_di_attributes_cached(self): + G = self.K3.copy() + assert id(G.in_edges) == id(G.in_edges) + assert id(G.out_edges) == id(G.out_edges) + assert id(G.in_degree) == id(G.in_degree) + assert id(G.out_degree) == id(G.out_degree) + assert id(G.succ) == id(G.succ) + assert id(G.pred) == id(G.pred) + + +class TestMultiDiGraph(BaseMultiDiGraphTester, _TestMultiGraph): + def setup_method(self): + self.Graph = nx.MultiDiGraph + # build K3 + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = {0: {}, 1: {}, 2: {}} + # K3._adj is synced with K3._succ + self.K3._pred = {0: {}, 1: {}, 2: {}} + for u in self.k3nodes: + for v in self.k3nodes: + if u == v: + continue + d = {0: {}} + self.K3._succ[u][v] = d + self.K3._pred[v][u] = d + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] 
= {} + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G._adj == {0: {1: {0: {}}}, 1: {}} + assert G._succ == {0: {1: {0: {}}}, 1: {}} + assert G._pred == {0: {}, 1: {0: {0: {}}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G._adj == {0: {1: {0: {}}}, 1: {}} + assert G._succ == {0: {1: {0: {}}}, 1: {}} + assert G._pred == {0: {}, 1: {0: {0: {}}}} + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edge(None, 3) + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 1, {"weight": 3})]) + assert G._adj == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}} + assert G._succ == {0: {1: {0: {}, 1: {"weight": 3}}}, 1: {}} + assert G._pred == {0: {}, 1: {0: {0: {}, 1: {"weight": 3}}}} + + G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2) + assert G._succ == { + 0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}}, + 1: {}, + } + assert G._pred == { + 0: {}, + 1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}}, + } + + G = self.Graph() + edges = [ + (0, 1, {"weight": 3}), + (0, 1, (("weight", 2),)), + (0, 1, 5), + (0, 1, "s"), + ] + G.add_edges_from(edges) + keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}} + assert G._succ == {0: {1: keydict}, 1: {}} + assert G._pred == {1: {0: keydict}, 0: {}} + + # too few in tuple + pytest.raises(nx.NetworkXError, G.add_edges_from, [(0,)]) + # too many in tuple + pytest.raises(nx.NetworkXError, G.add_edges_from, [(0, 1, 2, 3, 4)]) + # not a tuple + pytest.raises(TypeError, G.add_edges_from, [0]) + with pytest.raises(ValueError, match="None cannot be a node"): + G.add_edges_from([(None, 3), (3, 2)]) + + def test_remove_edge(self): + G = self.K3 + G.remove_edge(0, 1) + assert G._succ == { + 0: {2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + assert G._pred == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0) + pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, 0, 2, key=1) + + def test_remove_multiedge(self): + G = self.K3 + G.add_edge(0, 1, key="parallel edge") + G.remove_edge(0, 1, key="parallel edge") + assert G._adj == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + + assert G._succ == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + + assert G._pred == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + G.remove_edge(0, 1) + assert G._succ == { + 0: {2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + assert G._pred == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + pytest.raises((KeyError, nx.NetworkXError), G.remove_edge, -1, 0) + + def test_remove_edges_from(self): + G = self.K3 + G.remove_edges_from([(0, 1)]) + assert G._succ == { + 0: {2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + assert G._pred == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + G.remove_edges_from([(0, 0)]) # silent fail + + +class TestEdgeSubgraph(_TestMultiGraphEdgeSubgraph): + """Unit tests for the :meth:`MultiDiGraph.edge_subgraph` method.""" + + def setup_method(self): + # Create a quadruply-linked path graph on five nodes. 
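+        # Aside: a minimal runnable sketch of the key semantics assumed by the
+        # multi(di)graph tests above -- every parallel edge gets its own key,
+        # auto-assigned ints by default or any hashable passed via ``key=``.
+        # (M is an illustrative name, unrelated to this fixture.)
+        M = nx.MultiDiGraph()
+        assert M.add_edge(0, 1) == 0  # first auto key
+        assert M.add_edge(0, 1) == 1  # parallel edge gets the next key
+        assert M.add_edge(0, 1, key="p") == "p"  # explicit key
+        M.remove_edge(0, 1, key="p")  # removes only that parallel edge
+        assert M.number_of_edges(0, 1) == 2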
+ G = nx.MultiDiGraph() + nx.add_path(G, range(5)) + nx.add_path(G, range(5)) + nx.add_path(G, reversed(range(5))) + nx.add_path(G, reversed(range(5))) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.adj[0][1][0]["name"] = "edge010" + G.adj[0][1][1]["name"] = "edge011" + G.adj[3][4][0]["name"] = "edge340" + G.adj[3][4][1]["name"] = "edge341" + G.graph["name"] = "graph" + # Get the subgraph induced by one of the first edges and one of + # the last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1, 0), (3, 4, 1)]) + + +class CustomDictClass(UserDict): + pass + + +class MultiDiGraphSubClass(nx.MultiDiGraph): + node_dict_factory = CustomDictClass # type: ignore[assignment] + node_attr_dict_factory = CustomDictClass # type: ignore[assignment] + adjlist_outer_dict_factory = CustomDictClass # type: ignore[assignment] + adjlist_inner_dict_factory = CustomDictClass # type: ignore[assignment] + edge_key_dict_factory = CustomDictClass # type: ignore[assignment] + edge_attr_dict_factory = CustomDictClass # type: ignore[assignment] + graph_attr_dict_factory = CustomDictClass # type: ignore[assignment] + + +class TestMultiDiGraphSubclass(TestMultiDiGraph): + def setup_method(self): + self.Graph = MultiDiGraphSubClass + # build K3 + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = self.K3.adjlist_outer_dict_factory( + { + 0: self.K3.adjlist_inner_dict_factory(), + 1: self.K3.adjlist_inner_dict_factory(), + 2: self.K3.adjlist_inner_dict_factory(), + } + ) + # K3._adj is synced with K3._succ + self.K3._pred = {0: {}, 1: {}, 2: {}} + for u in self.k3nodes: + for v in self.k3nodes: + if u == v: + continue + d = {0: {}} + self.K3._succ[u][v] = d + self.K3._pred[v][u] = d + self.K3._node = self.K3.node_dict_factory() + self.K3._node[0] = self.K3.node_attr_dict_factory() + self.K3._node[1] = self.K3.node_attr_dict_factory() + self.K3._node[2] = self.K3.node_attr_dict_factory() diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multigraph.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multigraph.py new file mode 100644 index 0000000..cd912d1 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_multigraph.py @@ -0,0 +1,528 @@ +from collections import UserDict + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + +from .test_graph import BaseAttrGraphTester +from .test_graph import TestGraph as _TestGraph + + +class BaseMultiGraphTester(BaseAttrGraphTester): + def test_has_edge(self): + G = self.K3 + assert G.has_edge(0, 1) + assert not G.has_edge(0, -1) + assert G.has_edge(0, 1, 0) + assert not G.has_edge(0, 1, 1) + + def test_get_edge_data(self): + G = self.K3 + assert G.get_edge_data(0, 1) == {0: {}} + assert G[0][1] == {0: {}} + assert G[0][1][0] == {} + assert G.get_edge_data(10, 20) is None + assert G.get_edge_data(0, 1, 0) == {} + + def test_adjacency(self): + G = self.K3 + assert dict(G.adjacency()) == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + + def deepcopy_edge_attr(self, H, G): + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + G[1][2][0]["foo"].append(1) + assert G[1][2][0]["foo"] != H[1][2][0]["foo"] + + def shallow_copy_edge_attr(self, H, G): + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + G[1][2][0]["foo"].append(1) + assert G[1][2][0]["foo"] == H[1][2][0]["foo"] + + def graphs_equal(self, H, G): + assert G._adj == 
H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name + if not G.is_directed() and not H.is_directed(): + assert H._adj[1][2][0] is H._adj[2][1][0] + assert G._adj[1][2][0] is G._adj[2][1][0] + else: # at least one is directed + if not G.is_directed(): + G._pred = G._adj + G._succ = G._adj + if not H.is_directed(): + H._pred = H._adj + H._succ = H._adj + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2][0] is H._pred[2][1][0] + assert G._succ[1][2][0] is G._pred[2][1][0] + + def same_attrdict(self, H, G): + # same attrdict in the edgedata + old_foo = H[1][2][0]["foo"] + H.adj[1][2][0]["foo"] = "baz" + assert G._adj == H._adj + H.adj[1][2][0]["foo"] = old_foo + assert G._adj == H._adj + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G._node == H._node + H.nodes[0]["foo"] = old_foo + assert G._node == H._node + + def different_attrdict(self, H, G): + # used by graph_equal_but_different + old_foo = H[1][2][0]["foo"] + H.adj[1][2][0]["foo"] = "baz" + assert G._adj != H._adj + H.adj[1][2][0]["foo"] = old_foo + assert G._adj == H._adj + + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G._node != H._node + H.nodes[0]["foo"] = old_foo + assert G._node == H._node + + def test_to_undirected(self): + G = self.K3 + self.add_attributes(G) + H = nx.MultiGraph(G) + self.is_shallow_copy(H, G) + H = G.to_undirected() + self.is_deepcopy(H, G) + + def test_to_directed(self): + G = self.K3 + self.add_attributes(G) + H = nx.MultiDiGraph(G) + self.is_shallow_copy(H, G) + H = G.to_directed() + self.is_deepcopy(H, G) + + def test_number_of_edges_selfloops(self): + G = self.K3 + G.add_edge(0, 0) + G.add_edge(0, 0) + G.add_edge(0, 0, key="parallel edge") + G.remove_edge(0, 0, key="parallel edge") + assert G.number_of_edges(0, 0) == 2 + G.remove_edge(0, 0) + assert G.number_of_edges(0, 0) == 1 + + def test_edge_lookup(self): + G = self.Graph() + G.add_edge(1, 2, foo="bar") + G.add_edge(1, 2, "key", foo="biz") + assert edges_equal(G.edges[1, 2, 0], {"foo": "bar"}) + assert edges_equal(G.edges[1, 2, "key"], {"foo": "biz"}) + + def test_edge_attr(self): + G = self.Graph() + G.add_edge(1, 2, key="k1", foo="bar") + G.add_edge(1, 2, key="k2", foo="baz") + assert isinstance(G.get_edge_data(1, 2), G.edge_key_dict_factory) + assert all( + isinstance(d, G.edge_attr_dict_factory) for u, v, d in G.edges(data=True) + ) + assert edges_equal( + G.edges(keys=True, data=True), + [(1, 2, "k1", {"foo": "bar"}), (1, 2, "k2", {"foo": "baz"})], + ) + assert edges_equal( + G.edges(keys=True, data="foo"), [(1, 2, "k1", "bar"), (1, 2, "k2", "baz")] + ) + + def test_edge_attr4(self): + G = self.Graph() + G.add_edge(1, 2, key=0, data=7, spam="bar", bar="foo") + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 7, "spam": "bar", "bar": "foo"})] + ) + G[1][2][0]["data"] = 10 # OK to set data like this + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 10, "spam": "bar", "bar": "foo"})] + ) + + G.adj[1][2][0]["data"] = 20 + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 20, "spam": "bar", "bar": "foo"})] + ) + G.edges[1, 2, 0]["data"] = 21 # another spelling, "edge" + assert edges_equal( + G.edges(data=True), [(1, 2, {"data": 21, "spam": "bar", "bar": "foo"})] + ) + G.adj[1][2][0]["listdata"] = [20, 200] + G.adj[1][2][0]["weight"] = 20 + assert edges_equal( + G.edges(data=True), + [ + ( + 1, + 2, + { + "data": 21, + "spam": "bar", + "bar": "foo", + "listdata": [20, 200], + "weight": 20, + }, + ) + ], + ) + + +class 
TestMultiGraph(BaseMultiGraphTester, _TestGraph): + def setup_method(self): + self.Graph = nx.MultiGraph + # build K3 + ed1, ed2, ed3 = ({0: {}}, {0: {}}, {0: {}}) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.k3adj + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + def test_data_input(self): + G = self.Graph({1: [2], 2: [1]}, name="test") + assert G.name == "test" + expected = [(1, {2: {0: {}}}), (2, {1: {0: {}}})] + assert sorted(G.adj.items()) == expected + + def test_data_multigraph_input(self): + # standard case with edge keys and edge data + edata0 = {"w": 200, "s": "foo"} + edata1 = {"w": 201, "s": "bar"} + keydict = {0: edata0, 1: edata1} + dododod = {"a": {"b": keydict}} + + multiple_edge = [("a", "b", 0, edata0), ("a", "b", 1, edata1)] + single_edge = [("a", "b", 0, keydict)] + + G = self.Graph(dododod, multigraph_input=True) + assert list(G.edges(keys=True, data=True)) == multiple_edge + G = self.Graph(dododod, multigraph_input=None) + assert list(G.edges(keys=True, data=True)) == multiple_edge + G = self.Graph(dododod, multigraph_input=False) + assert list(G.edges(keys=True, data=True)) == single_edge + + # test round-trip to_dict_of_dict and MultiGraph constructor + G = self.Graph(dododod, multigraph_input=True) + H = self.Graph(nx.to_dict_of_dicts(G)) + assert nx.is_isomorphic(G, H) is True # test that default is True + for mgi in [True, False]: + H = self.Graph(nx.to_dict_of_dicts(G), multigraph_input=mgi) + assert nx.is_isomorphic(G, H) == mgi + + # Set up cases for when incoming_graph_data is not multigraph_input + etraits = {"w": 200, "s": "foo"} + egraphics = {"color": "blue", "shape": "box"} + edata = {"traits": etraits, "graphics": egraphics} + dodod1 = {"a": {"b": edata}} + dodod2 = {"a": {"b": etraits}} + dodod3 = {"a": {"b": {"traits": etraits, "s": "foo"}}} + dol = {"a": ["b"]} + + multiple_edge = [("a", "b", "traits", etraits), ("a", "b", "graphics", egraphics)] + single_edge = [("a", "b", 0, {})] # type: ignore[var-annotated] + single_edge1 = [("a", "b", 0, edata)] + single_edge2 = [("a", "b", 0, etraits)] + single_edge3 = [("a", "b", 0, {"traits": etraits, "s": "foo"})] + + cases = [ # (dod, mgi, edges) + (dodod1, True, multiple_edge), + (dodod1, False, single_edge1), + (dodod2, False, single_edge2), + (dodod3, False, single_edge3), + (dol, False, single_edge), + ] + + @pytest.mark.parametrize("dod, mgi, edges", cases) + def test_non_multigraph_input(self, dod, mgi, edges): + G = self.Graph(dod, multigraph_input=mgi) + assert list(G.edges(keys=True, data=True)) == edges + G = nx.to_networkx_graph(dod, create_using=self.Graph, multigraph_input=mgi) + assert list(G.edges(keys=True, data=True)) == edges + + mgi_none_cases = [ + (dodod1, multiple_edge), + (dodod2, single_edge2), + (dodod3, single_edge3), + ] + + @pytest.mark.parametrize("dod, edges", mgi_none_cases) + def test_non_multigraph_input_mgi_none(self, dod, edges): + # test constructor without to_networkx_graph for mgi=None + G = self.Graph(dod) + assert list(G.edges(keys=True, data=True)) == edges + + raise_cases = [dodod2, dodod3, dol] + + @pytest.mark.parametrize("dod", raise_cases) + def test_non_multigraph_input_raise(self, dod): + # cases where NetworkXError is raised + pytest.raises(nx.NetworkXError, self.Graph, dod, multigraph_input=True) + pytest.raises( + nx.NetworkXError, + nx.to_networkx_graph, + dod, + 
create_using=self.Graph, + multigraph_input=True, + ) + + def test_getitem(self): + G = self.K3 + assert G[0] == {1: {0: {}}, 2: {0: {}}} + with pytest.raises(KeyError): + G.__getitem__("j") + with pytest.raises(TypeError): + G.__getitem__(["A"]) + + def test_remove_node(self): + G = self.K3 + G.remove_node(0) + assert G.adj == {1: {2: {0: {}}}, 2: {1: {0: {}}}} + with pytest.raises(nx.NetworkXError): + G.remove_node(-1) + + def test_add_edge(self): + G = self.Graph() + G.add_edge(0, 1) + assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}} + G = self.Graph() + G.add_edge(*(0, 1)) + assert G.adj == {0: {1: {0: {}}}, 1: {0: {0: {}}}} + G = self.Graph() + with pytest.raises(ValueError): + G.add_edge(None, "anything") + + def test_add_edge_conflicting_key(self): + G = self.Graph() + G.add_edge(0, 1, key=1) + G.add_edge(0, 1) + assert G.number_of_edges() == 2 + G = self.Graph() + G.add_edges_from([(0, 1, 1, {})]) + G.add_edges_from([(0, 1)]) + assert G.number_of_edges() == 2 + + def test_add_edges_from(self): + G = self.Graph() + G.add_edges_from([(0, 1), (0, 1, {"weight": 3})]) + assert G.adj == { + 0: {1: {0: {}, 1: {"weight": 3}}}, + 1: {0: {0: {}, 1: {"weight": 3}}}, + } + G.add_edges_from([(0, 1), (0, 1, {"weight": 3})], weight=2) + assert G.adj == { + 0: {1: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}}, + 1: {0: {0: {}, 1: {"weight": 3}, 2: {"weight": 2}, 3: {"weight": 3}}}, + } + G = self.Graph() + edges = [ + (0, 1, {"weight": 3}), + (0, 1, (("weight", 2),)), + (0, 1, 5), + (0, 1, "s"), + ] + G.add_edges_from(edges) + keydict = {0: {"weight": 3}, 1: {"weight": 2}, 5: {}, "s": {}} + assert G._adj == {0: {1: keydict}, 1: {0: keydict}} + + # too few in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0,)]) + # too many in tuple + with pytest.raises(nx.NetworkXError): + G.add_edges_from([(0, 1, 2, 3, 4)]) + # not a tuple + with pytest.raises(TypeError): + G.add_edges_from([0]) + + def test_multigraph_add_edges_from_four_tuple_misordered(self): + """add_edges_from expects 4-tuples of the format (u, v, key, data_dict). + + Ensure 4-tuples of form (u, v, data_dict, key) raise exception. 
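+
+        For contrast, an illustrative doctest-style sketch of the accepted
+        ordering:
+
+        >>> G = nx.MultiGraph()
+        >>> G.add_edges_from([(0, 1, "k", {"color": "red"})])
+        ['k']
+        >>> G[0][1]["k"]
+        {'color': 'red'}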
+ """ + G = nx.MultiGraph() + with pytest.raises(TypeError): + # key/data values flipped in 4-tuple + G.add_edges_from([(0, 1, {"color": "red"}, 0)]) + + def test_remove_edge(self): + G = self.K3 + G.remove_edge(0, 1) + assert G.adj == {0: {2: {0: {}}}, 1: {2: {0: {}}}, 2: {0: {0: {}}, 1: {0: {}}}} + + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + with pytest.raises(nx.NetworkXError): + G.remove_edge(0, 2, key=1) + + def test_remove_edges_from(self): + G = self.K3.copy() + G.remove_edges_from([(0, 1)]) + kd = {0: {}} + assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}} + G.remove_edges_from([(0, 0)]) # silent fail + self.K3.add_edge(0, 1) + G = self.K3.copy() + G.remove_edges_from(list(G.edges(data=True, keys=True))) + assert G.adj == {0: {}, 1: {}, 2: {}} + G = self.K3.copy() + G.remove_edges_from(list(G.edges(data=False, keys=True))) + assert G.adj == {0: {}, 1: {}, 2: {}} + G = self.K3.copy() + G.remove_edges_from(list(G.edges(data=False, keys=False))) + assert G.adj == {0: {}, 1: {}, 2: {}} + G = self.K3.copy() + G.remove_edges_from([(0, 1, 0), (0, 2, 0, {}), (1, 2)]) + assert G.adj == {0: {1: {1: {}}}, 1: {0: {1: {}}}, 2: {}} + + def test_remove_multiedge(self): + G = self.K3 + G.add_edge(0, 1, key="parallel edge") + G.remove_edge(0, 1, key="parallel edge") + assert G.adj == { + 0: {1: {0: {}}, 2: {0: {}}}, + 1: {0: {0: {}}, 2: {0: {}}}, + 2: {0: {0: {}}, 1: {0: {}}}, + } + G.remove_edge(0, 1) + kd = {0: {}} + assert G.adj == {0: {2: kd}, 1: {2: kd}, 2: {0: kd, 1: kd}} + with pytest.raises(nx.NetworkXError): + G.remove_edge(-1, 0) + + +class TestEdgeSubgraph: + """Unit tests for the :meth:`MultiGraph.edge_subgraph` method.""" + + def setup_method(self): + # Create a doubly-linked path graph on five nodes. + G = nx.MultiGraph() + nx.add_path(G, range(5)) + nx.add_path(G, range(5)) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.adj[0][1][0]["name"] = "edge010" + G.adj[0][1][1]["name"] = "edge011" + G.adj[3][4][0]["name"] = "edge340" + G.adj[3][4][1]["name"] = "edge341" + G.graph["name"] = "graph" + # Get the subgraph induced by one of the first edges and one of + # the last edges. + self.G = G + self.H = G.edge_subgraph([(0, 1, 0), (3, 4, 1)]) + + def test_correct_nodes(self): + """Tests that the subgraph has the correct nodes.""" + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_correct_edges(self): + """Tests that the subgraph has the correct edges.""" + assert [(0, 1, 0, "edge010"), (3, 4, 1, "edge341")] == sorted( + self.H.edges(keys=True, data="name") + ) + + def test_add_node(self): + """Tests that adding a node to the original graph does not + affect the nodes of the subgraph. + + """ + self.G.add_node(5) + assert [0, 1, 3, 4] == sorted(self.H.nodes()) + + def test_remove_node(self): + """Tests that removing a node in the original graph does + affect the nodes of the subgraph. + + """ + self.G.remove_node(0) + assert [1, 3, 4] == sorted(self.H.nodes()) + + def test_node_attr_dict(self): + """Tests that the node attribute dictionary of the two graphs is + the same object. + + """ + for v in self.H: + assert self.G.nodes[v] == self.H.nodes[v] + # Making a change to G should make a change in H and vice versa. + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] + + def test_edge_attr_dict(self): + """Tests that the edge attribute dictionary of the two graphs is + the same object. 
+ + """ + for u, v, k in self.H.edges(keys=True): + assert self.G._adj[u][v][k] == self.H._adj[u][v][k] + # Making a change to G should make a change in H and vice versa. + self.G._adj[0][1][0]["name"] = "foo" + assert self.G._adj[0][1][0]["name"] == self.H._adj[0][1][0]["name"] + self.H._adj[3][4][1]["name"] = "bar" + assert self.G._adj[3][4][1]["name"] == self.H._adj[3][4][1]["name"] + + def test_graph_attr_dict(self): + """Tests that the graph attribute dictionary of the two graphs + is the same object. + + """ + assert self.G.graph is self.H.graph + + +class CustomDictClass(UserDict): + pass + + +class MultiGraphSubClass(nx.MultiGraph): + node_dict_factory = CustomDictClass # type: ignore[assignment] + node_attr_dict_factory = CustomDictClass # type: ignore[assignment] + adjlist_outer_dict_factory = CustomDictClass # type: ignore[assignment] + adjlist_inner_dict_factory = CustomDictClass # type: ignore[assignment] + edge_key_dict_factory = CustomDictClass # type: ignore[assignment] + edge_attr_dict_factory = CustomDictClass # type: ignore[assignment] + graph_attr_dict_factory = CustomDictClass # type: ignore[assignment] + + +class TestMultiGraphSubclass(TestMultiGraph): + def setup_method(self): + self.Graph = MultiGraphSubClass + # build K3 + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.K3.adjlist_outer_dict_factory( + { + 0: self.K3.adjlist_inner_dict_factory(), + 1: self.K3.adjlist_inner_dict_factory(), + 2: self.K3.adjlist_inner_dict_factory(), + } + ) + self.K3._pred = {0: {}, 1: {}, 2: {}} + for u in self.k3nodes: + for v in self.k3nodes: + if u != v: + d = {0: {}} + self.K3._adj[u][v] = d + self.K3._adj[v][u] = d + self.K3._node = self.K3.node_dict_factory() + self.K3._node[0] = self.K3.node_attr_dict_factory() + self.K3._node[1] = self.K3.node_attr_dict_factory() + self.K3._node[2] = self.K3.node_attr_dict_factory() diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_reportviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_reportviews.py new file mode 100644 index 0000000..8461be2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_reportviews.py @@ -0,0 +1,1421 @@ +import pickle +from copy import deepcopy + +import pytest + +import networkx as nx +from networkx.classes import reportviews as rv +from networkx.classes.reportviews import NodeDataView + + +# Nodes +class TestNodeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.nv = cls.G.nodes # NodeView(G) + + def test_pickle(self): + import pickle + + nv = self.nv + pnv = pickle.loads(pickle.dumps(nv, -1)) + assert nv == pnv + assert nv.__slots__ == pnv.__slots__ + + def test_str(self): + assert str(self.nv) == "[0, 1, 2, 3, 4, 5, 6, 7, 8]" + + def test_repr(self): + assert repr(self.nv) == "NodeView((0, 1, 2, 3, 4, 5, 6, 7, 8))" + + def test_contains(self): + G = self.G.copy() + nv = G.nodes + assert 7 in nv + assert 9 not in nv + G.remove_node(7) + G.add_node(9) + assert 7 not in nv + assert 9 in nv + + def test_getitem(self): + G = self.G.copy() + nv = G.nodes + G.nodes[3]["foo"] = "bar" + assert nv[7] == {} + assert nv[3] == {"foo": "bar"} + # slicing + with pytest.raises(nx.NetworkXError): + G.nodes[0:5] + + def test_iter(self): + nv = self.nv + for i, n in enumerate(nv): + assert i == n + inv = iter(nv) + assert next(inv) == 0 + assert iter(nv) != nv + assert iter(inv) == inv + inv2 = iter(nv) + next(inv2) + assert list(inv) == list(inv2) + # odd 
case where NodeView calls NodeDataView with data=False
+        nnv = nv(data=False)
+        for i, n in enumerate(nnv):
+            assert i == n
+
+    def test_call(self):
+        nodes = self.nv
+        assert nodes is nodes()
+        assert nodes is not nodes(data=True)
+        assert nodes is not nodes(data="weight")
+
+
+class TestNodeDataView:
+    @classmethod
+    def setup_class(cls):
+        cls.G = nx.path_graph(9)
+        cls.nv = NodeDataView(cls.G)
+        cls.ndv = cls.G.nodes.data(True)
+        cls.nwv = cls.G.nodes.data("foo")
+
+    def test_viewtype(self):
+        nv = self.G.nodes
+        ndvfalse = nv.data(False)
+        assert nv is ndvfalse
+        assert nv is not self.ndv
+
+    def test_pickle(self):
+        import pickle
+
+        nv = self.nv
+        pnv = pickle.loads(pickle.dumps(nv, -1))
+        assert nv == pnv
+        assert nv.__slots__ == pnv.__slots__
+
+    def test_str(self):
+        msg = str([(n, {}) for n in range(9)])
+        assert str(self.ndv) == msg
+
+    def test_repr(self):
+        expected = "NodeDataView((0, 1, 2, 3, 4, 5, 6, 7, 8))"
+        assert repr(self.nv) == expected
+        expected = (
+            "NodeDataView({0: {}, 1: {}, 2: {}, 3: {}, "
+            + "4: {}, 5: {}, 6: {}, 7: {}, 8: {}})"
+        )
+        assert repr(self.ndv) == expected
+        expected = (
+            "NodeDataView({0: None, 1: None, 2: None, 3: None, 4: None, "
+            + "5: None, 6: None, 7: None, 8: None}, data='foo')"
+        )
+        assert repr(self.nwv) == expected
+
+    def test_contains(self):
+        G = self.G.copy()
+        nv = G.nodes.data()
+        nwv = G.nodes.data("foo")
+        G.nodes[3]["foo"] = "bar"
+        assert (7, {}) in nv
+        assert (3, {"foo": "bar"}) in nv
+        assert (3, "bar") in nwv
+        assert (7, None) in nwv
+        # default
+        nwv_def = G.nodes(data="foo", default="biz")
+        assert (7, "biz") in nwv_def
+        assert (3, "bar") in nwv_def
+
+    def test_getitem(self):
+        G = self.G.copy()
+        nv = G.nodes
+        G.nodes[3]["foo"] = "bar"
+        assert nv[3] == {"foo": "bar"}
+        # default
+        nwv_def = G.nodes(data="foo", default="biz")
+        assert nwv_def[7] == "biz"
+        assert nwv_def[3] == "bar"
+        # slicing
+        with pytest.raises(nx.NetworkXError):
+            G.nodes.data()[0:5]
+
+    def test_iter(self):
+        G = self.G.copy()
+        nv = G.nodes.data()
+        ndv = G.nodes.data(True)
+        nwv = G.nodes.data("foo")
+        for i, (n, d) in enumerate(nv):
+            assert i == n
+            assert d == {}
+        inv = iter(nv)
+        assert next(inv) == (0, {})
+        G.nodes[3]["foo"] = "bar"
+        # default
+        for n, d in nv:
+            if n == 3:
+                assert d == {"foo": "bar"}
+            else:
+                assert d == {}
+        # data=True
+        for n, d in ndv:
+            if n == 3:
+                assert d == {"foo": "bar"}
+            else:
+                assert d == {}
+        # data='foo'
+        for n, d in nwv:
+            if n == 3:
+                assert d == "bar"
+            else:
+                assert d is None
+        # data='foo', default=1
+        for n, d in G.nodes.data("foo", default=1):
+            if n == 3:
+                assert d == "bar"
+            else:
+                assert d == 1
+
+
+def test_nodedataview_unhashable():
+    G = nx.path_graph(9)
+    G.nodes[3]["foo"] = "bar"
+    nvs = [G.nodes.data()]
+    nvs.append(G.nodes.data(True))
+    H = G.copy()
+    H.nodes[4]["foo"] = {1, 2, 3}
+    nvs.append(H.nodes.data(True))
+    # raise unhashable
+    for nv in nvs:
+        pytest.raises(TypeError, set, nv)
+        pytest.raises(TypeError, eval, "nv | nv", locals())
+    # no raise...
hashable + Gn = G.nodes.data(False) + set(Gn) + Gn | Gn + Gn = G.nodes.data("foo") + set(Gn) + Gn | Gn + + +class TestNodeViewSetOps: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes + + def n_its(self, nodes): + return set(nodes) + + def test_len(self): + G = self.G.copy() + nv = G.nodes + assert len(nv) == 9 + G.remove_node(7) + assert len(nv) == 8 + G.add_node(9) + assert len(nv) == 9 + + def test_and(self): + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv & some_nodes == self.n_its(range(5, 9)) + assert some_nodes & nv == self.n_its(range(5, 9)) + + def test_or(self): + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv | some_nodes == self.n_its(range(12)) + assert some_nodes | nv == self.n_its(range(12)) + + def test_xor(self): + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + nodes = {0, 1, 2, 3, 4, 9, 10, 11} + assert nv ^ some_nodes == self.n_its(nodes) + assert some_nodes ^ nv == self.n_its(nodes) + + def test_sub(self): + nv = self.nv + some_nodes = self.n_its(range(5, 12)) + assert nv - some_nodes == self.n_its(range(5)) + assert some_nodes - nv == self.n_its(range(9, 12)) + + +class TestNodeDataViewSetOps(TestNodeViewSetOps): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo") + + def n_its(self, nodes): + return {(node, "bar" if node == 3 else None) for node in nodes} + + +class TestNodeDataViewDefaultSetOps(TestNodeDataViewSetOps): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.G.nodes[3]["foo"] = "bar" + cls.nv = cls.G.nodes.data("foo", default=1) + + def n_its(self, nodes): + return {(node, "bar" if node == 3 else 1) for node in nodes} + + +# Edges Data View +class TestEdgeDataView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView + + def test_pickle(self): + import pickle + + ev = self.eview(self.G)(data=True) + pev = pickle.loads(pickle.dumps(ev, -1)) + assert list(ev) == list(pev) + assert ev.__slots__ == pev.__slots__ + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]].update(kwds) + + def test_str(self): + ev = self.eview(self.G)(data=True) + rep = str([(n, n + 1, {}) for n in range(8)]) + assert str(ev) == rep + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "EdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_iterdata(self): + G = self.G.copy() + evr = self.eview(G) + ev = evr(data=True) + ev_def = evr(data="foo", default=1) + + for u, v, d in ev: + pass + assert d == {} + + for u, v, wt in ev_def: + pass + assert wt == 1 + + self.modify_edge(G, (2, 3), foo="bar") + for e in ev: + assert len(e) == 3 + if set(e[:2]) == {2, 3}: + assert e[2] == {"foo": "bar"} + checked = True + else: + assert e[2] == {} + assert checked + + for e in ev_def: + assert len(e) == 3 + if set(e[:2]) == {2, 3}: + assert e[2] == "bar" + checked_wt = True + else: + assert e[2] == 1 + assert checked_wt + + def test_iter(self): + evr = self.eview(self.G) + ev = evr() + for u, v in ev: + pass + iev = iter(ev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev + + def test_contains(self): + evr = self.eview(self.G) + ev = evr() + if self.G.is_directed(): + assert (1, 2) in ev and (2, 1) not in ev + else: + assert (1, 2) in ev and (2, 1) in ev + 
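+        # Aside: the three shapes yielded by an edge query, sketched for
+        # reference (illustrative values):
+        #   G = nx.Graph([(0, 1, {"w": 2})])
+        #   list(G.edges)                       -> [(0, 1)]
+        #   list(G.edges(data=True))            -> [(0, 1, {"w": 2})]
+        #   list(G.edges(data="w", default=0))  -> [(0, 1, 2)]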
assert (1, 4) not in ev + assert (1, 90) not in ev + assert (90, 1) not in ev + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + if self.G.is_directed(): + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + else: + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + def test_len(self): + evr = self.eview(self.G) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 2 + assert len(evr([1, 2, 3])) == 4 + + assert len(self.G.edges(1)) == 2 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 + + H = self.G.copy() + H.add_edge(1, 1) + assert len(H.edges(1)) == 3 + assert len(H.edges()) == 9 + assert len(H.edges) == 9 + + +class TestOutEdgeDataView(TestEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.OutEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "OutEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_len(self): + evr = self.eview(self.G) + ev = evr(data="foo") + assert len(ev) == 8 + assert len(evr(1)) == 1 + assert len(evr([1, 2, 3])) == 3 + + assert len(self.G.edges(1)) == 1 + assert len(self.G.edges()) == 8 + assert len(self.G.edges) == 8 + + H = self.G.copy() + H.add_edge(1, 1) + assert len(H.edges(1)) == 2 + assert len(H.edges()) == 9 + assert len(H.edges) == 9 + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestInEdgeDataView(TestOutEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.DiGraph()) + cls.eview = nx.reportviews.InEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "InEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) not in ev + assert (1, 2) in ev + assert (2, 3) not in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestMultiEdgeDataView(TestEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiGraph()) + cls.eview = nx.reportviews.MultiEdgeView + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]][0].update(kwds) + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "MultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestOutMultiEdgeDataView(TestOutEdgeDataView): + 
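+    # Aside: these *_with_nbunch tests all lean on nbunch filtering, i.e.
+    # restricting an edge view to edges incident to the given nodes; an
+    # illustrative sketch for a directed path 0->1->2->3->4:
+    #   G = nx.path_graph(5, create_using=nx.DiGraph)
+    #   list(G.edges(nbunch=[0, 2]))  -> [(0, 1), (2, 3)]  (out-edges only)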
@classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.eview = nx.reportviews.OutMultiEdgeView + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]][0].update(kwds) + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "OutMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) in ev + assert (1, 2) not in ev + assert (2, 3) in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +class TestInMultiEdgeDataView(TestOutMultiEdgeDataView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=nx.MultiDiGraph()) + cls.eview = nx.reportviews.InMultiEdgeView + + def test_repr(self): + ev = self.eview(self.G)(data=True) + rep = ( + "InMultiEdgeDataView([(0, 1, {}), (1, 2, {}), " + + "(2, 3, {}), (3, 4, {}), " + + "(4, 5, {}), (5, 6, {}), " + + "(6, 7, {}), (7, 8, {})])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + evr = self.eview(self.G) + ev = evr(nbunch=[0, 2]) + assert (0, 1) not in ev + assert (1, 2) in ev + assert (2, 3) not in ev + assert (3, 4) not in ev + assert (4, 5) not in ev + assert (5, 6) not in ev + assert (7, 8) not in ev + assert (8, 9) not in ev + + +# Edge Views +class TestEdgeView: + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9) + cls.eview = nx.reportviews.EdgeView + + def test_pickle(self): + import pickle + + ev = self.eview(self.G) + pev = pickle.loads(pickle.dumps(ev, -1)) + assert ev == pev + assert ev.__slots__ == pev.__slots__ + + def modify_edge(self, G, e, **kwds): + G._adj[e[0]][e[1]].update(kwds) + + def test_str(self): + ev = self.eview(self.G) + rep = str([(n, n + 1) for n in range(8)]) + assert str(ev) == rep + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "EdgeView([(0, 1), (1, 2), (2, 3), (3, 4), " + + "(4, 5), (5, 6), (6, 7), (7, 8)])" + ) + assert repr(ev) == rep + + def test_getitem(self): + G = self.G.copy() + ev = G.edges + G.edges[0, 1]["foo"] = "bar" + assert ev[0, 1] == {"foo": "bar"} + + # slicing + with pytest.raises(nx.NetworkXError, match=".*does not support slicing"): + G.edges[0:5] + + # Invalid edge + with pytest.raises(KeyError, match=r".*edge.*is not in the graph."): + G.edges[0, 9] + + def test_call(self): + ev = self.eview(self.G) + assert id(ev) == id(ev()) + assert id(ev) == id(ev(data=False)) + assert id(ev) != id(ev(data=True)) + assert id(ev) != id(ev(nbunch=1)) + + def test_data(self): + ev = self.eview(self.G) + assert id(ev) != id(ev.data()) + assert id(ev) == id(ev.data(data=False)) + assert id(ev) != id(ev.data(data=True)) + assert id(ev) != id(ev.data(nbunch=1)) + + def test_iter(self): + ev = self.eview(self.G) + for u, v in ev: + pass + iev = iter(ev) + assert next(iev) == (0, 1) + assert iter(ev) != ev + assert iter(iev) == iev + + def test_contains(self): + ev = self.eview(self.G) + edv = ev() + if self.G.is_directed(): + assert (1, 2) in ev and (2, 1) not in ev + assert (1, 2) in edv and (2, 1) not in edv + else: + assert (1, 2) in ev and (2, 1) in ev + assert (1, 2) in edv and (2, 1) in edv + assert (1, 4) not in ev + assert (1, 4) not in edv + # edge not in graph + assert (1, 90) not in ev + assert (90, 1) not in ev + assert (1, 90) not in edv + assert (90, 1) not in 
edv
+
+    def test_contains_with_nbunch(self):
+        ev = self.eview(self.G)
+        evn = ev(nbunch=[0, 2])
+        assert (0, 1) in evn
+        assert (1, 2) in evn
+        assert (2, 3) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
+
+    def test_len(self):
+        ev = self.eview(self.G)
+        num_ed = 9 if self.G.is_multigraph() else 8
+        assert len(ev) == num_ed
+
+        H = self.G.copy()
+        H.add_edge(1, 1)
+        assert len(H.edges(1)) == 3 + H.is_multigraph() - H.is_directed()
+        assert len(H.edges()) == num_ed + 1
+        assert len(H.edges) == num_ed + 1
+
+    def test_and(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1), (1, 0), (0, 2)}
+        if self.G.is_directed():
+            assert some_edges & ev == {(0, 1)}
+            assert ev & some_edges == {(0, 1)}
+        else:
+            assert ev & some_edges == {(0, 1), (1, 0)}
+            assert some_edges & ev == {(0, 1), (1, 0)}
+
+    def test_or(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1), (1, 0), (0, 2)}
+        result1 = {(n, n + 1) for n in range(8)}
+        result1.update(some_edges)
+        result2 = {(n + 1, n) for n in range(8)}
+        result2.update(some_edges)
+        assert (ev | some_edges) in (result1, result2)
+        assert (some_edges | ev) in (result1, result2)
+
+    def test_xor(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1), (1, 0), (0, 2)}
+        if self.G.is_directed():
+            result = {(n, n + 1) for n in range(1, 8)}
+            result.update({(1, 0), (0, 2)})
+            assert ev ^ some_edges == result
+        else:
+            result = {(n, n + 1) for n in range(1, 8)}
+            result.update({(0, 2)})
+            assert ev ^ some_edges == result
+
+    def test_sub(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1), (1, 0), (0, 2)}
+        result = {(n, n + 1) for n in range(8)}
+        result.remove((0, 1))
+        assert ev - some_edges == result
+
+
+class TestOutEdgeView(TestEdgeView):
+    @classmethod
+    def setup_class(cls):
+        cls.G = nx.path_graph(9, nx.DiGraph())
+        cls.eview = nx.reportviews.OutEdgeView
+
+    def test_repr(self):
+        ev = self.eview(self.G)
+        rep = (
+            "OutEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), "
+            + "(4, 5), (5, 6), (6, 7), (7, 8)])"
+        )
+        assert repr(ev) == rep
+
+    def test_contains_with_nbunch(self):
+        ev = self.eview(self.G)
+        evn = ev(nbunch=[0, 2])
+        assert (0, 1) in evn
+        assert (1, 2) not in evn
+        assert (2, 3) in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
+
+
+class TestInEdgeView(TestEdgeView):
+    @classmethod
+    def setup_class(cls):
+        cls.G = nx.path_graph(9, nx.DiGraph())
+        cls.eview = nx.reportviews.InEdgeView
+
+    def test_repr(self):
+        ev = self.eview(self.G)
+        rep = (
+            "InEdgeView([(0, 1), (1, 2), (2, 3), (3, 4), "
+            + "(4, 5), (5, 6), (6, 7), (7, 8)])"
+        )
+        assert repr(ev) == rep
+
+    def test_contains_with_nbunch(self):
+        ev = self.eview(self.G)
+        evn = ev(nbunch=[0, 2])
+        assert (0, 1) not in evn
+        assert (1, 2) in evn
+        assert (2, 3) not in evn
+        assert (3, 4) not in evn
+        assert (4, 5) not in evn
+        assert (5, 6) not in evn
+        assert (7, 8) not in evn
+        assert (8, 9) not in evn
+
+
+class TestMultiEdgeView(TestEdgeView):
+    @classmethod
+    def setup_class(cls):
+        cls.G = nx.path_graph(9, nx.MultiGraph())
+        cls.G.add_edge(1, 2, key=3, foo="bar")
+        cls.eview = nx.reportviews.MultiEdgeView
+
+    def modify_edge(self, G, e, **kwds):
+        if len(e) == 2:
+            e = e + (0,)
+        G._adj[e[0]][e[1]][e[2]].update(kwds)
+
+    def test_str(self):
+        ev = self.eview(self.G)
+        replist = [(n, n + 1, 0) for n in range(8)]
+        replist.insert(2, (1, 2, 3))
+        rep = str(replist)
+        assert str(ev) ==
rep
+
+    def test_getitem(self):
+        G = self.G.copy()
+        ev = G.edges
+        G.edges[0, 1, 0]["foo"] = "bar"
+        assert ev[0, 1, 0] == {"foo": "bar"}
+
+        # slicing
+        with pytest.raises(nx.NetworkXError):
+            G.edges[0:5]
+
+    def test_repr(self):
+        ev = self.eview(self.G)
+        rep = (
+            "MultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), "
+            + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])"
+        )
+        assert repr(ev) == rep
+
+    def test_call(self):
+        ev = self.eview(self.G)
+        assert id(ev) == id(ev(keys=True))
+        assert id(ev) == id(ev(data=False, keys=True))
+        assert id(ev) != id(ev(keys=False))
+        assert id(ev) != id(ev(data=True))
+        assert id(ev) != id(ev(nbunch=1))
+
+    def test_data(self):
+        ev = self.eview(self.G)
+        assert id(ev) != id(ev.data())
+        assert id(ev) == id(ev.data(data=False, keys=True))
+        assert id(ev) != id(ev.data(keys=False))
+        assert id(ev) != id(ev.data(data=True))
+        assert id(ev) != id(ev.data(nbunch=1))
+
+    def test_iter(self):
+        ev = self.eview(self.G)
+        for u, v, k in ev:
+            pass
+        iev = iter(ev)
+        assert next(iev) == (0, 1, 0)
+        assert iter(ev) != ev
+        assert iter(iev) == iev
+
+    def test_iterkeys(self):
+        G = self.G
+        evr = self.eview(G)
+        ev = evr(keys=True)
+        for u, v, k in ev:
+            pass
+        assert k == 0
+        ev = evr(keys=True, data="foo", default=1)
+        for u, v, k, wt in ev:
+            pass
+        assert wt == 1
+
+        self.modify_edge(G, (2, 3, 0), foo="bar")
+        ev = evr(keys=True, data=True)
+        for e in ev:
+            assert len(e) == 4
+            if set(e[:2]) == {2, 3}:
+                assert e[2] == 0
+                assert e[3] == {"foo": "bar"}
+                checked = True
+            elif set(e[:3]) == {1, 2, 3}:
+                assert e[2] == 3
+                assert e[3] == {"foo": "bar"}
+                checked_multi = True
+            else:
+                assert e[2] == 0
+                assert e[3] == {}
+        assert checked
+        assert checked_multi
+        ev = evr(keys=True, data="foo", default=1)
+        for e in ev:
+            if set(e[:2]) == {1, 2} and e[2] == 3:
+                assert e[3] == "bar"
+            if set(e[:2]) == {1, 2} and e[2] == 0:
+                assert e[3] == 1
+            if set(e[:2]) == {2, 3}:
+                assert e[2] == 0
+                assert e[3] == "bar"
+                assert len(e) == 4
+                checked_wt = True
+        assert checked_wt
+        ev = evr(keys=True)
+        for e in ev:
+            assert len(e) == 3
+        elist = sorted([(i, i + 1, 0) for i in range(8)] + [(1, 2, 3)])
+        assert sorted(ev) == elist
+        # test that the keyword arguments are passed correctly
+        ev = evr((1, 2), "foo", keys=True, default=1)
+        with pytest.raises(TypeError):
+            evr((1, 2), "foo", True, 1)
+        with pytest.raises(TypeError):
+            evr((1, 2), "foo", True, default=1)
+        for e in ev:
+            if set(e[:2]) == {1, 2}:
+                assert e[2] in {0, 3}
+                if e[2] == 3:
+                    assert e[3] == "bar"
+                else:  # e[2] == 0
+                    assert e[3] == 1
+        if G.is_directed():
+            assert len(list(ev)) == 3
+        else:
+            assert len(list(ev)) == 4
+
+    def test_or(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
+        result = {(n, n + 1, 0) for n in range(8)}
+        result.update(some_edges)
+        result.update({(1, 2, 3)})
+        assert ev | some_edges == result
+        assert some_edges | ev == result
+
+    def test_sub(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
+        result = {(n, n + 1, 0) for n in range(8)}
+        result.remove((0, 1, 0))
+        result.update({(1, 2, 3)})
+        assert ev - some_edges == result
+        # The reverse difference keeps only tuples absent from the view;
+        # (1, 0, 0) matches an existing edge in the undirected case only.
+        if self.G.is_directed():
+            assert some_edges - ev == {(1, 0, 0), (0, 2, 0)}
+        else:
+            assert some_edges - ev == {(0, 2, 0)}
+
+    def test_xor(self):
+        ev = self.eview(self.G)
+        some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)}
+        if self.G.is_directed():
+            result = {(n, n + 1, 0) for n in range(1, 8)}
+            result.update({(1, 0, 0), (0, 2, 0), (1, 2, 3)})
+            assert ev ^ some_edges == result
+            assert some_edges ^ ev == result
+        else:
+            result = {(n, n + 1, 0) for n in
range(1, 8)} + result.update({(0, 2, 0), (1, 2, 3)}) + assert ev ^ some_edges == result + assert some_edges ^ ev == result + + def test_and(self): + ev = self.eview(self.G) + some_edges = {(0, 1, 0), (1, 0, 0), (0, 2, 0)} + if self.G.is_directed(): + assert ev & some_edges == {(0, 1, 0)} + assert some_edges & ev == {(0, 1, 0)} + else: + assert ev & some_edges == {(0, 1, 0), (1, 0, 0)} + assert some_edges & ev == {(0, 1, 0), (1, 0, 0)} + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +class TestOutMultiEdgeView(TestMultiEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.OutMultiEdgeView + + def modify_edge(self, G, e, **kwds): + if len(e) == 2: + e = e + (0,) + G._adj[e[0]][e[1]][e[2]].update(kwds) + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "OutMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0)," + + " (3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) in evn + assert (1, 2) not in evn + assert (2, 3) in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +class TestInMultiEdgeView(TestMultiEdgeView): + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, nx.MultiDiGraph()) + cls.G.add_edge(1, 2, key=3, foo="bar") + cls.eview = nx.reportviews.InMultiEdgeView + + def modify_edge(self, G, e, **kwds): + if len(e) == 2: + e = e + (0,) + G._adj[e[0]][e[1]][e[2]].update(kwds) + + def test_repr(self): + ev = self.eview(self.G) + rep = ( + "InMultiEdgeView([(0, 1, 0), (1, 2, 0), (1, 2, 3), (2, 3, 0), " + + "(3, 4, 0), (4, 5, 0), (5, 6, 0), (6, 7, 0), (7, 8, 0)])" + ) + assert repr(ev) == rep + + def test_contains_with_nbunch(self): + ev = self.eview(self.G) + evn = ev(nbunch=[0, 2]) + assert (0, 1) not in evn + assert (1, 2) in evn + assert (2, 3) not in evn + assert (3, 4) not in evn + assert (4, 5) not in evn + assert (5, 6) not in evn + assert (7, 8) not in evn + assert (8, 9) not in evn + + +# Degrees +class TestDegreeView: + GRAPH = nx.Graph + dview = nx.reportviews.DegreeView + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(6, cls.GRAPH()) + cls.G.add_edge(1, 3, foo=2) + cls.G.add_edge(1, 3, foo=3) + + def test_pickle(self): + import pickle + + deg = self.G.degree + pdeg = pickle.loads(pickle.dumps(deg, -1)) + assert dict(deg) == dict(pdeg) + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 3), (2, 2), (3, 3), (4, 2), (5, 1)]) + assert str(dv) == rep + dv = self.G.degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.dview(self.G) + rep = "DegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" + assert repr(dv) == rep + + def test_iter(self): + dv = self.dview(self.G) + for n, d in dv: + pass + idv = iter(dv) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert next(idv) == (1, dv[1]) + # weighted + dv = self.dview(self.G, weight="foo") + for n, d in dv: + pass + idv = iter(dv) + assert iter(dv) != dv + assert iter(idv) == idv + assert next(idv) == (0, dv[0]) + assert 
next(idv) == (1, dv[1]) + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 2), (3, 3)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 3 + assert dv[2] == 2 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 5 + assert dv[2] == 2 + assert dv[3] == 5 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 5 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 5)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 5 + assert dvd[2] == 2 + assert dvd[3] == 5 + + def test_len(self): + dv = self.dview(self.G) + assert len(dv) == 6 + + +class TestDiDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.DiDegreeView + + def test_repr(self): + dv = self.G.degree() + rep = "DiDegreeView({0: 1, 1: 3, 2: 2, 3: 3, 4: 2, 5: 1})" + assert repr(dv) == rep + + +class TestOutDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.OutDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 2), (2, 1), (3, 1), (4, 1), (5, 0)]) + assert str(dv) == rep + dv = self.G.out_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.out_degree() + rep = "OutDegreeView({0: 1, 1: 2, 2: 1, 3: 1, 4: 1, 5: 0})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 1)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 2 + assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 1 + assert dv[3] == 1 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 4 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 4 + assert dvd[2] == 1 + assert dvd[3] == 1 + + +class TestInDegreeView(TestDegreeView): + GRAPH = nx.DiGraph + dview = nx.reportviews.InDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 0), (1, 1), (2, 1), (3, 2), (4, 1), (5, 1)]) + assert str(dv) == rep + dv = self.G.in_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.in_degree() + rep = "InDegreeView({0: 0, 1: 1, 2: 1, 3: 2, 4: 1, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 0 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 2)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 2 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 4 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 4)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 4 + + +class TestMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiGraph + dview = nx.reportviews.MultiDegreeView + + def test_str(self): + dv = self.dview(self.G) 
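+        # setup_class adds two parallel (1, 3) edges, so nodes 1 and 3 each
+        # report degree 4: two path edges plus the two parallel edges.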
+ rep = str([(0, 1), (1, 4), (2, 2), (3, 4), (4, 2), (5, 1)]) + assert str(dv) == rep + dv = self.G.degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.degree() + rep = "MultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 2), (3, 4)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 4 + assert dv[2] == 2 + assert dv[3] == 4 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 7 + assert dv[2] == 2 + assert dv[3] == 7 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 7 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 2), (3, 7)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 7 + assert dvd[2] == 2 + assert dvd[3] == 7 + + +class TestDiMultiDegreeView(TestMultiDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.DiMultiDegreeView + + def test_repr(self): + dv = self.G.degree() + rep = "DiMultiDegreeView({0: 1, 1: 4, 2: 2, 3: 4, 4: 2, 5: 1})" + assert repr(dv) == rep + + +class TestOutMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.OutMultiDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 1), (1, 3), (2, 1), (3, 1), (4, 1), (5, 0)]) + assert str(dv) == rep + dv = self.G.out_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.out_degree() + rep = "OutMultiDegreeView({0: 1, 1: 3, 2: 1, 3: 1, 4: 1, 5: 0})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 1 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 1)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 1 + assert dv[1] == 3 + assert dv[2] == 1 + assert dv[3] == 1 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 1 + assert dv[1] == 6 + assert dv[2] == 1 + assert dv[3] == 1 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 1 + dvw = dv(1, weight="foo") + assert dvw == 6 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 1)] + dvd = dict(dv(weight="foo")) + assert dvd[0] == 1 + assert dvd[1] == 6 + assert dvd[2] == 1 + assert dvd[3] == 1 + + +class TestInMultiDegreeView(TestDegreeView): + GRAPH = nx.MultiDiGraph + dview = nx.reportviews.InMultiDegreeView + + def test_str(self): + dv = self.dview(self.G) + rep = str([(0, 0), (1, 1), (2, 1), (3, 3), (4, 1), (5, 1)]) + assert str(dv) == rep + dv = self.G.in_degree() + assert str(dv) == rep + + def test_repr(self): + dv = self.G.in_degree() + rep = "InMultiDegreeView({0: 0, 1: 1, 2: 1, 3: 3, 4: 1, 5: 1})" + assert repr(dv) == rep + + def test_nbunch(self): + dv = self.dview(self.G) + dvn = dv(0) + assert dvn == 0 + dvn = dv([2, 3]) + assert sorted(dvn) == [(2, 1), (3, 3)] + + def test_getitem(self): + dv = self.dview(self.G) + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 3 + dv = self.dview(self.G, weight="foo") + assert dv[0] == 0 + assert dv[1] == 1 + assert dv[2] == 1 + assert dv[3] == 6 + + def test_weight(self): + dv = self.dview(self.G) + dvw = dv(0, weight="foo") + assert dvw == 0 + dvw = dv(1, weight="foo") + assert dvw == 1 + dvw = dv([2, 3], weight="foo") + assert sorted(dvw) == [(2, 1), (3, 6)] + dvd = dict(dv(weight="foo")) + 
assert dvd[0] == 0 + assert dvd[1] == 1 + assert dvd[2] == 1 + assert dvd[3] == 6 + + +@pytest.mark.parametrize( + ("reportview", "err_msg_terms"), + ( + (rv.NodeView, "list(G.nodes"), + (rv.NodeDataView, "list(G.nodes.data"), + (rv.EdgeView, "list(G.edges"), + # Directed EdgeViews + (rv.InEdgeView, "list(G.in_edges"), + (rv.OutEdgeView, "list(G.edges"), + # Multi EdgeViews + (rv.MultiEdgeView, "list(G.edges"), + (rv.InMultiEdgeView, "list(G.in_edges"), + (rv.OutMultiEdgeView, "list(G.edges"), + ), +) +def test_slicing_reportviews(reportview, err_msg_terms): + G = nx.complete_graph(3) + view = reportview(G) + with pytest.raises(nx.NetworkXError) as exc: + view[0:2] + errmsg = str(exc.value) + assert type(view).__name__ in errmsg + assert err_msg_terms in errmsg + + +@pytest.mark.parametrize( + "graph", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] +) +def test_cache_dict_get_set_state(graph): + G = nx.path_graph(5, graph()) + G.nodes, G.edges, G.adj, G.degree + if G.is_directed(): + G.pred, G.succ, G.in_edges, G.out_edges, G.in_degree, G.out_degree + cached_dict = G.__dict__ + assert "nodes" in cached_dict + assert "edges" in cached_dict + assert "adj" in cached_dict + assert "degree" in cached_dict + if G.is_directed(): + assert "pred" in cached_dict + assert "succ" in cached_dict + assert "in_edges" in cached_dict + assert "out_edges" in cached_dict + assert "in_degree" in cached_dict + assert "out_degree" in cached_dict + + # Raises error if the cached properties and views do not work + pickle.loads(pickle.dumps(G, -1)) + deepcopy(G) + + +def test_edge_views_inherit_from_EdgeViewABC(): + all_edge_view_classes = (v for v in dir(nx.reportviews) if "Edge" in v) + for eview_class in all_edge_view_classes: + assert issubclass( + getattr(nx.reportviews, eview_class), nx.reportviews.EdgeViewABC + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_special.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_special.py new file mode 100644 index 0000000..33b61a4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_special.py @@ -0,0 +1,131 @@ +import networkx as nx + +from .test_digraph import BaseDiGraphTester +from .test_digraph import TestDiGraph as _TestDiGraph +from .test_graph import BaseGraphTester +from .test_graph import TestGraph as _TestGraph +from .test_multidigraph import TestMultiDiGraph as _TestMultiDiGraph +from .test_multigraph import TestMultiGraph as _TestMultiGraph + + +def test_factories(): + class mydict1(dict): + pass + + class mydict2(dict): + pass + + class mydict3(dict): + pass + + class mydict4(dict): + pass + + class mydict5(dict): + pass + + for Graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph): + + class MyGraph(Graph): + node_dict_factory = mydict1 + adjlist_outer_dict_factory = mydict2 + adjlist_inner_dict_factory = mydict3 + edge_key_dict_factory = mydict4 + edge_attr_dict_factory = mydict5 + + G = MyGraph() + assert isinstance(G._node, mydict1) + assert isinstance(G._adj, mydict2) + G.add_node(1) + assert isinstance(G._adj[1], mydict3) + if G.is_directed(): + assert isinstance(G._pred, mydict2) + assert isinstance(G._succ, mydict2) + assert isinstance(G._pred[1], mydict3) + G.add_edge(1, 2) + if G.is_multigraph(): + assert isinstance(G._adj[1][2], mydict4) + assert isinstance(G._adj[1][2][0], mydict5) + else: + assert isinstance(G._adj[1][2], mydict5) + + +class TestSpecialGraph(_TestGraph): + def setup_method(self): + _TestGraph.setup_method(self) + self.Graph = 
nx.Graph + + +class TestThinGraph(BaseGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} + + class MyGraph(nx.Graph): + def edge_attr_dict_factory(self): + return all_edge_dict + + self.Graph = MyGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._adj = self.k3adj + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + +class TestSpecialDiGraph(_TestDiGraph): + def setup_method(self): + _TestDiGraph.setup_method(self) + self.Graph = nx.DiGraph + + +class TestThinDiGraph(BaseDiGraphTester): + def setup_method(self): + all_edge_dict = {"weight": 1} + + class MyGraph(nx.DiGraph): + def edge_attr_dict_factory(self): + return all_edge_dict + + self.Graph = MyGraph + # build dict-of-dict-of-dict K3 + ed1, ed2, ed3 = (all_edge_dict, all_edge_dict, all_edge_dict) + ed4, ed5, ed6 = (all_edge_dict, all_edge_dict, all_edge_dict) + self.k3adj = {0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1: ed6}} + self.k3edges = [(0, 1), (0, 2), (1, 2)] + self.k3nodes = [0, 1, 2] + self.K3 = self.Graph() + self.K3._succ = self.k3adj + # K3._adj is synced with K3._succ + self.K3._pred = {0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1: ed4}} + self.K3._node = {} + self.K3._node[0] = {} + self.K3._node[1] = {} + self.K3._node[2] = {} + + ed1, ed2 = (all_edge_dict, all_edge_dict) + self.P3 = self.Graph() + self.P3._succ = {0: {1: ed1}, 1: {2: ed2}, 2: {}} + # P3._adj is synced with P3._succ + self.P3._pred = {0: {}, 1: {0: ed1}, 2: {1: ed2}} + self.P3._node = {} + self.P3._node[0] = {} + self.P3._node[1] = {} + self.P3._node[2] = {} + + +class TestSpecialMultiGraph(_TestMultiGraph): + def setup_method(self): + _TestMultiGraph.setup_method(self) + self.Graph = nx.MultiGraph + + +class TestSpecialMultiDiGraph(_TestMultiDiGraph): + def setup_method(self): + _TestMultiDiGraph.setup_method(self) + self.Graph = nx.MultiDiGraph diff --git a/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_subgraphviews.py b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_subgraphviews.py new file mode 100644 index 0000000..66d570c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/classes/tests/test_subgraphviews.py @@ -0,0 +1,371 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +class TestSubGraphView: + gview = staticmethod(nx.subgraph_view) + graph = nx.Graph + hide_edges_filter = staticmethod(nx.filters.hide_edges) + show_edges_filter = staticmethod(nx.filters.show_edges) + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) + cls.hide_edges_w_hide_nodes = {(3, 4), (4, 5), (5, 6)} + + def test_hidden_nodes(self): + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + gview = self.gview + G = gview(self.G, filter_node=nodes_gone) + assert self.G.nodes - G.nodes == {4, 5} + assert self.G.edges - G.edges == self.hide_edges_w_hide_nodes + if G.is_directed(): + assert list(G[3]) == [] + assert list(G[2]) == [3] + else: + assert list(G[3]) == [2] + assert set(G[2]) == {1, 3} + pytest.raises(KeyError, G.__getitem__, 4) + pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (7 if 
G.is_multigraph() else 5) + + def test_hidden_edges(self): + hide_edges = [(2, 3), (8, 7), (222, 223)] + edges_gone = self.hide_edges_filter(hide_edges) + gview = self.gview + G = gview(self.G, filter_edge=edges_gone) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert self.G.edges - G.edges == {(2, 3)} + assert list(G[2]) == [] + assert list(G.pred[3]) == [] + assert list(G.pred[2]) == [1] + assert G.size() == 7 + else: + assert self.G.edges - G.edges == {(2, 3), (7, 8)} + assert list(G[2]) == [1] + assert G.size() == 6 + assert list(G[3]) == [4] + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 + + def test_shown_node(self): + induced_subgraph = nx.filters.show_nodes([2, 3, 111]) + gview = self.gview + G = gview(self.G, filter_node=induced_subgraph) + assert set(G.nodes) == {2, 3} + if G.is_directed(): + assert list(G[3]) == [] + else: + assert list(G[3]) == [2] + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 4) + pytest.raises(KeyError, G.__getitem__, 112) + pytest.raises(KeyError, G.__getitem__, 111) + assert G.degree(3) == (3 if G.is_multigraph() else 1) + assert G.size() == (3 if G.is_multigraph() else 1) + + def test_shown_edges(self): + show_edges = [(2, 3), (8, 7), (222, 223)] + edge_subgraph = self.show_edges_filter(show_edges) + G = self.gview(self.G, filter_edge=edge_subgraph) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert G.edges == {(2, 3)} + assert list(G[3]) == [] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 + else: + assert G.edges == {(2, 3), (7, 8)} + assert list(G[3]) == [2] + assert list(G[2]) == [3] + assert G.size() == 2 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + assert G.degree(3) == 1 + + +class TestSubDiGraphView(TestSubGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.DiGraph + hide_edges_filter = staticmethod(nx.filters.hide_diedges) + show_edges_filter = staticmethod(nx.filters.show_diedges) + hide_edges = [(2, 3), (8, 7), (222, 223)] + excluded = {(2, 3), (3, 4), (4, 5), (5, 6)} + + def test_inoutedges(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert self.G.in_edges - G.in_edges == self.excluded + assert self.G.out_edges - G.out_edges == self.excluded + + def test_pred(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert list(G.pred[2]) == [1] + assert list(G.pred[6]) == [] + + def test_inout_degree(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert G.degree(2) == 1 + assert G.out_degree(2) == 0 + assert G.in_degree(2) == 1 + assert G.size() == 4 + + +# multigraph +class TestMultiGraphView(TestSubGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.MultiGraph + hide_edges_filter = staticmethod(nx.filters.hide_multiedges) + show_edges_filter = staticmethod(nx.filters.show_multiedges) + + @classmethod + def setup_class(cls): + cls.G = nx.path_graph(9, create_using=cls.graph()) + multiedges = {(2, 3, 
4), (2, 3, 5)} + cls.G.add_edges_from(multiedges) + cls.hide_edges_w_hide_nodes = {(3, 4, 0), (4, 5, 0), (5, 6, 0)} + + def test_hidden_edges(self): + hide_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] + edges_gone = self.hide_edges_filter(hide_edges) + G = self.gview(self.G, filter_edge=edges_gone) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert self.G.edges - G.edges == {(2, 3, 4)} + assert list(G[3]) == [4] + assert list(G[2]) == [3] + assert list(G.pred[3]) == [2] # only one 2 but two edges + assert list(G.pred[2]) == [1] + assert G.size() == 9 + else: + assert self.G.edges - G.edges == {(2, 3, 4), (7, 8, 0)} + assert list(G[3]) == [2, 4] + assert list(G[2]) == [1, 3] + assert G.size() == 8 + assert G.degree(3) == 3 + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + + def test_shown_edges(self): + show_edges = [(2, 3, 4), (2, 3, 3), (8, 7, 0), (222, 223, 0)] + edge_subgraph = self.show_edges_filter(show_edges) + G = self.gview(self.G, filter_edge=edge_subgraph) + assert self.G.nodes == G.nodes + if G.is_directed(): + assert G.edges == {(2, 3, 4)} + assert list(G[3]) == [] + assert list(G.pred[3]) == [2] + assert list(G.pred[2]) == [] + assert G.size() == 1 + else: + assert G.edges == {(2, 3, 4), (7, 8, 0)} + assert G.size() == 2 + assert list(G[3]) == [2] + assert G.degree(3) == 1 + assert list(G[2]) == [3] + pytest.raises(KeyError, G.__getitem__, 221) + pytest.raises(KeyError, G.__getitem__, 222) + + +# multidigraph +class TestMultiDiGraphView(TestMultiGraphView, TestSubDiGraphView): + gview = staticmethod(nx.subgraph_view) + graph = nx.MultiDiGraph + hide_edges_filter = staticmethod(nx.filters.hide_multidiedges) + show_edges_filter = staticmethod(nx.filters.show_multidiedges) + hide_edges = [(2, 3, 0), (8, 7, 0), (222, 223, 0)] + excluded = {(2, 3, 0), (3, 4, 0), (4, 5, 0), (5, 6, 0)} + + def test_inout_degree(self): + edges_gone = self.hide_edges_filter(self.hide_edges) + hide_nodes = [4, 5, 111] + nodes_gone = nx.filters.hide_nodes(hide_nodes) + G = self.gview(self.G, filter_node=nodes_gone, filter_edge=edges_gone) + + assert G.degree(2) == 3 + assert G.out_degree(2) == 2 + assert G.in_degree(2) == 1 + assert G.size() == 6 + + +# induced_subgraph +class TestInducedSubGraph: + @classmethod + def setup_class(cls): + cls.K3 = G = nx.complete_graph(3) + G.graph["foo"] = [] + G.nodes[0]["foo"] = [] + G.remove_edge(1, 2) + ll = [] + G.add_edge(1, 2, foo=ll) + G.add_edge(2, 1, foo=ll) + + def test_full_graph(self): + G = self.K3 + H = nx.induced_subgraph(G, [0, 1, 2, 5]) + assert H.name == G.name + self.graphs_equal(H, G) + self.same_attrdict(H, G) + + def test_partial_subgraph(self): + G = self.K3 + H = nx.induced_subgraph(G, 0) + assert dict(H.adj) == {0: {}} + assert dict(G.adj) != {0: {}} + + H = nx.induced_subgraph(G, [0, 1]) + assert dict(H.adj) == {0: {1: {}}, 1: {0: {}}} + + def same_attrdict(self, H, G): + old_foo = H[1][2]["foo"] + H.edges[1, 2]["foo"] = "baz" + assert G.edges == H.edges + H.edges[1, 2]["foo"] = old_foo + assert G.edges == H.edges + old_foo = H.nodes[0]["foo"] + H.nodes[0]["foo"] = "baz" + assert G.nodes == H.nodes + H.nodes[0]["foo"] = old_foo + assert G.nodes == H.nodes + + def graphs_equal(self, H, G): + assert G._adj == H._adj + assert G._node == H._node + assert G.graph == H.graph + assert G.name == H.name + if not G.is_directed() and not H.is_directed(): + assert H._adj[1][2] is H._adj[2][1] + assert G._adj[1][2] is G._adj[2][1] + else: # at least one is directed + if not 
G.is_directed(): + G._pred = G._adj + G._succ = G._adj + if not H.is_directed(): + H._pred = H._adj + H._succ = H._adj + assert G._pred == H._pred + assert G._succ == H._succ + assert H._succ[1][2] is H._pred[2][1] + assert G._succ[1][2] is G._pred[2][1] + + +# edge_subgraph +class TestEdgeSubGraph: + @classmethod + def setup_class(cls): + # Create a path graph on five nodes. + cls.G = G = nx.path_graph(5) + # Add some node, edge, and graph attributes. + for i in range(5): + G.nodes[i]["name"] = f"node{i}" + G.edges[0, 1]["name"] = "edge01" + G.edges[3, 4]["name"] = "edge34" + G.graph["name"] = "graph" + # Get the subgraph induced by the first and last edges. + cls.H = nx.edge_subgraph(G, [(0, 1), (3, 4)]) + + def test_correct_nodes(self): + """Tests that the subgraph has the correct nodes.""" + assert [(0, "node0"), (1, "node1"), (3, "node3"), (4, "node4")] == sorted( + self.H.nodes.data("name") + ) + + def test_correct_edges(self): + """Tests that the subgraph has the correct edges.""" + assert edges_equal( + [(0, 1, "edge01"), (3, 4, "edge34")], self.H.edges.data("name") + ) + + def test_add_node(self): + """Tests that adding a node to the original graph does not + affect the nodes of the subgraph. + + """ + self.G.add_node(5) + assert [0, 1, 3, 4] == sorted(self.H.nodes) + self.G.remove_node(5) + + def test_remove_node(self): + """Tests that removing a node in the original graph + removes the nodes of the subgraph. + + """ + self.G.remove_node(0) + assert [1, 3, 4] == sorted(self.H.nodes) + self.G.add_node(0, name="node0") + self.G.add_edge(0, 1, name="edge01") + + def test_node_attr_dict(self): + """Tests that the node attribute dictionary of the two graphs is + the same object. + + """ + for v in self.H: + assert self.G.nodes[v] == self.H.nodes[v] + # Making a change to G should make a change in H and vice versa. + self.G.nodes[0]["name"] = "foo" + assert self.G.nodes[0] == self.H.nodes[0] + self.H.nodes[1]["name"] = "bar" + assert self.G.nodes[1] == self.H.nodes[1] + # Revert the change, so tests pass with pytest-randomly + self.G.nodes[0]["name"] = "node0" + self.H.nodes[1]["name"] = "node1" + + def test_edge_attr_dict(self): + """Tests that the edge attribute dictionary of the two graphs is + the same object. + + """ + for u, v in self.H.edges(): + assert self.G.edges[u, v] == self.H.edges[u, v] + # Making a change to G should make a change in H and vice versa. + self.G.edges[0, 1]["name"] = "foo" + assert self.G.edges[0, 1]["name"] == self.H.edges[0, 1]["name"] + self.H.edges[3, 4]["name"] = "bar" + assert self.G.edges[3, 4]["name"] == self.H.edges[3, 4]["name"] + # Revert the change, so tests pass with pytest-randomly + self.G.edges[0, 1]["name"] = "edge01" + self.H.edges[3, 4]["name"] = "edge34" + + def test_graph_attr_dict(self): + """Tests that the graph attribute dictionary of the two graphs + is the same object. + + """ + assert self.G.graph is self.H.graph + + def test_readonly(self): + """Tests that the subgraph cannot change the graph structure""" + pytest.raises(nx.NetworkXError, self.H.add_node, 5) + pytest.raises(nx.NetworkXError, self.H.remove_node, 0) + pytest.raises(nx.NetworkXError, self.H.add_edge, 5, 6) + pytest.raises(nx.NetworkXError, self.H.remove_edge, 0, 1) + + @pytest.mark.parametrize("multigraph", (nx.MultiGraph, nx.MultiDiGraph)) + def test_multigraph_filtered_edges(self, multigraph): + """Check edge visibility in FilterMultiInner on edge_subgraph's of + multigraphs. 
See gh-7724.""" + G = multigraph([("a", "b"), ("a", "c"), ("c", "b")]) + H = nx.edge_subgraph(G, [("a", "b", 0), ("c", "b", 0)]) + assert "c" not in H["a"] + assert not H.has_edge("a", "c") diff --git a/.venv/lib/python3.12/site-packages/networkx/conftest.py b/.venv/lib/python3.12/site-packages/networkx/conftest.py new file mode 100644 index 0000000..9f23922 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/conftest.py @@ -0,0 +1,257 @@ +""" +Testing +======= + +General guidelines for writing good tests: + +- doctests always assume ``import networkx as nx`` so don't add that +- prefer pytest fixtures over classes with setup methods. +- use the ``@pytest.mark.parametrize`` decorator +- use ``pytest.importorskip`` for numpy, scipy, pandas, and matplotlib b/c of PyPy. + and add the module to the relevant entries below. + +""" + +import os +import warnings +from importlib.metadata import entry_points + +import pytest + +import networkx as nx + + +def pytest_addoption(parser): + parser.addoption( + "--runslow", action="store_true", default=False, help="run slow tests" + ) + parser.addoption( + "--backend", + action="store", + default=None, + help="Run tests with a backend by auto-converting nx graphs to backend graphs", + ) + parser.addoption( + "--fallback-to-nx", + action="store_true", + default=False, + help="Run nx function if a backend doesn't implement a dispatchable function" + " (use with --backend)", + ) + + +def pytest_configure(config): + config.addinivalue_line("markers", "slow: mark test as slow to run") + backend = config.getoption("--backend") + if backend is None: + backend = os.environ.get("NETWORKX_TEST_BACKEND") + # nx_loopback backend is only available when testing with a backend + loopback_ep = entry_points(name="nx_loopback", group="networkx.backends") + if not loopback_ep: + warnings.warn( + "\n\n WARNING: Mixed NetworkX configuration! \n\n" + " This environment has mixed configuration for networkx.\n" + " The test object nx_loopback is not configured correctly.\n" + " You should not be seeing this message.\n" + " Try `pip install -e .`, or change your PYTHONPATH\n" + " Make sure python finds the networkx repo you are testing\n\n" + ) + config.backend = backend + if backend: + # We will update `networkx.config.backend_priority` below in `*_modify_items` + # to allow tests to get set up with normal networkx graphs. + nx.utils.backends.backends["nx_loopback"] = loopback_ep["nx_loopback"] + nx.utils.backends.backend_info["nx_loopback"] = {} + nx.config.backends = nx.utils.Config( + nx_loopback=nx.utils.Config(), + **nx.config.backends, + ) + fallback_to_nx = config.getoption("--fallback-to-nx") + if not fallback_to_nx: + fallback_to_nx = os.environ.get("NETWORKX_FALLBACK_TO_NX") + nx.config.fallback_to_nx = bool(fallback_to_nx) + nx.utils.backends._dispatchable.__call__ = ( + nx.utils.backends._dispatchable._call_if_any_backends_installed + ) + + +def pytest_collection_modifyitems(config, items): + # Setting this to True here allows tests to be set up before dispatching + # any function call to a backend. 
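+    # For example, with a third-party backend registered under the
+    # "networkx.backends" entry point (the backend name here is illustrative):
+    #     pytest --backend my_backend --fallback-to-nx
+    # or, equivalently, NETWORKX_TEST_BACKEND=my_backend pytest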
+ if config.backend: + # Allow pluggable backends to add markers to tests (such as skip or xfail) + # when running in auto-conversion test mode + backend_name = config.backend + if backend_name != "networkx": + nx.utils.backends._dispatchable._is_testing = True + nx.config.backend_priority.algos = [backend_name] + nx.config.backend_priority.generators = [backend_name] + backend = nx.utils.backends.backends[backend_name].load() + if hasattr(backend, "on_start_tests"): + getattr(backend, "on_start_tests")(items) + + if config.getoption("--runslow"): + # --runslow given in cli: do not skip slow tests + return + skip_slow = pytest.mark.skip(reason="need --runslow option to run") + for item in items: + if "slow" in item.keywords: + item.add_marker(skip_slow) + + +# TODO: The warnings below need to be dealt with, but for now we silence them. +@pytest.fixture(autouse=True) +def set_warnings(): + warnings.filterwarnings( + "ignore", + category=UserWarning, + message=r"Exited (at iteration \d+|postprocessing) with accuracies.*", + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\nThe `normalized`" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="\n\n`compute_v_structures" + ) + warnings.filterwarnings( + "ignore", category=DeprecationWarning, message="Keyword argument 'link'" + ) + + +@pytest.fixture(autouse=True) +def add_nx(doctest_namespace): + doctest_namespace["nx"] = nx + + +# What dependencies are installed? + +try: + import numpy as np + + has_numpy = True +except ImportError: + has_numpy = False + +try: + import scipy as sp + + has_scipy = True +except ImportError: + has_scipy = False + +try: + import matplotlib as mpl + + has_matplotlib = True +except ImportError: + has_matplotlib = False + +try: + import pandas as pd + + has_pandas = True +except ImportError: + has_pandas = False + +try: + import pygraphviz + + has_pygraphviz = True +except ImportError: + has_pygraphviz = False + +try: + import pydot + + has_pydot = True +except ImportError: + has_pydot = False + +try: + import sympy + + has_sympy = True +except ImportError: + has_sympy = False + + +# List of files that pytest should ignore + +collect_ignore = [] + +needs_numpy = [ + "algorithms/approximation/traveling_salesman.py", + "algorithms/centrality/current_flow_closeness.py", + "algorithms/centrality/laplacian.py", + "algorithms/node_classification.py", + "algorithms/non_randomness.py", + "algorithms/polynomials.py", + "algorithms/shortest_paths/dense.py", + "algorithms/structuralholes.py", + "algorithms/tree/mst.py", + "drawing/nx_latex.py", + "generators/expanders.py", + "linalg/bethehessianmatrix.py", + "linalg/laplacianmatrix.py", + "utils/misc.py", +] +needs_scipy = [ + "algorithms/approximation/traveling_salesman.py", + "algorithms/assortativity/correlation.py", + "algorithms/assortativity/mixing.py", + "algorithms/assortativity/pairs.py", + "algorithms/bipartite/matrix.py", + "algorithms/bipartite/spectral.py", + "algorithms/bipartite/link_analysis.py", + "algorithms/centrality/current_flow_betweenness.py", + "algorithms/centrality/current_flow_betweenness_subset.py", + "algorithms/centrality/eigenvector.py", + "algorithms/centrality/katz.py", + "algorithms/centrality/laplacian.py", + "algorithms/centrality/second_order.py", + "algorithms/centrality/subgraph_alg.py", + "algorithms/communicability_alg.py", + "algorithms/community/divisive.py", + "algorithms/distance_measures.py", + "algorithms/link_analysis/hits_alg.py", + 
"algorithms/link_analysis/pagerank_alg.py", + "algorithms/node_classification.py", + "algorithms/similarity.py", + "algorithms/structuralholes.py", + "algorithms/tree/mst.py", + "algorithms/walks.py", + "convert_matrix.py", + "drawing/layout.py", + "drawing/nx_pylab.py", + "generators/spectral_graph_forge.py", + "generators/geometric.py", + "generators/expanders.py", + "linalg/algebraicconnectivity.py", + "linalg/attrmatrix.py", + "linalg/bethehessianmatrix.py", + "linalg/graphmatrix.py", + "linalg/laplacianmatrix.py", + "linalg/modularitymatrix.py", + "linalg/spectrum.py", + "utils/rcm.py", +] +needs_matplotlib = ["drawing/nx_pylab.py", "generators/classic.py"] +needs_pandas = ["convert_matrix.py"] +needs_pygraphviz = ["drawing/nx_agraph.py"] +needs_pydot = ["drawing/nx_pydot.py"] +needs_sympy = ["algorithms/polynomials.py"] + +if not has_numpy: + collect_ignore += needs_numpy +if not has_scipy: + collect_ignore += needs_scipy +if not has_matplotlib: + collect_ignore += needs_matplotlib +if not has_pandas: + collect_ignore += needs_pandas +if not has_pygraphviz: + collect_ignore += needs_pygraphviz +if not has_pydot: + collect_ignore += needs_pydot +if not has_sympy: + collect_ignore += needs_sympy diff --git a/.venv/lib/python3.12/site-packages/networkx/convert.py b/.venv/lib/python3.12/site-packages/networkx/convert.py new file mode 100644 index 0000000..bf0a502 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/convert.py @@ -0,0 +1,502 @@ +"""Functions to convert NetworkX graphs to and from other formats. + +The preferred way of converting data to a NetworkX graph is through the +graph constructor. The constructor calls the to_networkx_graph() function +which attempts to guess the input type and convert it automatically. + +Examples +-------- +Create a graph with a single edge from a dictionary of dictionaries + +>>> d = {0: {1: 1}} # dict-of-dicts single edge (0,1) +>>> G = nx.Graph(d) + +See Also +-------- +nx_agraph, nx_pydot +""" + +from collections.abc import Collection, Generator, Iterator + +import networkx as nx + +__all__ = [ + "to_networkx_graph", + "from_dict_of_dicts", + "to_dict_of_dicts", + "from_dict_of_lists", + "to_dict_of_lists", + "from_edgelist", + "to_edgelist", +] + + +def to_networkx_graph(data, create_using=None, multigraph_input=False): + """Make a NetworkX graph from a known data structure. + + The preferred way to call this is automatically + from the class constructor + + >>> d = {0: {1: {"weight": 1}}} # dict-of-dicts single edge (0,1) + >>> G = nx.Graph(d) + + instead of the equivalent + + >>> G = nx.from_dict_of_dicts(d) + + Parameters + ---------- + data : object to be converted + + Current known types are: + any NetworkX graph + dict-of-dicts + dict-of-lists + container (e.g. set, list, tuple) of edges + iterator (e.g. itertools.chain) that produces edges + generator of edges + Pandas DataFrame (row per edge) + 2D numpy array + scipy sparse array + pygraphviz agraph + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + multigraph_input : bool (default False) + If True and data is a dict_of_dicts, + try to create a multigraph assuming dict_of_dict_of_lists. + If data and create_using are both multigraphs then create + a multigraph from a multigraph. 
+
+    """
+    # NX graph
+    if hasattr(data, "adj"):
+        try:
+            result = from_dict_of_dicts(
+                data.adj,
+                create_using=create_using,
+                multigraph_input=data.is_multigraph(),
+            )
+            # data.graph should be dict-like
+            result.graph.update(data.graph)
+            # data.nodes should be dict-like
+            # result.add_nodes_from(data.nodes.items()) would also work, but
+            # could behave unexpectedly with a custom node_attr_dict_factory,
+            # so update each node's attribute dict in place instead.
+            for n, dd in data.nodes.items():
+                result._node[n].update(dd)
+            return result
+        except Exception as err:
+            raise nx.NetworkXError("Input is not a correct NetworkX graph.") from err
+
+    # dict of dicts/lists
+    if isinstance(data, dict):
+        try:
+            return from_dict_of_dicts(
+                data, create_using=create_using, multigraph_input=multigraph_input
+            )
+        except Exception as err1:
+            if multigraph_input is True:
+                raise nx.NetworkXError(
+                    f"converting multigraph_input raised:\n{type(err1)}: {err1}"
+                )
+            try:
+                return from_dict_of_lists(data, create_using=create_using)
+            except Exception as err2:
+                raise TypeError("Input is not a known type.") from err2
+
+    # edgelists
+    if isinstance(data, list | tuple | nx.reportviews.EdgeViewABC | Iterator):
+        try:
+            return from_edgelist(data, create_using=create_using)
+        except Exception:
+            pass
+
+    # pygraphviz agraph
+    if hasattr(data, "is_strict"):
+        try:
+            return nx.nx_agraph.from_agraph(data, create_using=create_using)
+        except Exception as err:
+            raise nx.NetworkXError("Input is not a correct pygraphviz graph.") from err
+
+    # Pandas DataFrame
+    try:
+        import pandas as pd
+
+        if isinstance(data, pd.DataFrame):
+            if data.shape[0] == data.shape[1]:
+                try:
+                    return nx.from_pandas_adjacency(data, create_using=create_using)
+                except Exception as err:
+                    msg = "Input is not a correct Pandas DataFrame adjacency matrix."
+                    raise nx.NetworkXError(msg) from err
+            else:
+                try:
+                    return nx.from_pandas_edgelist(
+                        data, edge_attr=True, create_using=create_using
+                    )
+                except Exception as err:
+                    msg = "Input is not a correct Pandas DataFrame edge-list."
+                    raise nx.NetworkXError(msg) from err
+    except ImportError:
+        pass
+
+    # numpy array
+    try:
+        import numpy as np
+
+        if isinstance(data, np.ndarray):
+            try:
+                return nx.from_numpy_array(data, create_using=create_using)
+            except Exception as err:
+                raise nx.NetworkXError(
+                    "Failed to interpret array as an adjacency matrix."
+                ) from err
+    except ImportError:
+        pass
+
+    # scipy sparse array - any format
+    try:
+        import scipy as sp
+
+        if hasattr(data, "format"):
+            try:
+                return nx.from_scipy_sparse_array(data, create_using=create_using)
+            except Exception as err:
+                raise nx.NetworkXError(
+                    "Input is not a correct scipy sparse array type."
+                ) from err
+    except ImportError:
+        pass
+
+    # Note: most general check - should remain last in order of execution
+    # Includes containers (e.g. list, set, dict, etc.), generators, and
+    # iterators (e.g. itertools.chain) of edges
+
+    if isinstance(data, Collection | Generator | Iterator):
+        try:
+            return from_edgelist(data, create_using=create_using)
+        except Exception as err:
+            raise nx.NetworkXError("Input is not a valid edge list") from err
+
+    raise nx.NetworkXError("Input is not a known data type for conversion.")
+
+
+@nx._dispatchable
+def to_dict_of_lists(G, nodelist=None):
+    """Returns adjacency representation of graph as a dictionary of lists.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph
+
+    nodelist : list
+        Use only nodes specified in nodelist
+
+    Notes
+    -----
+    Completely ignores edge data for MultiGraph and MultiDiGraph.
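+
+    Examples
+    --------
+    A short illustration on a path graph:
+
+    >>> G = nx.path_graph(3)
+    >>> nx.to_dict_of_lists(G)
+    {0: [1], 1: [0, 2], 2: [1]}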
+ + """ + if nodelist is None: + nodelist = G + + d = {} + for n in nodelist: + d[n] = [nbr for nbr in G.neighbors(n) if nbr in nodelist] + return d + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_dict_of_lists(d, create_using=None): + """Returns a graph from a dictionary of lists. + + Parameters + ---------- + d : dictionary of lists + A dictionary of lists adjacency representation. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Examples + -------- + >>> dol = {0: [1]} # single edge (0,1) + >>> G = nx.from_dict_of_lists(dol) + + or + + >>> G = nx.Graph(dol) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_nodes_from(d) + if G.is_multigraph() and not G.is_directed(): + # a dict_of_lists can't show multiedges. BUT for undirected graphs, + # each edge shows up twice in the dict_of_lists. + # So we need to treat this case separately. + seen = {} + for node, nbrlist in d.items(): + for nbr in nbrlist: + if nbr not in seen: + G.add_edge(node, nbr) + seen[node] = 1 # don't allow reverse edge to show up + else: + G.add_edges_from( + ((node, nbr) for node, nbrlist in d.items() for nbr in nbrlist) + ) + return G + + +def to_dict_of_dicts(G, nodelist=None, edge_data=None): + """Returns adjacency representation of graph as a dictionary of dictionaries. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list + Use only nodes specified in nodelist. If None, all nodes in G. + + edge_data : scalar, optional (default: the G edgedatadict for each edge) + If provided, the value of the dictionary will be set to `edge_data` for + all edges. Usual values could be `1` or `True`. If `edge_data` is + `None` (the default), the edgedata in `G` is used, resulting in a + dict-of-dict-of-dicts. If `G` is a MultiGraph, the result will be a + dict-of-dict-of-dict-of-dicts. See Notes for an approach to customize + handling edge data. `edge_data` should *not* be a container as it will + be the same container for all the edges. + + Returns + ------- + dod : dict + A nested dictionary representation of `G`. Note that the level of + nesting depends on the type of `G` and the value of `edge_data` + (see Examples). + + See Also + -------- + from_dict_of_dicts, to_dict_of_lists + + Notes + ----- + For a more custom approach to handling edge data, try:: + + dod = { + n: {nbr: custom(n, nbr, dd) for nbr, dd in nbrdict.items()} + for n, nbrdict in G.adj.items() + } + + where `custom` returns the desired edge data for each edge between `n` and + `nbr`, given existing edge data `dd`. + + Examples + -------- + >>> G = nx.path_graph(3) + >>> nx.to_dict_of_dicts(G) + {0: {1: {}}, 1: {0: {}, 2: {}}, 2: {1: {}}} + + Edge data is preserved by default (``edge_data=None``), resulting + in dict-of-dict-of-dicts where the innermost dictionary contains the + edge data: + + >>> G = nx.Graph() + >>> G.add_edges_from( + ... [ + ... (0, 1, {"weight": 1.0}), + ... (1, 2, {"weight": 2.0}), + ... (2, 0, {"weight": 1.0}), + ... ] + ... 
) + >>> d = nx.to_dict_of_dicts(G) + >>> d # doctest: +SKIP + {0: {1: {'weight': 1.0}, 2: {'weight': 1.0}}, + 1: {0: {'weight': 1.0}, 2: {'weight': 2.0}}, + 2: {1: {'weight': 2.0}, 0: {'weight': 1.0}}} + >>> d[1][2]["weight"] + 2.0 + + If `edge_data` is not `None`, edge data in the original graph (if any) is + replaced: + + >>> d = nx.to_dict_of_dicts(G, edge_data=1) + >>> d + {0: {1: 1, 2: 1}, 1: {0: 1, 2: 1}, 2: {1: 1, 0: 1}} + >>> d[1][2] + 1 + + This also applies to MultiGraphs: edge data is preserved by default: + + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1, key="a", weight=1.0) + 'a' + >>> G.add_edge(0, 1, key="b", weight=5.0) + 'b' + >>> d = nx.to_dict_of_dicts(G) + >>> d # doctest: +SKIP + {0: {1: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}, + 1: {0: {'a': {'weight': 1.0}, 'b': {'weight': 5.0}}}} + >>> d[0][1]["b"]["weight"] + 5.0 + + But multi edge data is lost if `edge_data` is not `None`: + + >>> d = nx.to_dict_of_dicts(G, edge_data=10) + >>> d + {0: {1: 10}, 1: {0: 10}} + """ + dod = {} + if nodelist is None: + if edge_data is None: + for u, nbrdict in G.adjacency(): + dod[u] = nbrdict.copy() + else: # edge_data is not None + for u, nbrdict in G.adjacency(): + dod[u] = dod.fromkeys(nbrdict, edge_data) + else: # nodelist is not None + if edge_data is None: + for u in nodelist: + dod[u] = {} + for v, data in ((v, data) for v, data in G[u].items() if v in nodelist): + dod[u][v] = data + else: # nodelist and edge_data are not None + for u in nodelist: + dod[u] = {} + for v in (v for v in G[u] if v in nodelist): + dod[u][v] = edge_data + return dod + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_dict_of_dicts(d, create_using=None, multigraph_input=False): + """Returns a graph from a dictionary of dictionaries. + + Parameters + ---------- + d : dictionary of dictionaries + A dictionary of dictionaries adjacency representation. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + multigraph_input : bool (default False) + When True, the dict `d` is assumed + to be a dict-of-dict-of-dict-of-dict structure keyed by + node to neighbor to edge keys to edge data for multi-edges. + Otherwise this routine assumes dict-of-dict-of-dict keyed by + node to neighbor to edge data. + + Examples + -------- + >>> dod = {0: {1: {"weight": 1}}} # single edge (0,1) + >>> G = nx.from_dict_of_dicts(dod) + + or + + >>> G = nx.Graph(dod) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_nodes_from(d) + # does dict d represent a MultiGraph or MultiDiGraph? 
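+    # With multigraph_input=True the innermost mapping is {edge_key: data},
+    # e.g. {0: {1: {"a": {"weight": 1}}}}; with the default False the inner
+    # mapping is the edge-data dict itself, e.g. {0: {1: {"weight": 1}}}.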
+ if multigraph_input: + if G.is_directed(): + if G.is_multigraph(): + G.add_edges_from( + (u, v, key, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) + else: + G.add_edges_from( + (u, v, data) + for u, nbrs in d.items() + for v, datadict in nbrs.items() + for key, data in datadict.items() + ) + else: # Undirected + if G.is_multigraph(): + seen = set() # don't add both directions of undirected graph + for u, nbrs in d.items(): + for v, datadict in nbrs.items(): + if (u, v) not in seen: + G.add_edges_from( + (u, v, key, data) for key, data in datadict.items() + ) + seen.add((v, u)) + else: + seen = set() # don't add both directions of undirected graph + for u, nbrs in d.items(): + for v, datadict in nbrs.items(): + if (u, v) not in seen: + G.add_edges_from( + (u, v, data) for key, data in datadict.items() + ) + seen.add((v, u)) + + else: # not a multigraph to multigraph transfer + if G.is_multigraph() and not G.is_directed(): + # d can have both representations u-v, v-u in dict. Only add one. + # We don't need this check for digraphs since we add both directions, + # or for Graph() since it is done implicitly (parallel edges not allowed) + seen = set() + for u, nbrs in d.items(): + for v, data in nbrs.items(): + if (u, v) not in seen: + G.add_edge(u, v, key=0) + G[u][v][0].update(data) + seen.add((v, u)) + else: + G.add_edges_from( + ((u, v, data) for u, nbrs in d.items() for v, data in nbrs.items()) + ) + return G + + +@nx._dispatchable(preserve_edge_attrs=True) +def to_edgelist(G, nodelist=None): + """Returns a list of edges in the graph. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list + Use only nodes specified in nodelist + + """ + if nodelist is None: + return G.edges(data=True) + return G.edges(nodelist, data=True) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_edgelist(edgelist, create_using=None): + """Returns a graph from a list of edges. + + Parameters + ---------- + edgelist : list or iterator + Edge tuples + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Examples + -------- + >>> edgelist = [(0, 1)] # single edge (0,1) + >>> G = nx.from_edgelist(edgelist) + + or + + >>> G = nx.Graph(edgelist) # use Graph constructor + + """ + G = nx.empty_graph(0, create_using) + G.add_edges_from(edgelist) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/convert_matrix.py b/.venv/lib/python3.12/site-packages/networkx/convert_matrix.py new file mode 100644 index 0000000..72a5517 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/convert_matrix.py @@ -0,0 +1,1314 @@ +"""Functions to convert NetworkX graphs to and from common data containers +like numpy arrays, scipy sparse arrays, and pandas DataFrames. + +The preferred way of converting data to a NetworkX graph is through the +graph constructor. The constructor calls the `~networkx.convert.to_networkx_graph` +function which attempts to guess the input type and convert it automatically. + +Examples +-------- +Create a 10 node random graph from a numpy array + +>>> import numpy as np +>>> rng = np.random.default_rng() +>>> a = rng.integers(low=0, high=2, size=(10, 10)) +>>> DG = nx.from_numpy_array(a, create_using=nx.DiGraph) + +or equivalently: + +>>> DG = nx.DiGraph(a) + +which calls `from_numpy_array` internally based on the type of ``a``. 
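+
+Similarly, an edge list held in a pandas DataFrame can be converted with
+`from_pandas_edgelist`; a minimal sketch using its default "source" and
+"target" column names:
+
+>>> import pandas as pd
+>>> df = pd.DataFrame({"source": ["a", "b"], "target": ["b", "c"]})
+>>> G = nx.from_pandas_edgelist(df)
+>>> sorted(G.edges)
+[('a', 'b'), ('b', 'c')]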
+ +See Also +-------- +nx_agraph, nx_pydot +""" + +import itertools +from collections import defaultdict + +import networkx as nx + +__all__ = [ + "from_pandas_adjacency", + "to_pandas_adjacency", + "from_pandas_edgelist", + "to_pandas_edgelist", + "from_scipy_sparse_array", + "to_scipy_sparse_array", + "from_numpy_array", + "to_numpy_array", +] + + +@nx._dispatchable(edge_attrs="weight") +def to_pandas_adjacency( + G, + nodelist=None, + dtype=None, + order=None, + multigraph_weight=sum, + weight="weight", + nonedge=0.0, +): + """Returns the graph adjacency matrix as a Pandas DataFrame. + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the Pandas DataFrame. + + nodelist : list, optional + The rows and columns are ordered according to the nodes in `nodelist`. + If `nodelist` is None, then the ordering is produced by G.nodes(). + + multigraph_weight : {sum, min, max}, optional + An operator that determines how weights in multigraphs are handled. + The default is to sum the weights of the multiple edges. + + weight : string or None, optional + The edge attribute that holds the numerical value used for + the edge weight. If an edge does not have that attribute, then the + value 1 is used instead. + + nonedge : float, optional + The matrix values corresponding to nonedges are typically set to zero. + However, this could be undesirable if there are matrix values + corresponding to actual edges that also have the value zero. If so, + one might prefer nonedges to have some other value, such as nan. + + Returns + ------- + df : Pandas DataFrame + Graph adjacency matrix + + Notes + ----- + For directed graphs, entry i,j corresponds to an edge from i to j. + + The DataFrame entries are assigned to the weight edge attribute. When + an edge does not have a weight attribute, the value of the entry is set to + the number 1. For multiple (parallel) edges, the values of the entries + are determined by the 'multigraph_weight' parameter. The default is to + sum the weight attributes for each of the parallel edges. + + When `nodelist` does not contain every node in `G`, the matrix is built + from the subgraph of `G` that is induced by the nodes in `nodelist`. + + The convention used for self-loop edges in graphs is to assign the + diagonal matrix entry value to the weight attribute of the edge + (or the number 1 if the edge has no weight attribute). If the + alternate convention of doubling the edge weight is desired the + resulting Pandas DataFrame can be modified as follows:: + + >>> import pandas as pd + >>> G = nx.Graph([(1, 1), (2, 2)]) + >>> df = nx.to_pandas_adjacency(G) + >>> df + 1 2 + 1 1.0 0.0 + 2 0.0 1.0 + >>> diag_idx = list(range(len(df))) + >>> df.iloc[diag_idx, diag_idx] *= 2 + >>> df + 1 2 + 1 2.0 0.0 + 2 0.0 2.0 + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1, weight=2) + 0 + >>> G.add_edge(1, 0) + 0 + >>> G.add_edge(2, 2, weight=3) + 0 + >>> G.add_edge(2, 2) + 1 + >>> nx.to_pandas_adjacency(G, nodelist=[0, 1, 2], dtype=int) + 0 1 2 + 0 0 2 0 + 1 1 0 0 + 2 0 0 4 + + """ + import pandas as pd + + M = to_numpy_array( + G, + nodelist=nodelist, + dtype=dtype, + order=order, + multigraph_weight=multigraph_weight, + weight=weight, + nonedge=nonedge, + ) + if nodelist is None: + nodelist = list(G) + return pd.DataFrame(data=M, index=nodelist, columns=nodelist) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pandas_adjacency(df, create_using=None): + r"""Returns a graph from Pandas DataFrame. 
+ + The Pandas DataFrame is interpreted as an adjacency matrix for the graph. + + Parameters + ---------- + df : Pandas DataFrame + An adjacency matrix representation of a graph + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of df corresponds to an edge from i to j. + + If `df` has a single data type for each entry it will be converted to an + appropriate Python data type. + + If you have node attributes stored in a separate dataframe `df_nodes`, + you can load those attributes to the graph `G` using the following code:: + + df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]}) + G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows()) + + If `df` has a user-specified compound data type the names + of the data fields will be used as attribute keys in the resulting + NetworkX graph. + + See Also + -------- + to_pandas_adjacency + + Examples + -------- + Simple integer weights on edges: + + >>> import pandas as pd + >>> pd.options.display.max_columns = 20 + >>> df = pd.DataFrame([[1, 1], [2, 1]]) + >>> df + 0 1 + 0 1 1 + 1 2 1 + >>> G = nx.from_pandas_adjacency(df) + >>> G.name = "Graph from pandas adjacency matrix" + >>> print(G) + Graph named 'Graph from pandas adjacency matrix' with 2 nodes and 3 edges + """ + + try: + df = df[df.index] + except Exception as err: + missing = list(set(df.index).difference(set(df.columns))) + msg = f"{missing} not in columns" + raise nx.NetworkXError("Columns must match Indices.", msg) from err + + A = df.values + G = from_numpy_array(A, create_using=create_using, nodelist=df.columns) + + return G + + +@nx._dispatchable(preserve_edge_attrs=True) +def to_pandas_edgelist( + G, + source="source", + target="target", + nodelist=None, + dtype=None, + edge_key=None, +): + """Returns the graph edge list as a Pandas DataFrame. + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the Pandas DataFrame. + + source : str or int, optional + A valid column name (string or integer) for the source nodes (for the + directed case). + + target : str or int, optional + A valid column name (string or integer) for the target nodes (for the + directed case). + + nodelist : list, optional + Use only nodes specified in nodelist + + dtype : dtype, default None + Use to create the DataFrame. Data type to force. + Only a single dtype is allowed. If None, infer. + + edge_key : str or int or None, optional (default=None) + A valid column name (string or integer) for the edge keys (for the + multigraph case). If None, edge keys are not stored in the DataFrame. + + Returns + ------- + df : Pandas DataFrame + Graph edge list + + Examples + -------- + >>> G = nx.Graph( + ... [ + ... ("A", "B", {"cost": 1, "weight": 7}), + ... ("C", "E", {"cost": 9, "weight": 10}), + ... ] + ... 
) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"]) + >>> df[["source", "target", "cost", "weight"]] + source target cost weight + 0 A B 1 7 + 1 C E 9 10 + + >>> G = nx.MultiGraph([("A", "B", {"cost": 1}), ("A", "B", {"cost": 9})]) + >>> df = nx.to_pandas_edgelist(G, nodelist=["A", "C"], edge_key="ekey") + >>> df[["source", "target", "cost", "ekey"]] + source target cost ekey + 0 A B 1 0 + 1 A B 9 1 + + """ + import pandas as pd + + if nodelist is None: + edgelist = G.edges(data=True) + else: + edgelist = G.edges(nodelist, data=True) + source_nodes = [s for s, _, _ in edgelist] + target_nodes = [t for _, t, _ in edgelist] + + all_attrs = set().union(*(d.keys() for _, _, d in edgelist)) + if source in all_attrs: + raise nx.NetworkXError(f"Source name {source!r} is an edge attr name") + if target in all_attrs: + raise nx.NetworkXError(f"Target name {target!r} is an edge attr name") + + nan = float("nan") + edge_attr = {k: [d.get(k, nan) for _, _, d in edgelist] for k in all_attrs} + + if G.is_multigraph() and edge_key is not None: + if edge_key in all_attrs: + raise nx.NetworkXError(f"Edge key name {edge_key!r} is an edge attr name") + edge_keys = [k for _, _, k in G.edges(keys=True)] + edgelistdict = {source: source_nodes, target: target_nodes, edge_key: edge_keys} + else: + edgelistdict = {source: source_nodes, target: target_nodes} + + edgelistdict.update(edge_attr) + return pd.DataFrame(edgelistdict, dtype=dtype) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pandas_edgelist( + df, + source="source", + target="target", + edge_attr=None, + create_using=None, + edge_key=None, +): + """Returns a graph from Pandas DataFrame containing an edge list. + + The Pandas DataFrame should contain at least two columns of node names and + zero or more columns of edge attributes. Each row will be processed as one + edge instance. + + Note: This function iterates over DataFrame.values, which is not + guaranteed to retain the data type across columns in the row. This is only + a problem if your row is entirely numeric and a mix of ints and floats. In + that case, all values will be returned as floats. See the + DataFrame.iterrows documentation for an example. + + Parameters + ---------- + df : Pandas DataFrame + An edge list representation of a graph + + source : str or int + A valid column name (string or integer) for the source nodes (for the + directed case). + + target : str or int + A valid column name (string or integer) for the target nodes (for the + directed case). + + edge_attr : str or int, iterable, True, or None + A valid column name (str or int) or iterable of column names that are + used to retrieve items and add them to the graph as edge attributes. + If `True`, all columns will be added except `source`, `target` and `edge_key`. + If `None`, no edge attributes are added to the graph. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_key : str or None, optional (default=None) + A valid column name for the edge keys (for a MultiGraph). The values in + this column are used for the edge keys when adding edges if create_using + is a multigraph. 
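+
+    Returns
+    -------
+    G : graph
+        A NetworkX graph of the type given by `create_using`, with one edge
+        added per row of `df` (plus any requested edge attributes).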
+ + Notes + ----- + If you have node attributes stored in a separate dataframe `df_nodes`, + you can load those attributes to the graph `G` using the following code:: + + df_nodes = pd.DataFrame({"node_id": [1, 2, 3], "attribute1": ["A", "B", "C"]}) + G.add_nodes_from((n, dict(d)) for n, d in df_nodes.iterrows()) + + See Also + -------- + to_pandas_edgelist + + Examples + -------- + Simple integer weights on edges: + + >>> import pandas as pd + >>> pd.options.display.max_columns = 20 + >>> import numpy as np + >>> rng = np.random.RandomState(seed=5) + >>> ints = rng.randint(1, 11, size=(3, 2)) + >>> a = ["A", "B", "C"] + >>> b = ["D", "A", "E"] + >>> df = pd.DataFrame(ints, columns=["weight", "cost"]) + >>> df[0] = a + >>> df["b"] = b + >>> df[["weight", "cost", 0, "b"]] + weight cost 0 b + 0 4 7 A D + 1 7 1 B A + 2 10 9 C E + >>> G = nx.from_pandas_edgelist(df, 0, "b", ["weight", "cost"]) + >>> G["E"]["C"]["weight"] + 10 + >>> G["E"]["C"]["cost"] + 9 + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2], + ... "target": [2, 2, 3], + ... "weight": [3, 4, 5], + ... "color": ["red", "blue", "blue"], + ... } + ... ) + >>> G = nx.from_pandas_edgelist(edges, edge_attr=True) + >>> G[0][2]["color"] + 'red' + + Build multigraph with custom keys: + + >>> edges = pd.DataFrame( + ... { + ... "source": [0, 1, 2, 0], + ... "target": [2, 2, 3, 2], + ... "my_edge_key": ["A", "B", "C", "D"], + ... "weight": [3, 4, 5, 6], + ... "color": ["red", "blue", "blue", "blue"], + ... } + ... ) + >>> G = nx.from_pandas_edgelist( + ... edges, + ... edge_key="my_edge_key", + ... edge_attr=["weight", "color"], + ... create_using=nx.MultiGraph(), + ... ) + >>> G[0][2] + AtlasView({'A': {'weight': 3, 'color': 'red'}, 'D': {'weight': 6, 'color': 'blue'}}) + + + """ + g = nx.empty_graph(0, create_using) + + if edge_attr is None: + if g.is_multigraph() and edge_key is not None: + for u, v, k in zip(df[source], df[target], df[edge_key]): + g.add_edge(u, v, k) + else: + g.add_edges_from(zip(df[source], df[target])) + return g + + reserved_columns = [source, target] + if g.is_multigraph() and edge_key is not None: + reserved_columns.append(edge_key) + + # Additional columns requested + attr_col_headings = [] + attribute_data = [] + if edge_attr is True: + attr_col_headings = [c for c in df.columns if c not in reserved_columns] + elif isinstance(edge_attr, list | tuple): + attr_col_headings = edge_attr + else: + attr_col_headings = [edge_attr] + if len(attr_col_headings) == 0: + raise nx.NetworkXError( + f"Invalid edge_attr argument: No columns found with name: {attr_col_headings}" + ) + + try: + attribute_data = zip(*[df[col] for col in attr_col_headings]) + except (KeyError, TypeError) as err: + msg = f"Invalid edge_attr argument: {edge_attr}" + raise nx.NetworkXError(msg) from err + + if g.is_multigraph(): + # => append the edge keys from the df to the bundled data + if edge_key is not None: + try: + multigraph_edge_keys = df[edge_key] + attribute_data = zip(attribute_data, multigraph_edge_keys) + except (KeyError, TypeError) as err: + msg = f"Invalid edge_key argument: {edge_key}" + raise nx.NetworkXError(msg) from err + + for s, t, attrs in zip(df[source], df[target], attribute_data): + if edge_key is not None: + attrs, multigraph_edge_key = attrs + key = g.add_edge(s, t, key=multigraph_edge_key) + else: + key = g.add_edge(s, t) + + g[s][t][key].update(zip(attr_col_headings, attrs)) + else: + for s, t, attrs in zip(df[source], df[target], attribute_data): + g.add_edge(s, t) + g[s][t].update(zip(attr_col_headings, 
attrs))
+
+    return g
+
+
+@nx._dispatchable(edge_attrs="weight")
+def to_scipy_sparse_array(G, nodelist=None, dtype=None, weight="weight", format="csr"):
+    """Returns the graph adjacency matrix as a SciPy sparse array.
+
+    Parameters
+    ----------
+    G : graph
+        The NetworkX graph used to construct the sparse array.
+
+    nodelist : list, optional
+        The rows and columns are ordered according to the nodes in `nodelist`.
+        If `nodelist` is None, then the ordering is produced by ``G.nodes()``.
+
+    dtype : NumPy data-type, optional
+        A valid NumPy dtype used to initialize the array. If None, then the
+        NumPy default is used.
+
+    weight : string or None, optional (default='weight')
+        The edge attribute that holds the numerical value used for
+        the edge weight. If None then all edge weights are 1.
+
+    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
+        The format of the sparse array to be returned (default 'csr'). For
+        some algorithms different implementations of sparse arrays
+        can perform better. See [1]_ for details.
+
+    Returns
+    -------
+    A : SciPy sparse array
+        Graph adjacency matrix.
+
+    Notes
+    -----
+    For directed graphs, matrix entry ``i, j`` corresponds to an edge from
+    ``i`` to ``j``.
+
+    The values of the adjacency matrix are populated using the edge attribute held in
+    parameter `weight`. When an edge does not have that attribute, the
+    value of the entry is 1.
+
+    For multiple edges the matrix values are the sums of the edge weights.
+
+    When `nodelist` does not contain every node in `G`, the adjacency matrix
+    is built from the subgraph of `G` that is induced by the nodes in
+    `nodelist`.
+
+    The convention used for self-loop edges in graphs is to assign the
+    diagonal matrix entry value to the weight attribute of the edge
+    (or the number 1 if the edge has no weight attribute). If the
+    alternate convention of doubling the edge weight is desired the
+    resulting array can be modified as follows::
+
+        >>> G = nx.Graph([(1, 1)])
+        >>> A = nx.to_scipy_sparse_array(G)
+        >>> A.toarray()
+        array([[1]])
+        >>> A.setdiag(A.diagonal() * 2)
+        >>> A.toarray()
+        array([[2]])
+
+    Examples
+    --------
+
+    Basic usage:
+
+    >>> G = nx.path_graph(4)
+    >>> A = nx.to_scipy_sparse_array(G)
+    >>> A  # doctest: +SKIP
+    <Compressed Sparse Row sparse array of dtype 'int64'
+        with 6 stored elements and shape (4, 4)>
+    >>> A.toarray()
+    array([[0, 1, 0, 0],
+           [1, 0, 1, 0],
+           [0, 1, 0, 1],
+           [0, 0, 1, 0]])
+
+    .. note:: The `toarray` method is used in these examples to better visualize
+       the adjacency matrix. For a dense representation of the adjacency matrix,
+       use `to_numpy_array` instead.
+ + Directed graphs: + + >>> G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + >>> nx.to_scipy_sparse_array(G).toarray() + array([[0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + [0, 0, 0, 0]]) + + >>> H = G.reverse() + >>> H.edges + OutEdgeView([(1, 0), (2, 1), (3, 2)]) + >>> nx.to_scipy_sparse_array(H).toarray() + array([[0, 0, 0, 0], + [1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 0]]) + + By default, the order of the rows/columns of the adjacency matrix is determined + by the ordering of the nodes in `G`: + + >>> G = nx.Graph() + >>> G.add_nodes_from([3, 5, 0, 1]) + >>> G.add_edges_from([(1, 3), (1, 5)]) + >>> nx.to_scipy_sparse_array(G).toarray() + array([[0, 0, 0, 1], + [0, 0, 0, 1], + [0, 0, 0, 0], + [1, 1, 0, 0]]) + + The ordering of the rows can be changed with `nodelist`: + + >>> ordered = [0, 1, 3, 5] + >>> nx.to_scipy_sparse_array(G, nodelist=ordered).toarray() + array([[0, 0, 0, 0], + [0, 0, 1, 1], + [0, 1, 0, 0], + [0, 1, 0, 0]]) + + If `nodelist` contains a subset of the nodes in `G`, the adjacency matrix + for the node-induced subgraph is produced: + + >>> nx.to_scipy_sparse_array(G, nodelist=[1, 3, 5]).toarray() + array([[0, 1, 1], + [1, 0, 0], + [1, 0, 0]]) + + The values of the adjacency matrix are drawn from the edge attribute + specified by the `weight` parameter: + + >>> G = nx.path_graph(4) + >>> nx.set_edge_attributes( + ... G, values={(0, 1): 1, (1, 2): 10, (2, 3): 2}, name="weight" + ... ) + >>> nx.set_edge_attributes( + ... G, values={(0, 1): 50, (1, 2): 35, (2, 3): 10}, name="capacity" + ... ) + >>> nx.to_scipy_sparse_array(G).toarray() # Default weight="weight" + array([[ 0, 1, 0, 0], + [ 1, 0, 10, 0], + [ 0, 10, 0, 2], + [ 0, 0, 2, 0]]) + >>> nx.to_scipy_sparse_array(G, weight="capacity").toarray() + array([[ 0, 50, 0, 0], + [50, 0, 35, 0], + [ 0, 35, 0, 10], + [ 0, 0, 10, 0]]) + + Any edges that don't have a `weight` attribute default to 1: + + >>> G[1][2].pop("capacity") + 35 + >>> nx.to_scipy_sparse_array(G, weight="capacity").toarray() + array([[ 0, 50, 0, 0], + [50, 0, 1, 0], + [ 0, 1, 0, 10], + [ 0, 0, 10, 0]]) + + When `G` is a multigraph, the values in the adjacency matrix are given by + the sum of the `weight` edge attribute over each edge key: + + >>> G = nx.MultiDiGraph([(0, 1), (0, 1), (0, 1), (2, 0)]) + >>> nx.to_scipy_sparse_array(G).toarray() + array([[0, 3, 0], + [0, 0, 0], + [1, 0, 0]]) + + References + ---------- + .. [1] Scipy Dev. 
References, "Sparse Arrays", + https://docs.scipy.org/doc/scipy/reference/sparse.html + """ + import scipy as sp + + if len(G) == 0: + raise nx.NetworkXError("Graph has no nodes or edges") + + if nodelist is None: + nodelist = list(G) + nlen = len(G) + else: + nlen = len(nodelist) + if nlen == 0: + raise nx.NetworkXError("nodelist has no nodes") + nodeset = set(G.nbunch_iter(nodelist)) + if nlen != len(nodeset): + for n in nodelist: + if n not in G: + raise nx.NetworkXError(f"Node {n} in nodelist is not in G") + raise nx.NetworkXError("nodelist contains duplicates.") + if nlen < len(G): + G = G.subgraph(nodelist) + + index = dict(zip(nodelist, range(nlen))) + coefficients = zip( + *((index[u], index[v], wt) for u, v, wt in G.edges(data=weight, default=1)) + ) + try: + row, col, data = coefficients + except ValueError: + # there is no edge in the subgraph + row, col, data = [], [], [] + + if G.is_directed(): + A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, nlen), dtype=dtype) + else: + # symmetrize matrix + d = data + data + r = row + col + c = col + row + # selfloop entries get double counted when symmetrizing + # so we subtract the data on the diagonal + selfloops = list(nx.selfloop_edges(G, data=weight, default=1)) + if selfloops: + diag_index, diag_data = zip(*((index[u], -wt) for u, v, wt in selfloops)) + d += diag_data + r += diag_index + c += diag_index + A = sp.sparse.coo_array((d, (r, c)), shape=(nlen, nlen), dtype=dtype) + try: + return A.asformat(format) + except ValueError as err: + raise nx.NetworkXError(f"Unknown sparse matrix format: {format}") from err + + +def _csr_gen_triples(A): + """Converts a SciPy sparse array in **Compressed Sparse Row** format to + an iterable of weighted edge triples. + + """ + nrows = A.shape[0] + indptr, dst_indices, data = A.indptr, A.indices, A.data + import numpy as np + + src_indices = np.repeat(np.arange(nrows), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) + + +def _csc_gen_triples(A): + """Converts a SciPy sparse array in **Compressed Sparse Column** format to + an iterable of weighted edge triples. + + """ + ncols = A.shape[1] + indptr, src_indices, data = A.indptr, A.indices, A.data + import numpy as np + + dst_indices = np.repeat(np.arange(ncols), np.diff(indptr)) + return zip(src_indices.tolist(), dst_indices.tolist(), A.data.tolist()) + + +def _coo_gen_triples(A): + """Converts a SciPy sparse array in **Coordinate** format to an iterable + of weighted edge triples. + + """ + return zip(A.row.tolist(), A.col.tolist(), A.data.tolist()) + + +def _dok_gen_triples(A): + """Converts a SciPy sparse array in **Dictionary of Keys** format to an + iterable of weighted edge triples. + + """ + for (r, c), v in A.items(): + # Use `v.item()` to convert a NumPy scalar to the appropriate Python scalar + yield int(r), int(c), v.item() + + +def _generate_weighted_edges(A): + """Returns an iterable over (u, v, w) triples, where u and v are adjacent + vertices and w is the weight of the edge joining u and v. + + `A` is a SciPy sparse array (in any format). + + """ + if A.format == "csr": + return _csr_gen_triples(A) + if A.format == "csc": + return _csc_gen_triples(A) + if A.format == "dok": + return _dok_gen_triples(A) + # If A is in any other format (including COO), convert it to COO format. 
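+    # (lil, dia, and bsr arrays, for example, all provide ``.tocoo()``, so
+    # this fallback covers every remaining sparse format.)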
+ return _coo_gen_triples(A.tocoo()) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_scipy_sparse_array( + A, parallel_edges=False, create_using=None, edge_attribute="weight" +): + """Creates a new graph from an adjacency matrix given as a SciPy sparse + array. + + Parameters + ---------- + A: scipy.sparse array + An adjacency matrix representation of a graph + + parallel_edges : Boolean + If this is True, `create_using` is a multigraph, and `A` is an + integer matrix, then entry *(i, j)* in the matrix is interpreted as the + number of parallel edges joining vertices *i* and *j* in the graph. + If it is False, then the entries in the matrix are interpreted as + the weight of a single edge joining the vertices. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_attribute: string + Name of edge attribute to store matrix numeric value. The data will + have the same type as the matrix entry (int, float, (real,imag)). + + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or + :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the + entries of `A` are of type :class:`int`, then this function returns a + multigraph (constructed from `create_using`) with parallel edges. + In this case, `edge_attribute` will be ignored. + + If `create_using` indicates an undirected multigraph, then only the edges + indicated by the upper triangle of the matrix `A` will be added to the + graph. + + Examples + -------- + >>> import scipy as sp + >>> A = sp.sparse.eye(2, 2, 1) + >>> G = nx.from_scipy_sparse_array(A) + + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): + + >>> A = sp.sparse.csr_array([[1, 1], [1, 2]]) + >>> G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph) + >>> G[1][1] + AtlasView({0: {'weight': 2}}) + + If `create_using` indicates a multigraph and the matrix has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: + + >>> A = sp.sparse.csr_array([[1, 1], [1, 2]]) + >>> G = nx.from_scipy_sparse_array( + ... A, parallel_edges=True, create_using=nx.MultiGraph + ... ) + >>> G[1][1] + AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) + + """ + G = nx.empty_graph(0, create_using) + n, m = A.shape + if n != m: + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") + # Make sure we get even the isolated nodes of the graph. + G.add_nodes_from(range(n)) + # Create an iterable over (u, v, w) triples and for each triple, add an + # edge from u to v with weight w. + triples = _generate_weighted_edges(A) + # If the entries in the adjacency matrix are integers, the graph is a + # multigraph, and parallel_edges is True, then create parallel edges, each + # with weight 1, for each entry in the adjacency matrix. Otherwise, create + # one edge for each positive entry in the adjacency matrix and set the + # weight of that edge to be the entry in the matrix. 
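+    # Illustrative example (values assumed): if A[0, 1] == 3, then with
+    # parallel_edges=True a multigraph gains the edge (0, 1, {'weight': 1})
+    # three times, while parallel_edges=False yields the single edge
+    # (0, 1, {'weight': 3}).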
+    if A.dtype.kind in ("i", "u") and G.is_multigraph() and parallel_edges:
+        chain = itertools.chain.from_iterable
+        # The following line is equivalent to:
+        #
+        #     for (u, v) in edges:
+        #         for d in range(A[u, v]):
+        #             G.add_edge(u, v, weight=1)
+        #
+        triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
+    # If we are creating an undirected multigraph, only add the edges from the
+    # upper triangle of the matrix. Otherwise, add all the edges. This relies
+    # on the fact that the vertices created in the
+    # `_generate_weighted_edges()` function are actually the row/column
+    # indices for the matrix `A`.
+    #
+    # Without this check, we run into a problem where each edge is added twice
+    # when `G.add_weighted_edges_from()` is invoked below.
+    if G.is_multigraph() and not G.is_directed():
+        triples = ((u, v, d) for u, v, d in triples if u <= v)
+    G.add_weighted_edges_from(triples, weight=edge_attribute)
+    return G
+
+
+@nx._dispatchable(edge_attrs="weight")  # edge attrs may also be obtained from `dtype`
+def to_numpy_array(
+    G,
+    nodelist=None,
+    dtype=None,
+    order=None,
+    multigraph_weight=sum,
+    weight="weight",
+    nonedge=0.0,
+):
+    """Returns the graph adjacency matrix as a NumPy array.
+
+    Parameters
+    ----------
+    G : graph
+        The NetworkX graph used to construct the NumPy array.
+
+    nodelist : list, optional
+        The rows and columns are ordered according to the nodes in `nodelist`.
+        If `nodelist` is ``None``, then the ordering is produced by ``G.nodes()``.
+
+    dtype : NumPy data type, optional
+        A NumPy data type used to initialize the array. If None, then the NumPy
+        default is used. The dtype can be structured if `weight=None`, in which
+        case the dtype field names are used to look up edge attributes. The
+        result is a structured array where each named field in the dtype
+        corresponds to the adjacency for that edge attribute. See examples for
+        details.
+
+    order : {'C', 'F'}, optional
+        Whether to store multidimensional data in C- or Fortran-contiguous
+        (row- or column-wise) order in memory. If None, then the NumPy default
+        is used.
+
+    multigraph_weight : callable, optional
+        A function that determines how weights in multigraphs are handled.
+        The function should accept a sequence of weights and return a single
+        value. The default is to sum the weights of the multiple edges.
+
+    weight : string or None optional (default = 'weight')
+        The edge attribute that holds the numerical value used for
+        the edge weight. If an edge does not have that attribute, then the
+        value 1 is used instead. `weight` must be ``None`` if a structured
+        dtype is used.
+
+    nonedge : array_like (default = 0.0)
+        The value used to represent non-edges in the adjacency matrix.
+        The array values corresponding to nonedges are typically set to zero.
+        However, this could be undesirable if there are array values
+        corresponding to actual edges that also have the value zero. If so,
+        one might prefer nonedges to have some other value, such as ``nan``.
+
+    Returns
+    -------
+    A : NumPy ndarray
+        Graph adjacency matrix
+
+    Raises
+    ------
+    NetworkXError
+        If `dtype` is a structured dtype and `G` is a multigraph
+    ValueError
+        If `dtype` is a structured dtype and `weight` is not `None`
+
+    See Also
+    --------
+    from_numpy_array
+
+    Notes
+    -----
+    For directed graphs, entry ``i, j`` corresponds to an edge from ``i`` to ``j``.
+
+    Entries in the adjacency matrix are given by the `weight` edge attribute.
+    When an edge does not have a weight attribute, the value of the entry is
+    set to the number 1.
For multiple (parallel) edges, the values of the + entries are determined by the `multigraph_weight` parameter. The default is + to sum the weight attributes for each of the parallel edges. + + When `nodelist` does not contain every node in `G`, the adjacency matrix is + built from the subgraph of `G` that is induced by the nodes in `nodelist`. + + The convention used for self-loop edges in graphs is to assign the + diagonal array entry value to the weight attribute of the edge + (or the number 1 if the edge has no weight attribute). If the + alternate convention of doubling the edge weight is desired the + resulting NumPy array can be modified as follows: + + >>> import numpy as np + >>> G = nx.Graph([(1, 1)]) + >>> A = nx.to_numpy_array(G) + >>> A + array([[1.]]) + >>> A[np.diag_indices_from(A)] *= 2 + >>> A + array([[2.]]) + + Examples + -------- + >>> G = nx.MultiDiGraph() + >>> G.add_edge(0, 1, weight=2) + 0 + >>> G.add_edge(1, 0) + 0 + >>> G.add_edge(2, 2, weight=3) + 0 + >>> G.add_edge(2, 2) + 1 + >>> nx.to_numpy_array(G, nodelist=[0, 1, 2]) + array([[0., 2., 0.], + [1., 0., 0.], + [0., 0., 4.]]) + + When `nodelist` argument is used, nodes of `G` which do not appear in the `nodelist` + and their edges are not included in the adjacency matrix. Here is an example: + + >>> G = nx.Graph() + >>> G.add_edge(3, 1) + >>> G.add_edge(2, 0) + >>> G.add_edge(2, 1) + >>> G.add_edge(3, 0) + >>> nx.to_numpy_array(G, nodelist=[1, 2, 3]) + array([[0., 1., 1.], + [1., 0., 0.], + [1., 0., 0.]]) + + This function can also be used to create adjacency matrices for multiple + edge attributes with structured dtypes: + + >>> G = nx.Graph() + >>> G.add_edge(0, 1, weight=10) + >>> G.add_edge(1, 2, cost=5) + >>> G.add_edge(2, 3, weight=3, cost=-4.0) + >>> dtype = np.dtype([("weight", int), ("cost", float)]) + >>> A = nx.to_numpy_array(G, dtype=dtype, weight=None) + >>> A["weight"] + array([[ 0, 10, 0, 0], + [10, 0, 1, 0], + [ 0, 1, 0, 3], + [ 0, 0, 3, 0]]) + >>> A["cost"] + array([[ 0., 1., 0., 0.], + [ 1., 0., 5., 0.], + [ 0., 5., 0., -4.], + [ 0., 0., -4., 0.]]) + + As stated above, the argument "nonedge" is useful especially when there are + actually edges with weight 0 in the graph. Setting a nonedge value different than 0, + makes it much clearer to differentiate such 0-weighted edges and actual nonedge values. + + >>> G = nx.Graph() + >>> G.add_edge(3, 1, weight=2) + >>> G.add_edge(2, 0, weight=0) + >>> G.add_edge(2, 1, weight=0) + >>> G.add_edge(3, 0, weight=1) + >>> nx.to_numpy_array(G, nonedge=-1.0) + array([[-1., 2., -1., 1.], + [ 2., -1., 0., -1.], + [-1., 0., -1., 0.], + [ 1., -1., 0., -1.]]) + """ + import numpy as np + + if nodelist is None: + nodelist = list(G) + nlen = len(nodelist) + + # Input validation + nodeset = set(nodelist) + if nodeset - set(G): + raise nx.NetworkXError(f"Nodes {nodeset - set(G)} in nodelist is not in G") + if len(nodeset) < nlen: + raise nx.NetworkXError("nodelist contains duplicates.") + + A = np.full((nlen, nlen), fill_value=nonedge, dtype=dtype, order=order) + + # Corner cases: empty nodelist or graph without any edges + if nlen == 0 or G.number_of_edges() == 0: + return A + + # If dtype is structured and weight is None, use dtype field names as + # edge attributes + edge_attrs = None # Only single edge attribute by default + if A.dtype.names: + if weight is None: + edge_attrs = dtype.names + else: + raise ValueError( + "Specifying `weight` not supported for structured dtypes\n." + "To create adjacency matrices from structured dtypes, use `weight=None`." 
+ ) + + # Map nodes to row/col in matrix + idx = dict(zip(nodelist, range(nlen))) + if len(nodelist) < len(G): + G = G.subgraph(nodelist).copy() + + # Collect all edge weights and reduce with `multigraph_weights` + if G.is_multigraph(): + if edge_attrs: + raise nx.NetworkXError( + "Structured arrays are not supported for MultiGraphs" + ) + d = defaultdict(list) + for u, v, wt in G.edges(data=weight, default=1.0): + d[(idx[u], idx[v])].append(wt) + i, j = np.array(list(d.keys())).T # indices + wts = [multigraph_weight(ws) for ws in d.values()] # reduced weights + else: + i, j, wts = [], [], [] + + # Special branch: multi-attr adjacency from structured dtypes + if edge_attrs: + # Extract edges with all data + for u, v, data in G.edges(data=True): + i.append(idx[u]) + j.append(idx[v]) + wts.append(data) + # Map each attribute to the appropriate named field in the + # structured dtype + for attr in edge_attrs: + attr_data = [wt.get(attr, 1.0) for wt in wts] + A[attr][i, j] = attr_data + if not G.is_directed(): + A[attr][j, i] = attr_data + return A + + for u, v, wt in G.edges(data=weight, default=1.0): + i.append(idx[u]) + j.append(idx[v]) + wts.append(wt) + + # Set array values with advanced indexing + A[i, j] = wts + if not G.is_directed(): + A[j, i] = wts + + return A + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_numpy_array( + A, parallel_edges=False, create_using=None, edge_attr="weight", *, nodelist=None +): + """Returns a graph from a 2D NumPy array. + + The 2D NumPy array is interpreted as an adjacency matrix for the graph. + + Parameters + ---------- + A : a 2D numpy.ndarray + An adjacency matrix representation of a graph + + parallel_edges : Boolean + If this is True, `create_using` is a multigraph, and `A` is an + integer array, then entry *(i, j)* in the array is interpreted as the + number of parallel edges joining vertices *i* and *j* in the graph. + If it is False, then the entries in the array are interpreted as + the weight of a single edge joining the vertices. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + edge_attr : String, optional (default="weight") + The attribute to which the array values are assigned on each edge. If + it is None, edge attributes will not be assigned. + + nodelist : sequence of nodes, optional + A sequence of objects to use as the nodes in the graph. If provided, the + list of nodes must be the same length as the dimensions of `A`. The + default is `None`, in which case the nodes are drawn from ``range(n)``. + + Notes + ----- + For directed graphs, explicitly mention create_using=nx.DiGraph, + and entry i,j of A corresponds to an edge from i to j. + + If `create_using` is :class:`networkx.MultiGraph` or + :class:`networkx.MultiDiGraph`, `parallel_edges` is True, and the + entries of `A` are of type :class:`int`, then this function returns a + multigraph (of the same type as `create_using`) with parallel edges. + + If `create_using` indicates an undirected multigraph, then only the edges + indicated by the upper triangle of the array `A` will be added to the + graph. + + If `edge_attr` is Falsy (False or None), edge attributes will not be + assigned, and the array data will be treated like a binary mask of + edge presence or absence. Otherwise, the attributes will be assigned + as follows: + + If the NumPy array has a single data type for each array entry it + will be converted to an appropriate Python data type. 
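+    For example, an ``int64`` entry of ``2`` is stored on the edge as the
+    Python ``int`` ``2``.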
+ + If the NumPy array has a user-specified compound data type the names + of the data fields will be used as attribute keys in the resulting + NetworkX graph. + + See Also + -------- + to_numpy_array + + Examples + -------- + Simple integer weights on edges: + + >>> import numpy as np + >>> A = np.array([[1, 1], [2, 1]]) + >>> G = nx.from_numpy_array(A) + >>> G.edges(data=True) + EdgeDataView([(0, 0, {'weight': 1}), (0, 1, {'weight': 2}), (1, 1, {'weight': 1})]) + + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is False, then the entries will be treated + as weights for edges joining the nodes (without creating parallel edges): + + >>> A = np.array([[1, 1], [1, 2]]) + >>> G = nx.from_numpy_array(A, create_using=nx.MultiGraph) + >>> G[1][1] + AtlasView({0: {'weight': 2}}) + + If `create_using` indicates a multigraph and the array has only integer + entries and `parallel_edges` is True, then the entries will be treated + as the number of parallel edges joining those two vertices: + + >>> A = np.array([[1, 1], [1, 2]]) + >>> temp = nx.MultiGraph() + >>> G = nx.from_numpy_array(A, parallel_edges=True, create_using=temp) + >>> G[1][1] + AtlasView({0: {'weight': 1}, 1: {'weight': 1}}) + + User defined compound data type on edges: + + >>> dt = [("weight", float), ("cost", int)] + >>> A = np.array([[(1.0, 2)]], dtype=dt) + >>> G = nx.from_numpy_array(A) + >>> G.edges() + EdgeView([(0, 0)]) + >>> G[0][0]["cost"] + 2 + >>> G[0][0]["weight"] + 1.0 + + """ + kind_to_python_type = { + "f": float, + "i": int, + "u": int, + "b": bool, + "c": complex, + "S": str, + "U": str, + "V": "void", + } + G = nx.empty_graph(0, create_using) + if A.ndim != 2: + raise nx.NetworkXError(f"Input array must be 2D, not {A.ndim}") + n, m = A.shape + if n != m: + raise nx.NetworkXError(f"Adjacency matrix not square: nx,ny={A.shape}") + dt = A.dtype + try: + python_type = kind_to_python_type[dt.kind] + except Exception as err: + raise TypeError(f"Unknown numpy data type: {dt}") from err + if _default_nodes := (nodelist is None): + nodelist = range(n) + else: + if len(nodelist) != n: + raise ValueError("nodelist must have the same length as A.shape[0]") + + # Make sure we get even the isolated nodes of the graph. + G.add_nodes_from(nodelist) + # Get a list of all the entries in the array with nonzero entries. These + # coordinates become edges in the graph. (convert to int from np.int64) + edges = ((int(e[0]), int(e[1])) for e in zip(*A.nonzero())) + # handle numpy constructed data type + if python_type == "void": + # Sort the fields by their offset, then by dtype, then by name. + fields = sorted( + (offset, dtype, name) for name, (dtype, offset) in A.dtype.fields.items() + ) + triples = ( + ( + u, + v, + {} + if edge_attr in [False, None] + else { + name: kind_to_python_type[dtype.kind](val) + for (_, dtype, name), val in zip(fields, A[u, v]) + }, + ) + for u, v in edges + ) + # If the entries in the adjacency matrix are integers, the graph is a + # multigraph, and parallel_edges is True, then create parallel edges, each + # with weight 1, for each entry in the adjacency matrix. Otherwise, create + # one edge for each positive entry in the adjacency matrix and set the + # weight of that edge to be the entry in the matrix. 
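+    # Illustrative (assumed values): A = np.array([[0, 2], [0, 0]]) with a
+    # MultiDiGraph and parallel_edges=True produces two unit-weight edges
+    # (0, 1), whereas parallel_edges=False produces one edge with weight 2.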
+    elif python_type is int and G.is_multigraph() and parallel_edges:
+        chain = itertools.chain.from_iterable
+        # The following line is equivalent to:
+        #
+        #     for (u, v) in edges:
+        #         for d in range(A[u, v]):
+        #             G.add_edge(u, v, weight=1)
+        #
+        if edge_attr in [False, None]:
+            triples = chain(((u, v, {}) for d in range(A[u, v])) for (u, v) in edges)
+        else:
+            triples = chain(
+                ((u, v, {edge_attr: 1}) for d in range(A[u, v])) for (u, v) in edges
+            )
+    else:  # basic data type
+        if edge_attr in [False, None]:
+            triples = ((u, v, {}) for u, v in edges)
+        else:
+            triples = ((u, v, {edge_attr: python_type(A[u, v])}) for u, v in edges)
+    # If we are creating an undirected multigraph, only add the edges from the
+    # upper triangle of the matrix. Otherwise, add all the edges. This relies
+    # on the fact that the (u, v) pairs produced from ``A.nonzero()`` above
+    # are actually the row/column indices for the matrix `A`.
+    #
+    # Without this check, we run into a problem where each edge is added twice
+    # when `G.add_edges_from()` is invoked below.
+    if G.is_multigraph() and not G.is_directed():
+        triples = ((u, v, d) for u, v, d in triples if u <= v)
+    # Remap nodes if user provided custom `nodelist`
+    if not _default_nodes:
+        idx_to_node = dict(enumerate(nodelist))
+        triples = ((idx_to_node[u], idx_to_node[v], d) for u, v, d in triples)
+    G.add_edges_from(triples)
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__init__.py b/.venv/lib/python3.12/site-packages/networkx/drawing/__init__.py
new file mode 100644
index 0000000..0f53309
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/drawing/__init__.py
@@ -0,0 +1,7 @@
+# graph drawing and interface to graphviz
+
+from .layout import *
+from .nx_latex import *
+from .nx_pylab import *
+from . import nx_agraph
+from . 
import nx_pydot diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..ecde5e3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/layout.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/layout.cpython-312.pyc new file mode 100644 index 0000000..9591991 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/layout.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-312.pyc new file mode 100644 index 0000000..a024613 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_agraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-312.pyc new file mode 100644 index 0000000..ab51682 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_latex.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-312.pyc new file mode 100644 index 0000000..8267865 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pydot.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-312.pyc new file mode 100644 index 0000000..d3a9a8b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/layout.py b/.venv/lib/python3.12/site-packages/networkx/drawing/layout.py new file mode 100644 index 0000000..5a447d5 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/layout.py @@ -0,0 +1,2028 @@ +""" +****** +Layout +****** + +Node positioning algorithms for graph drawing. + +For `random_layout()` the possible resulting shape +is a square of side [0, scale] (default: [0, 1]) +Changing `center` shifts the layout by that amount. + +For the other layout routines, the extent is +[center - scale, center + scale] (default: [-1, 1]). + +Warning: Most layout routines have only been tested in 2-dimensions. + +""" + +import networkx as nx +from networkx.utils import np_random_state + +__all__ = [ + "bipartite_layout", + "circular_layout", + "forceatlas2_layout", + "kamada_kawai_layout", + "random_layout", + "rescale_layout", + "rescale_layout_dict", + "shell_layout", + "spring_layout", + "spectral_layout", + "planar_layout", + "fruchterman_reingold_layout", + "spiral_layout", + "multipartite_layout", + "bfs_layout", + "arf_layout", +] + + +def _process_params(G, center, dim): + # Some boilerplate code. 
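+    # (a bare node container is wrapped in a Graph, and `center` defaults to
+    # the origin and must have length `dim`)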
+ import numpy as np + + if not isinstance(G, nx.Graph): + empty_graph = nx.Graph() + empty_graph.add_nodes_from(G) + G = empty_graph + + if center is None: + center = np.zeros(dim) + else: + center = np.asarray(center) + + if len(center) != dim: + msg = "length of center coordinates must match dimension of layout" + raise ValueError(msg) + + return G, center + + +@np_random_state(3) +def random_layout(G, center=None, dim=2, seed=None, store_pos_as=None): + """Position nodes uniformly at random in the unit square. + + For every node, a position is generated by choosing each of dim + coordinates uniformly at random on the interval [0.0, 1.0). + + NumPy (http://scipy.org) is required for this function. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout. + + seed : int, RandomState instance or None optional (default=None) + Set the random state for deterministic node layouts. + If int, `seed` is the seed used by the random number generator, + if numpy.random.RandomState instance, `seed` is the random + number generator, + if None, the random number generator is the RandomState instance used + by numpy.random. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.lollipop_graph(4, 3) + >>> pos = nx.random_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.random_layout(G, seed=42, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([0.37454012, 0.9507143 ], dtype=float32), + 1: array([0.7319939, 0.5986585], dtype=float32), + 2: array([0.15601864, 0.15599452], dtype=float32), + 3: array([0.05808361, 0.8661761 ], dtype=float32), + 4: array([0.601115 , 0.7080726], dtype=float32), + 5: array([0.02058449, 0.96990985], dtype=float32), + 6: array([0.83244264, 0.21233912], dtype=float32)} + """ + import numpy as np + + G, center = _process_params(G, center, dim) + pos = seed.rand(len(G), dim) + center + pos = pos.astype(np.float32) + pos = dict(zip(G, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + return pos + + +def circular_layout(G, scale=1, center=None, dim=2, store_pos_as=None): + # dim=2 only + """Position nodes on a circle. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout. + If dim>2, the remaining dimensions are set to zero + in the returned positions. + If dim<2, a ValueError is raised. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. 
+ + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Raises + ------ + ValueError + If dim < 2 + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> pos = nx.circular_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.circular_layout(G, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([9.99999986e-01, 2.18556937e-08]), + 1: array([-3.57647606e-08, 1.00000000e+00]), + 2: array([-9.9999997e-01, -6.5567081e-08]), + 3: array([ 1.98715071e-08, -9.99999956e-01])} + + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. + + """ + import numpy as np + + if dim < 2: + raise ValueError("cannot handle dimensions < 2") + + G, center = _process_params(G, center, dim) + + paddims = max(0, (dim - 2)) + + if len(G) == 0: + pos = {} + elif len(G) == 1: + pos = {nx.utils.arbitrary_element(G): center} + else: + # Discard the extra angle since it matches 0 radians. + theta = np.linspace(0, 1, len(G) + 1)[:-1] * 2 * np.pi + theta = theta.astype(np.float32) + pos = np.column_stack( + [np.cos(theta), np.sin(theta), np.zeros((len(G), paddims))] + ) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(G, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +def shell_layout( + G, nlist=None, rotate=None, scale=1, center=None, dim=2, store_pos_as=None +): + """Position nodes in concentric circles. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + nlist : list of lists + List of node lists for each shell. + + rotate : angle in radians (default=pi/len(nlist)) + Angle by which to rotate the starting position of each shell + relative to the starting position of the previous shell. + To recreate behavior before v2.5 use rotate=0. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout, currently only dim=2 is supported. + Other dimension values result in a ValueError. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Raises + ------ + ValueError + If dim != 2 + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> shells = [[0], [1, 2, 3]] + >>> pos = nx.shell_layout(G, shells) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.shell_layout(G, shells, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([0., 0.]), + 1: array([-5.00000000e-01, -4.37113883e-08]), + 2: array([ 0.24999996, -0.43301272]), + 3: array([0.24999981, 0.43301281])} + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. 
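+
+    Shell radii grow linearly: each shell sits ``scale / len(nlist)`` farther
+    from `center` than the previous one, and a leading single-node shell is
+    placed exactly at `center`.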
+
+    """
+    import numpy as np
+
+    if dim != 2:
+        raise ValueError("can only handle 2 dimensions")
+
+    G, center = _process_params(G, center, dim)
+
+    if len(G) == 0:
+        return {}
+    if len(G) == 1:
+        return {nx.utils.arbitrary_element(G): center}
+
+    if nlist is None:
+        # draw the whole graph in one shell
+        nlist = [list(G)]
+
+    radius_bump = scale / len(nlist)
+
+    if len(nlist[0]) == 1:
+        # single node at center
+        radius = 0.0
+    else:
+        # else start at r=1
+        radius = radius_bump
+
+    if rotate is None:
+        rotate = np.pi / len(nlist)
+    first_theta = rotate
+    npos = {}
+    for nodes in nlist:
+        # Discard the last angle (endpoint=False) since 2*pi matches 0 radians
+        theta = (
+            np.linspace(0, 2 * np.pi, len(nodes), endpoint=False, dtype=np.float32)
+            + first_theta
+        )
+        pos = radius * np.column_stack([np.cos(theta), np.sin(theta)]) + center
+        npos.update(zip(nodes, pos))
+        radius += radius_bump
+        first_theta += rotate
+
+    if store_pos_as is not None:
+        nx.set_node_attributes(G, npos, store_pos_as)
+    return npos
+
+
+def bipartite_layout(
+    G,
+    nodes=None,
+    align="vertical",
+    scale=1,
+    center=None,
+    aspect_ratio=4 / 3,
+    store_pos_as=None,
+):
+    """Position nodes in two straight lines.
+
+    Parameters
+    ----------
+    G : NetworkX graph or list of nodes
+        A position will be assigned to every node in G.
+
+    nodes : collection of nodes
+        Nodes in one node set of the graph. This set will be placed on
+        left or top. If `None` (the default), a node set is chosen arbitrarily
+        if the graph is bipartite.
+
+    align : string (default='vertical')
+        The alignment of nodes. Vertical or horizontal.
+
+    scale : number (default: 1)
+        Scale factor for positions.
+
+    center : array-like or None
+        Coordinate pair around which to center the layout.
+
+    aspect_ratio : number (default=4/3)
+        The ratio of the width to the height of the layout.
+
+    store_pos_as : str, default None
+        If non-None, the position of each node will be stored on the graph as
+        an attribute with this string as its name, which can be accessed with
+        ``G.nodes[...][store_pos_as]``. The function still returns the dictionary.
+
+    Returns
+    -------
+    pos : dict
+        A dictionary of positions keyed by node.
+
+    Raises
+    ------
+    NetworkXError
+        If ``nodes=None`` and `G` is not bipartite.
+
+    Examples
+    --------
+    >>> G = nx.complete_bipartite_graph(3, 3)
+    >>> pos = nx.bipartite_layout(G)
+
+    The ordering of the layout (i.e. which nodes appear on the left/top) can
+    be specified with the `nodes` parameter:
+
+    >>> top, bottom = nx.bipartite.sets(G)
+    >>> pos = nx.bipartite_layout(G, nodes=bottom)  # "bottom" set appears on the left
+
+    `store_pos_as` can be used to store the node positions for the computed layout
+    directly on the nodes:
+
+    >>> _ = nx.bipartite_layout(G, nodes=bottom, store_pos_as="pos")
+    >>> from pprint import pprint
+    >>> pprint(nx.get_node_attributes(G, "pos"))
+    {0: array([ 1.  , -0.75]),
+     1: array([1., 0.]),
+     2: array([1.  , 0.75]),
+     3: array([-1.  , -0.75]),
+     4: array([-1.,  0.]),
+     5: array([-1.  ,  0.75])}
+
+
+    The ``bipartite_layout`` function can be used with non-bipartite graphs
+    by explicitly specifying how the layout should be partitioned with `nodes`:
+
+    >>> G = nx.complete_graph(5)  # Non-bipartite
+    >>> pos = nx.bipartite_layout(G, nodes={0, 1, 2})
+
+    Notes
+    -----
+    This algorithm currently only works in two dimensions and does not
+    try to minimize edge crossings.
+
+    """
+
+    import numpy as np
+
+    if align not in ("vertical", "horizontal"):
+        msg = "align must be either vertical or horizontal."
+ raise ValueError(msg) + + G, center = _process_params(G, center=center, dim=2) + if len(G) == 0: + return {} + + height = 1 + width = aspect_ratio * height + offset = (width / 2, height / 2) + + if nodes is None: + top, bottom = nx.bipartite.sets(G) + nodes = list(G) + else: + top = set(nodes) + bottom = set(G) - top + # Preserves backward-compatible node ordering in returned pos dict + nodes = list(top) + list(bottom) + + left_xs = np.repeat(0, len(top)) + right_xs = np.repeat(width, len(bottom)) + left_ys = np.linspace(0, height, len(top)) + right_ys = np.linspace(0, height, len(bottom)) + + top_pos = np.column_stack([left_xs, left_ys]) - offset + bottom_pos = np.column_stack([right_xs, right_ys]) - offset + + pos = np.concatenate([top_pos, bottom_pos]) + pos = rescale_layout(pos, scale=scale) + center + if align == "horizontal": + pos = pos[:, ::-1] # swap x and y coords + pos = dict(zip(nodes, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +@np_random_state(10) +def spring_layout( + G, + k=None, + pos=None, + fixed=None, + iterations=50, + threshold=1e-4, + weight="weight", + scale=1, + center=None, + dim=2, + seed=None, + store_pos_as=None, + *, + method="auto", + gravity=1.0, +): + """Position nodes using Fruchterman-Reingold force-directed algorithm. + + The algorithm simulates a force-directed representation of the network + treating edges as springs holding nodes close, while treating nodes + as repelling objects, sometimes called an anti-gravity force. + Simulation continues until the positions are close to an equilibrium. + + There are some hard-coded values: minimal distance between + nodes (0.01) and "temperature" of 0.1 to ensure nodes don't fly away. + During the simulation, `k` helps determine the distance between nodes, + though `scale` and `center` determine the size and place after + rescaling occurs at the end of the simulation. + + Fixing some nodes doesn't allow them to move in the simulation. + It also turns off the rescaling feature at the simulation's end. + In addition, setting `scale` to `None` turns off rescaling. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + k : float (default=None) + Optimal distance between nodes. If None the distance is set to + 1/sqrt(n) where n is the number of nodes. Increase this value + to move nodes farther apart. + + pos : dict or None optional (default=None) + Initial positions for nodes as a dictionary with node as keys + and values as a coordinate list or tuple. If None, then use + random initial positions. + + fixed : list or None optional (default=None) + Nodes to keep fixed at initial position. + Nodes not in ``G.nodes`` are ignored. + ValueError raised if `fixed` specified and `pos` not. + + iterations : int optional (default=50) + Maximum number of iterations taken + + threshold: float optional (default = 1e-4) + Threshold for relative error in node position changes. + The iteration stops if the error is below this threshold. + + weight : string or None optional (default='weight') + The edge attribute that holds the numerical value used for + the edge weight. Larger means a stronger attractive force. + If None, then all edge weights are 1. + + scale : number or None (default: 1) + Scale factor for positions. Not used unless `fixed is None`. + If scale is None, no rescaling is performed. + + center : array-like or None + Coordinate pair around which to center the layout. 
+ Not used unless `fixed is None`. + + dim : int + Dimension of layout. + + seed : int, RandomState instance or None optional (default=None) + Used only for the initial positions in the algorithm. + Set the random state for deterministic node layouts. + If int, `seed` is the seed used by the random number generator, + if numpy.random.RandomState instance, `seed` is the random + number generator, + if None, the random number generator is the RandomState instance used + by numpy.random. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + method : str optional (default='auto') + The method to compute the layout. + If 'force', the force-directed Fruchterman-Reingold algorithm [1]_ is used. + If 'energy', the energy-based optimization algorithm [2]_ is used with absolute + values of edge weights and gravitational forces acting on each connected component. + If 'auto', we use 'force' if ``len(G) < 500`` and 'energy' otherwise. + + gravity: float optional (default=1.0) + Used only for the method='energy'. + The positive coefficient of gravitational forces per connected component. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> pos = nx.spring_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.spring_layout(G, seed=123, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([-0.61520994, -1. ]), + 1: array([-0.21840965, -0.35501755]), + 2: array([0.21841264, 0.35502078]), + 3: array([0.61520696, 0.99999677])} + + # The same using longer but equivalent function name + >>> pos = nx.fruchterman_reingold_layout(G) + + References + ---------- + .. [1] Fruchterman, Thomas MJ, and Edward M. Reingold. + "Graph drawing by force-directed placement." + Software: Practice and experience 21, no. 11 (1991): 1129-1164. + http://dx.doi.org/10.1002/spe.4380211102 + .. [2] Hamaguchi, Hiroki, Naoki Marumo, and Akiko Takeda. + "Initial Placement for Fruchterman--Reingold Force Model With Coordinate Newton Direction." + arXiv preprint arXiv:2412.20317 (2024). 
+ https://arxiv.org/abs/2412.20317 + """ + import numpy as np + + if method not in ("auto", "force", "energy"): + raise ValueError("the method must be either auto, force, or energy.") + if method == "auto": + method = "force" if len(G) < 500 else "energy" + + G, center = _process_params(G, center, dim) + + if fixed is not None: + if pos is None: + raise ValueError("nodes are fixed without positions given") + for node in fixed: + if node not in pos: + raise ValueError("nodes are fixed without positions given") + nfixed = {node: i for i, node in enumerate(G)} + fixed = np.asarray([nfixed[node] for node in fixed if node in nfixed]) + + if pos is not None: + # Determine size of existing domain to adjust initial positions + dom_size = max(coord for pos_tup in pos.values() for coord in pos_tup) + if dom_size == 0: + dom_size = 1 + pos_arr = seed.rand(len(G), dim) * dom_size + center + + for i, n in enumerate(G): + if n in pos: + pos_arr[i] = np.asarray(pos[n]) + else: + pos_arr = None + dom_size = 1 + + if len(G) == 0: + return {} + if len(G) == 1: + pos = {nx.utils.arbitrary_element(G.nodes()): center} + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + return pos + + # Sparse matrix + if len(G) >= 500 or method == "energy": + A = nx.to_scipy_sparse_array(G, weight=weight, dtype="f") + if k is None and fixed is not None: + # We must adjust k by domain size for layouts not near 1x1 + nnodes, _ = A.shape + k = dom_size / np.sqrt(nnodes) + pos = _sparse_fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed, method, gravity + ) + else: + A = nx.to_numpy_array(G, weight=weight) + if k is None and fixed is not None: + # We must adjust k by domain size for layouts not near 1x1 + nnodes, _ = A.shape + k = dom_size / np.sqrt(nnodes) + pos = _fruchterman_reingold( + A, k, pos_arr, fixed, iterations, threshold, dim, seed + ) + if fixed is None and scale is not None: + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(G, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +fruchterman_reingold_layout = spring_layout + + +@np_random_state(7) +def _fruchterman_reingold( + A, k=None, pos=None, fixed=None, iterations=50, threshold=1e-4, dim=2, seed=None +): + # Position nodes in adjacency matrix A using Fruchterman-Reingold + # Entry point for NetworkX graph is fruchterman_reingold_layout() + import numpy as np + + try: + nnodes, _ = A.shape + except AttributeError as err: + msg = "fruchterman_reingold() takes an adjacency matrix as input" + raise nx.NetworkXError(msg) from err + + if pos is None: + # random initial positions + pos = np.asarray(seed.rand(nnodes, dim), dtype=A.dtype) + else: + # make sure positions are of same type as matrix + pos = pos.astype(A.dtype) + + # optimal distance between nodes + if k is None: + k = np.sqrt(1.0 / nnodes) + # the initial "temperature" is about .1 of domain area (=1x1) + # this is the largest step allowed in the dynamics. + # We need to calculate this in case our fixed positions force our domain + # to be much bigger than 1x1 + t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1])) * 0.1 + # simple cooling scheme. + # linearly step down by dt on each iteration so last iteration is size dt. 
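+    # (One dense sweep below computes all pairwise differences at once, so each
+    # iteration costs O(V^2) time and memory; every node moves at most distance t.)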
+    dt = t / (iterations + 1)
+    delta = np.zeros((pos.shape[0], pos.shape[0], pos.shape[1]), dtype=A.dtype)
+    # the inscrutable (but fast) version
+    # this is still O(V^2)
+    # could use multilevel methods to speed this up significantly
+    for iteration in range(iterations):
+        # matrix of difference between points
+        delta = pos[:, np.newaxis, :] - pos[np.newaxis, :, :]
+        # distance between points
+        distance = np.linalg.norm(delta, axis=-1)
+        # enforce minimum distance of 0.01
+        np.clip(distance, 0.01, None, out=distance)
+        # displacement "force": repulsion k*k/d**2 minus attraction A*d/k,
+        # applied along the difference vectors
+        displacement = np.einsum(
+            "ijk,ij->ik", delta, (k * k / distance**2 - A * distance / k)
+        )
+        # update positions
+        length = np.linalg.norm(displacement, axis=-1)
+        length = np.where(length < 0.01, 0.1, length)
+        delta_pos = np.einsum("ij,i->ij", displacement, t / length)
+        if fixed is not None:
+            # don't change positions of fixed nodes
+            delta_pos[fixed] = 0.0
+        pos += delta_pos
+        # cool temperature
+        t -= dt
+        if (np.linalg.norm(delta_pos) / nnodes) < threshold:
+            break
+    return pos
+
+
+@np_random_state(7)
+def _sparse_fruchterman_reingold(
+    A,
+    k=None,
+    pos=None,
+    fixed=None,
+    iterations=50,
+    threshold=1e-4,
+    dim=2,
+    seed=None,
+    method="energy",
+    gravity=1.0,
+):
+    # Position nodes in adjacency matrix A using Fruchterman-Reingold
+    # Entry point for NetworkX graph is fruchterman_reingold_layout()
+    # Sparse version
+    import numpy as np
+    import scipy as sp
+
+    try:
+        nnodes, _ = A.shape
+    except AttributeError as err:
+        msg = "fruchterman_reingold() takes an adjacency matrix as input"
+        raise nx.NetworkXError(msg) from err
+
+    if pos is None:
+        # random initial positions
+        pos = np.asarray(seed.rand(nnodes, dim), dtype=A.dtype)
+    else:
+        # make sure positions are of same type as matrix
+        pos = pos.astype(A.dtype)
+
+    # no fixed nodes
+    if fixed is None:
+        fixed = []
+
+    # optimal distance between nodes
+    if k is None:
+        k = np.sqrt(1.0 / nnodes)
+
+    if method == "energy":
+        return _energy_fruchterman_reingold(
+            A, nnodes, k, pos, fixed, iterations, threshold, dim, gravity
+        )
+
+    # make sure we have a List of Lists (LIL) representation
+    try:
+        A = A.tolil()
+    except AttributeError:
+        A = (sp.sparse.coo_array(A)).tolil()
+
+    # the initial "temperature" is about .1 of domain area (=1x1)
+    # this is the largest step allowed in the dynamics.
+    t = max(max(pos.T[0]) - min(pos.T[0]), max(pos.T[1]) - min(pos.T[1])) * 0.1
+    # simple cooling scheme.
+    # linearly step down by dt on each iteration so last iteration is size dt.
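+    # (The row-by-row loop below trades the dense (n, n, dim) difference array
+    # for O(n * dim) memory per row, at the cost of a Python-level loop.)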
+    dt = t / (iterations + 1)
+
+    displacement = np.zeros((dim, nnodes))
+    for iteration in range(iterations):
+        displacement *= 0
+        # loop over rows
+        for i in range(A.shape[0]):
+            if i in fixed:
+                continue
+            # difference between this row's node position and all others
+            delta = (pos[i] - pos).T
+            # distance between points
+            distance = np.sqrt((delta**2).sum(axis=0))
+            # enforce minimum distance of 0.01
+            distance = np.where(distance < 0.01, 0.01, distance)
+            # the adjacency matrix row
+            Ai = A.getrowview(i).toarray()  # TODO: revisit w/ sparse 1D container
+            # displacement "force"
+            displacement[:, i] += (
+                delta * (k * k / distance**2 - Ai * distance / k)
+            ).sum(axis=1)
+        # update positions
+        length = np.sqrt((displacement**2).sum(axis=0))
+        length = np.where(length < 0.01, 0.1, length)
+        delta_pos = (displacement * t / length).T
+        pos += delta_pos
+        # cool temperature
+        t -= dt
+        if (np.linalg.norm(delta_pos) / nnodes) < threshold:
+            break
+    return pos
+
+
+def _energy_fruchterman_reingold(
+    A, nnodes, k, pos, fixed, iterations, threshold, dim, gravity
+):
+    # Entry point for NetworkX graph is fruchterman_reingold_layout()
+    # energy-based version
+    import numpy as np
+    import scipy as sp
+
+    if gravity <= 0:
+        raise ValueError("gravity must be positive.")
+
+    # make sure we have a Compressed Sparse Row format
+    try:
+        A = A.tocsr()
+    except AttributeError:
+        A = sp.sparse.csr_array(A)
+
+    # Take absolute values of edge weights and symmetrize it
+    A = np.abs(A)
+    A = (A + A.T) / 2
+
+    n_components, labels = sp.sparse.csgraph.connected_components(A, directed=False)
+    bincount = np.bincount(labels)
+    batchsize = 500
+
+    def _cost_FR(x):
+        pos = x.reshape((nnodes, dim))
+        grad = np.zeros((nnodes, dim))
+        cost = 0.0
+        for l in range(0, nnodes, batchsize):
+            r = min(l + batchsize, nnodes)
+            # difference between selected node positions and all others
+            delta = pos[l:r, np.newaxis, :] - pos[np.newaxis, :, :]
+            # distance between points with a minimum distance of 1e-5
+            distance2 = np.sum(delta * delta, axis=2)
+            distance2 = np.maximum(distance2, 1e-10)
+            distance = np.sqrt(distance2)
+            # temporary variable for calculation
+            Ad = A[l:r] * distance
+            # attractive forces and repulsive forces
+            grad[l:r] = 2 * np.einsum("ij,ijk->ik", Ad / k - k**2 / distance2, delta)
+            # integrated attractive forces
+            cost += np.sum(Ad * distance2) / (3 * k)
+            # integrated repulsive forces
+            cost -= k**2 * np.sum(np.log(distance))
+        # gravitational force from the centroids of connected components to (0.5, ..., 0.5)^T
+        centers = np.zeros((n_components, dim))
+        np.add.at(centers, labels, pos)
+        delta0 = centers / bincount[:, np.newaxis] - 0.5
+        grad += gravity * delta0[labels]
+        cost += gravity * 0.5 * np.sum(bincount * np.linalg.norm(delta0, axis=1) ** 2)
+        # fix positions of fixed nodes
+        grad[fixed] = 0.0
+        return cost, grad.ravel()
+
+    # Optimization of the energy function by L-BFGS algorithm
+    options = {"maxiter": iterations, "gtol": threshold}
+    return sp.optimize.minimize(
+        _cost_FR, pos.ravel(), method="L-BFGS-B", jac=True, options=options
+    ).x.reshape((nnodes, dim))
+
+
+def kamada_kawai_layout(
+    G,
+    dist=None,
+    pos=None,
+    weight="weight",
+    scale=1,
+    center=None,
+    dim=2,
+    store_pos_as=None,
+):
+    """Position nodes using Kamada-Kawai path-length cost-function.
+
+    Parameters
+    ----------
+    G : NetworkX graph or list of nodes
+        A position will be assigned to every node in G.
+
+    dist : dict (default=None)
+        A two-level dictionary of optimal distances between nodes,
+        indexed by source and destination node.
+        If None, the distance is computed using shortest_path_length().
+
+    pos : dict or None optional (default=None)
+        Initial positions for nodes as a dictionary with node as keys
+        and values as a coordinate list or tuple. If None, then use
+        circular_layout() for dim == 2, random_layout() for dim >= 3,
+        and a linear layout for dim == 1.
+
+    weight : string or None optional (default='weight')
+        The edge attribute that holds the numerical value used for
+        the edge weight. If None, then all edge weights are 1.
+
+    scale : number (default: 1)
+        Scale factor for positions.
+
+    center : array-like or None
+        Coordinate pair around which to center the layout.
+
+    dim : int
+        Dimension of layout.
+
+    store_pos_as : str, default None
+        If non-None, the position of each node will be stored on the graph as
+        an attribute with this string as its name, which can be accessed with
+        ``G.nodes[...][store_pos_as]``. The function still returns the dictionary.
+
+    Returns
+    -------
+    pos : dict
+        A dictionary of positions keyed by node
+
+    Examples
+    --------
+    >>> from pprint import pprint
+    >>> G = nx.path_graph(4)
+    >>> pos = nx.kamada_kawai_layout(G)
+    >>> # suppress the returned dict and store on the graph directly
+    >>> _ = nx.kamada_kawai_layout(G, store_pos_as="pos")
+    >>> pprint(nx.get_node_attributes(G, "pos"))
+    {0: array([0.99996577, 0.99366857]),
+     1: array([0.32913544, 0.33543827]),
+     2: array([-0.33544334, -0.32910684]),
+     3: array([-0.99365787, -1.        ])}
+    """
+    import numpy as np
+
+    G, center = _process_params(G, center, dim)
+    nNodes = len(G)
+    if nNodes == 0:
+        return {}
+
+    if dist is None:
+        dist = dict(nx.shortest_path_length(G, weight=weight))
+    dist_mtx = 1e6 * np.ones((nNodes, nNodes))
+    for row, nr in enumerate(G):
+        if nr not in dist:
+            continue
+        rdist = dist[nr]
+        for col, nc in enumerate(G):
+            if nc not in rdist:
+                continue
+            dist_mtx[row][col] = rdist[nc]
+
+    if pos is None:
+        if dim >= 3:
+            pos = random_layout(G, dim=dim)
+        elif dim == 2:
+            pos = circular_layout(G, dim=dim)
+        else:
+            pos = dict(zip(G, np.linspace(0, 1, len(G))))
+    pos_arr = np.array([pos[n] for n in G])
+
+    pos = _kamada_kawai_solve(dist_mtx, pos_arr, dim)
+
+    pos = rescale_layout(pos, scale=scale) + center
+    pos = dict(zip(G, pos))
+
+    if store_pos_as is not None:
+        nx.set_node_attributes(G, pos, store_pos_as)
+
+    return pos
+
+
+def _kamada_kawai_solve(dist_mtx, pos_arr, dim):
+    # Anneal node locations based on the Kamada-Kawai cost-function,
+    # using the supplied matrix of preferred inter-node distances,
+    # and starting locations.
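+    # The objective (see _kamada_kawai_costfn below) is, up to a factor of
+    # 1/2, the sum over node pairs of (||p_i - p_j|| / d_ij - 1)**2, plus a
+    # small parabolic term that keeps the mean position near the origin.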
+ + import numpy as np + import scipy as sp + + meanwt = 1e-3 + costargs = (np, 1 / (dist_mtx + np.eye(dist_mtx.shape[0]) * 1e-3), meanwt, dim) + + optresult = sp.optimize.minimize( + _kamada_kawai_costfn, + pos_arr.ravel(), + method="L-BFGS-B", + args=costargs, + jac=True, + ) + + return optresult.x.reshape((-1, dim)) + + +def _kamada_kawai_costfn(pos_vec, np, invdist, meanweight, dim): + # Cost-function and gradient for Kamada-Kawai layout algorithm + nNodes = invdist.shape[0] + pos_arr = pos_vec.reshape((nNodes, dim)) + + delta = pos_arr[:, np.newaxis, :] - pos_arr[np.newaxis, :, :] + nodesep = np.linalg.norm(delta, axis=-1) + direction = np.einsum("ijk,ij->ijk", delta, 1 / (nodesep + np.eye(nNodes) * 1e-3)) + + offset = nodesep * invdist - 1.0 + offset[np.diag_indices(nNodes)] = 0 + + cost = 0.5 * np.sum(offset**2) + grad = np.einsum("ij,ij,ijk->ik", invdist, offset, direction) - np.einsum( + "ij,ij,ijk->jk", invdist, offset, direction + ) + + # Additional parabolic term to encourage mean position to be near origin: + sumpos = np.sum(pos_arr, axis=0) + cost += 0.5 * meanweight * np.sum(sumpos**2) + grad += meanweight * sumpos + + return (cost, grad.ravel()) + + +def spectral_layout(G, weight="weight", scale=1, center=None, dim=2, store_pos_as=None): + """Position nodes using the eigenvectors of the graph Laplacian. + + Using the unnormalized Laplacian, the layout shows possible clusters of + nodes which are an approximation of the ratio cut. If dim is the number of + dimensions then the positions are the entries of the dim eigenvectors + corresponding to the ascending eigenvalues starting from the second one. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + weight : string or None optional (default='weight') + The edge attribute that holds the numerical value used for + the edge weight. If None, then all edge weights are 1. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> pos = nx.spectral_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.spectral_layout(G, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([-1. , 0.76536686]), + 1: array([-0.41421356, -0.76536686]), + 2: array([ 0.41421356, -0.76536686]), + 3: array([1. , 0.76536686])} + + + Notes + ----- + Directed graphs will be considered as undirected graphs when + positioning the nodes. + + For larger graphs (>500 nodes) this will use the SciPy sparse + eigenvalue solver (ARPACK). 
+ """ + # handle some special cases that break the eigensolvers + import numpy as np + + G, center = _process_params(G, center, dim) + + if len(G) <= 2: + if len(G) == 0: + pos = np.array([]) + elif len(G) == 1: + pos = np.array([center]) + else: + pos = np.array([np.zeros(dim), np.array(center) * 2.0]) + return dict(zip(G, pos)) + try: + # Sparse matrix + if len(G) < 500: # dense solver is faster for small graphs + raise ValueError + A = nx.to_scipy_sparse_array(G, weight=weight, dtype="d") + # Symmetrize directed graphs + if G.is_directed(): + A = A + np.transpose(A) + pos = _sparse_spectral(A, dim) + except (ImportError, ValueError): + # Dense matrix + A = nx.to_numpy_array(G, weight=weight) + # Symmetrize directed graphs + if G.is_directed(): + A += A.T + pos = _spectral(A, dim) + + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(G, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +def _spectral(A, dim=2): + # Input adjacency matrix A + # Uses dense eigenvalue solver from numpy + import numpy as np + + try: + nnodes, _ = A.shape + except AttributeError as err: + msg = "spectral() takes an adjacency matrix as input" + raise nx.NetworkXError(msg) from err + + # form Laplacian matrix where D is diagonal of degrees + D = np.identity(nnodes, dtype=A.dtype) * np.sum(A, axis=1) + L = D - A + + eigenvalues, eigenvectors = np.linalg.eig(L) + # sort and keep smallest nonzero + index = np.argsort(eigenvalues)[1 : dim + 1] # 0 index is zero eigenvalue + return np.real(eigenvectors[:, index]) + + +def _sparse_spectral(A, dim=2): + # Input adjacency matrix A + # Uses sparse eigenvalue solver from scipy + # Could use multilevel methods here, see Koren "On spectral graph drawing" + import numpy as np + import scipy as sp + + try: + nnodes, _ = A.shape + except AttributeError as err: + msg = "sparse_spectral() takes an adjacency matrix as input" + raise nx.NetworkXError(msg) from err + + # form Laplacian matrix + # TODO: Rm csr_array wrapper in favor of spdiags array constructor when available + D = sp.sparse.csr_array(sp.sparse.spdiags(A.sum(axis=1), 0, nnodes, nnodes)) + L = D - A + + k = dim + 1 + # number of Lanczos vectors for ARPACK solver.What is the right scaling? + ncv = max(2 * k + 1, int(np.sqrt(nnodes))) + # return smallest k eigenvalues and eigenvectors + eigenvalues, eigenvectors = sp.sparse.linalg.eigsh(L, k, which="SM", ncv=ncv) + index = np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue + return np.real(eigenvectors[:, index]) + + +def planar_layout(G, scale=1, center=None, dim=2, store_pos_as=None): + """Position nodes without edge intersections. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. If G is of type + nx.PlanarEmbedding, the positions are selected accordingly. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int + Dimension of layout. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. 
+ + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Raises + ------ + NetworkXException + If G is not planar + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> pos = nx.planar_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.planar_layout(G, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([-0.77777778, -0.33333333]), + 1: array([ 1. , -0.33333333]), + 2: array([0.11111111, 0.55555556]), + 3: array([-0.33333333, 0.11111111])} + """ + import numpy as np + + if dim != 2: + raise ValueError("can only handle 2 dimensions") + + G, center = _process_params(G, center, dim) + + if len(G) == 0: + return {} + + if isinstance(G, nx.PlanarEmbedding): + embedding = G + else: + is_planar, embedding = nx.check_planarity(G) + if not is_planar: + raise nx.NetworkXException("G is not planar.") + pos = nx.combinatorial_embedding_to_pos(embedding) + node_list = list(embedding) + pos = np.vstack([pos[x] for x in node_list]) + pos = pos.astype(np.float64) + pos = rescale_layout(pos, scale=scale) + center + pos = dict(zip(node_list, pos)) + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + return pos + + +def spiral_layout( + G, + scale=1, + center=None, + dim=2, + resolution=0.35, + equidistant=False, + store_pos_as=None, +): + """Position nodes in a spiral layout. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + dim : int, default=2 + Dimension of layout, currently only dim=2 is supported. + Other dimension values result in a ValueError. + + resolution : float, default=0.35 + The compactness of the spiral layout returned. + Lower values result in more compressed spiral layouts. + + equidistant : bool, default=False + If True, nodes will be positioned equidistant from each other + by decreasing angle further from center. + If False, nodes will be positioned at equal angles + from each other by increasing separation further from center. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node + + Raises + ------ + ValueError + If dim != 2 + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(4) + >>> pos = nx.spiral_layout(G) + >>> nx.draw(G, pos=pos) + >>> # suppress the returned dict and store on the graph directly + >>> _ = nx.spiral_layout(G, store_pos_as="pos") + >>> pprint(nx.get_node_attributes(G, "pos")) + {0: array([-0.64153279, -0.68555087]), + 1: array([-0.03307913, -0.46344795]), + 2: array([0.34927952, 0.14899882]), + 3: array([0.32533239, 1. ])} + + Notes + ----- + This algorithm currently only works in two dimensions. 
+ + """ + import numpy as np + + if dim != 2: + raise ValueError("can only handle 2 dimensions") + + G, center = _process_params(G, center, dim) + + if len(G) == 0: + return {} + if len(G) == 1: + pos = {nx.utils.arbitrary_element(G): center} + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + return pos + + pos = [] + if equidistant: + chord = 1 + step = 0.5 + theta = resolution + theta += chord / (step * theta) + for _ in range(len(G)): + r = step * theta + theta += chord / r + pos.append([np.cos(theta) * r, np.sin(theta) * r]) + + else: + dist = np.arange(len(G), dtype=float) + angle = resolution * dist + pos = np.transpose(dist * np.array([np.cos(angle), np.sin(angle)])) + + pos = rescale_layout(np.array(pos), scale=scale) + center + + pos = dict(zip(G, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +def multipartite_layout( + G, subset_key="subset", align="vertical", scale=1, center=None, store_pos_as=None +): + """Position nodes in layers of straight lines. + + Parameters + ---------- + G : NetworkX graph or list of nodes + A position will be assigned to every node in G. + + subset_key : string or dict (default='subset') + If a string, the key of node data in G that holds the node subset. + If a dict, keyed by layer number to the nodes in that layer/subset. + + align : string (default='vertical') + The alignment of nodes. Vertical or horizontal. + + scale : number (default: 1) + Scale factor for positions. + + center : array-like or None + Coordinate pair around which to center the layout. + + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node. + + Examples + -------- + >>> G = nx.complete_multipartite_graph(28, 16, 10) + >>> pos = nx.multipartite_layout(G) + >>> # suppress the returned dict and store on the graph directly + >>> G = nx.complete_multipartite_graph(28, 16, 10) + >>> _ = nx.multipartite_layout(G, store_pos_as="pos") + + or use a dict to provide the layers of the layout + + >>> G = nx.Graph([(0, 1), (1, 2), (1, 3), (3, 4)]) + >>> layers = {"a": [0], "b": [1], "c": [2, 3], "d": [4]} + >>> pos = nx.multipartite_layout(G, subset_key=layers) + + Notes + ----- + This algorithm currently only works in two dimensions and does not + try to minimize edge crossings. + + Network does not need to be a complete multipartite graph. As long as nodes + have subset_key data, they will be placed in the corresponding layers. + + """ + import numpy as np + + if align not in ("vertical", "horizontal"): + msg = "align must be either vertical or horizontal." 
+ raise ValueError(msg) + + G, center = _process_params(G, center=center, dim=2) + if len(G) == 0: + return {} + + try: + # check if subset_key is dict-like + if len(G) != sum(len(nodes) for nodes in subset_key.values()): + raise nx.NetworkXError( + "all nodes must be in one subset of `subset_key` dict" + ) + except AttributeError: + # subset_key is not a dict, hence a string + node_to_subset = nx.get_node_attributes(G, subset_key) + if len(node_to_subset) != len(G): + raise nx.NetworkXError( + f"all nodes need a subset_key attribute: {subset_key}" + ) + subset_key = nx.utils.groups(node_to_subset) + + # Sort by layer, if possible + try: + layers = dict(sorted(subset_key.items())) + except TypeError: + layers = subset_key + + pos = None + nodes = [] + width = len(layers) + for i, layer in enumerate(layers.values()): + height = len(layer) + xs = np.repeat(i, height) + ys = np.arange(0, height, dtype=float) + offset = ((width - 1) / 2, (height - 1) / 2) + layer_pos = np.column_stack([xs, ys]) - offset + if pos is None: + pos = layer_pos + else: + pos = np.concatenate([pos, layer_pos]) + nodes.extend(layer) + pos = rescale_layout(pos, scale=scale) + center + if align == "horizontal": + pos = pos[:, ::-1] # swap x and y coords + pos = dict(zip(nodes, pos)) + + if store_pos_as is not None: + nx.set_node_attributes(G, pos, store_pos_as) + + return pos + + +@np_random_state("seed") +def arf_layout( + G, + pos=None, + scaling=1, + a=1.1, + etol=1e-6, + dt=1e-3, + max_iter=1000, + *, + seed=None, + store_pos_as=None, +): + """Arf layout for networkx + + The attractive and repulsive forces (arf) layout [1] improves the spring + layout in three ways. First, it prevents congestion of highly connected nodes + due to strong forcing between nodes. Second, it utilizes the layout space + more effectively by preventing large gaps that spring layout tends to create. + Lastly, the arf layout represents symmetries in the layout better than the + default spring layout. + + Parameters + ---------- + G : nx.Graph or nx.DiGraph + Networkx graph. + pos : dict + Initial position of the nodes. If set to None a + random layout will be used. + scaling : float + Scales the radius of the circular layout space. + a : float + Strength of springs between connected nodes. Should be larger than 1. + The greater a, the clearer the separation of unconnected sub clusters. + etol : float + Gradient sum of spring forces must be larger than `etol` before successful + termination. + dt : float + Time step for force differential equation simulations. + max_iter : int + Max iterations before termination of the algorithm. + seed : int, RandomState instance or None optional (default=None) + Set the random state for deterministic node layouts. + If int, `seed` is the seed used by the random number generator, + if numpy.random.RandomState instance, `seed` is the random + number generator, + if None, the random number generator is the RandomState instance used + by numpy.random. + store_pos_as : str, default None + If non-None, the position of each node will be stored on the graph as + an attribute with this string as its name, which can be accessed with + ``G.nodes[...][store_pos_as]``. The function still returns the dictionary. + + Returns + ------- + pos : dict + A dictionary of positions keyed by node. 
+
+    Examples
+    --------
+    >>> G = nx.grid_graph((5, 5))
+    >>> pos = nx.arf_layout(G)
+    >>> # suppress the returned dict and store on the graph directly
+    >>> G = nx.grid_graph((5, 5))
+    >>> _ = nx.arf_layout(G, store_pos_as="pos")
+
+    References
+    ----------
+    .. [1] "Self-Organization Applied to Dynamic Network Layout", M. Geipel,
+           International Journal of Modern Physics C, 2007, Vol 18, No 10,
+           pp. 1537-1549.
+           https://doi.org/10.1142/S0129183107011558 https://arxiv.org/abs/0704.1748
+    """
+    import warnings
+
+    import numpy as np
+
+    if a <= 1:
+        msg = "The parameter a should be larger than 1"
+        raise ValueError(msg)
+
+    pos_tmp = nx.random_layout(G, seed=seed)
+    if pos is None:
+        pos = pos_tmp
+    else:
+        for node in G.nodes():
+            if node not in pos:
+                pos[node] = pos_tmp[node].copy()
+
+    # Initialize spring constant matrix
+    N = len(G)
+    # No nodes, no computation
+    if N == 0:
+        return pos
+
+    # init force of springs
+    K = np.ones((N, N)) - np.eye(N)
+    node_order = {node: i for i, node in enumerate(G)}
+    for x, y in G.edges():
+        if x != y:
+            idx, jdx = (node_order[i] for i in (x, y))
+            K[idx, jdx] = a
+
+    # vectorize values
+    p = np.asarray(list(pos.values()))
+
+    # equation 10 in [1]
+    rho = scaling * np.sqrt(N)
+
+    # looping variables
+    error = etol + 1
+    n_iter = 0
+    while error > etol:
+        diff = p[:, np.newaxis] - p[np.newaxis]
+        A = np.linalg.norm(diff, axis=-1)[..., np.newaxis]
+        # attraction force - repulsion force
+        # suppress nans due to division; caused by diagonal set to zero.
+        # Does not affect the computation due to nansum
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+            change = K[..., np.newaxis] * diff - rho / A * diff
+        change = np.nansum(change, axis=0)
+        p += change * dt
+
+        error = np.linalg.norm(change, axis=-1).sum()
+        if n_iter > max_iter:
+            break
+        n_iter += 1
+
+    pos = dict(zip(G.nodes(), p))
+
+    if store_pos_as is not None:
+        nx.set_node_attributes(G, pos, store_pos_as)
+
+    return pos
+
+
+@np_random_state("seed")
+@nx._dispatchable(edge_attrs="weight", mutates_input={"store_pos_as": 15})
+def forceatlas2_layout(
+    G,
+    pos=None,
+    *,
+    max_iter=100,
+    jitter_tolerance=1.0,
+    scaling_ratio=2.0,
+    gravity=1.0,
+    distributed_action=False,
+    strong_gravity=False,
+    node_mass=None,
+    node_size=None,
+    weight=None,
+    dissuade_hubs=False,
+    linlog=False,
+    seed=None,
+    dim=2,
+    store_pos_as=None,
+):
+    """Position nodes using the ForceAtlas2 force-directed layout algorithm.
+
+    This function applies the ForceAtlas2 layout algorithm [1]_ to a NetworkX graph,
+    positioning the nodes in a way that visually represents the structure of the graph.
+    The algorithm uses physical simulation to minimize the energy of the system,
+    resulting in a more readable layout.
+
+    Parameters
+    ----------
+    G : nx.Graph
+        A NetworkX graph to be laid out.
+    pos : dict or None, optional
+        Initial positions of the nodes. If None, random initial positions are used.
+    max_iter : int (default: 100)
+        Number of iterations for the layout optimization.
+    jitter_tolerance : float (default: 1.0)
+        Controls the tolerance for adjusting the speed of layout generation.
+    scaling_ratio : float (default: 2.0)
+        Determines the scaling of attraction and repulsion forces.
+    gravity : float (default: 1.0)
+        Determines the amount of attraction on nodes to the center. Prevents islands
+        (i.e. weakly connected or disconnected parts of the graph)
+        from drifting away.
+    distributed_action : bool (default: False)
+        Distributes the attraction force evenly among nodes.
+    strong_gravity : bool (default: False)
+        Applies a strong gravitational pull towards the center.
+    node_mass : dict or None, optional
+        Maps nodes to their masses, influencing the attraction to other nodes.
+    node_size : dict or None, optional
+        Maps nodes to their sizes, preventing crowding by creating a halo effect.
+    weight : string or None, optional (default: None)
+        The edge attribute that holds the numerical value used for
+        the edge weight. If None, then all edge weights are 1.
+    dissuade_hubs : bool (default: False)
+        Prevents the clustering of hub nodes.
+    linlog : bool (default: False)
+        Uses logarithmic attraction instead of linear.
+    seed : int, RandomState instance or None optional (default=None)
+        Used only for the initial positions in the algorithm.
+        Set the random state for deterministic node layouts.
+        If int, `seed` is the seed used by the random number generator,
+        if numpy.random.RandomState instance, `seed` is the random
+        number generator,
+        if None, the random number generator is the RandomState instance used
+        by numpy.random.
+    dim : int (default: 2)
+        Sets the dimensions for the layout. Ignored if `pos` is provided.
+    store_pos_as : str, default None
+        If non-None, the position of each node will be stored on the graph as
+        an attribute with this string as its name, which can be accessed with
+        ``G.nodes[...][store_pos_as]``. The function still returns the dictionary.
+
+    Examples
+    --------
+    >>> import networkx as nx
+    >>> G = nx.florentine_families_graph()
+    >>> pos = nx.forceatlas2_layout(G)
+    >>> nx.draw(G, pos=pos)
+    >>> # suppress the returned dict and store on the graph directly
+    >>> _ = nx.forceatlas2_layout(G, store_pos_as="pos")
+
+    References
+    ----------
+    .. [1] Jacomy, M., Venturini, T., Heymann, S., & Bastian, M. (2014).
+           ForceAtlas2, a continuous graph layout algorithm for handy network
+           visualization designed for the Gephi software. PloS one, 9(6), e98679.
+           https://doi.org/10.1371/journal.pone.0098679
+    """
+    import numpy as np
+
+    if len(G) == 0:
+        return {}
+    # parse optional pos positions
+    if pos is None:
+        pos = nx.random_layout(G, dim=dim, seed=seed)
+        pos_arr = np.array(list(pos.values()))
+    elif len(pos) == len(G):
+        pos_arr = np.array([pos[node].copy() for node in G])
+    else:
+        # set random node pos within the initial pos values
+        pos_init = np.array(list(pos.values()))
+        max_pos = pos_init.max(axis=0)
+        min_pos = pos_init.min(axis=0)
+        dim = max_pos.size
+        pos_arr = min_pos + seed.rand(len(G), dim) * (max_pos - min_pos)
+        for idx, node in enumerate(G):
+            if node in pos:
+                pos_arr[idx] = pos[node].copy()
+
+    mass = np.zeros(len(G))
+    size = np.zeros(len(G))
+
+    # Only adjust for size when the user specifies a size other than the default (1)
+    adjust_sizes = False
+    if node_size is None:
+        node_size = {}
+    else:
+        adjust_sizes = True
+
+    if node_mass is None:
+        node_mass = {}
+
+    for idx, node in enumerate(G):
+        mass[idx] = node_mass.get(node, G.degree(node) + 1)
+        size[idx] = node_size.get(node, 1)
+
+    n = len(G)
+    gravities = np.zeros((n, dim))
+    attraction = np.zeros((n, dim))
+    repulsion = np.zeros((n, dim))
+    A = nx.to_numpy_array(G, weight=weight)
+
+    def estimate_factor(n, swing, traction, speed, speed_efficiency, jitter_tolerance):
+        """Computes the scaling factor for the force in the ForceAtlas2 layout algorithm.
+ + This helper function adjusts the speed and + efficiency of the layout generation based on the + current state of the system, such as the number of + nodes, current swing, and traction forces. + + Parameters + ---------- + n : int + Number of nodes in the graph. + swing : float + The current swing, representing the oscillation of the nodes. + traction : float + The current traction force, representing the attraction between nodes. + speed : float + The current speed of the layout generation. + speed_efficiency : float + The efficiency of the current speed, influencing how fast the layout converges. + jitter_tolerance : float + The tolerance for jitter, affecting how much speed adjustment is allowed. + + Returns + ------- + tuple + A tuple containing the updated speed and speed efficiency. + + Notes + ----- + This function is a part of the ForceAtlas2 layout algorithm and is used to dynamically adjust the + layout parameters to achieve an optimal and stable visualization. + + """ + import numpy as np + + # estimate jitter + opt_jitter = 0.05 * np.sqrt(n) + min_jitter = np.sqrt(opt_jitter) + max_jitter = 10 + min_speed_efficiency = 0.05 + + other = min(max_jitter, opt_jitter * traction / n**2) + jitter = jitter_tolerance * max(min_jitter, other) + + if swing / traction > 2.0: + if speed_efficiency > min_speed_efficiency: + speed_efficiency *= 0.5 + jitter = max(jitter, jitter_tolerance) + if swing == 0: + target_speed = np.inf + else: + target_speed = jitter * speed_efficiency * traction / swing + + if swing > jitter * traction: + if speed_efficiency > min_speed_efficiency: + speed_efficiency *= 0.7 + elif speed < 1000: + speed_efficiency *= 1.3 + + max_rise = 0.5 + speed = speed + min(target_speed - speed, max_rise * speed) + return speed, speed_efficiency + + speed = 1 + speed_efficiency = 1 + swing = 1 + traction = 1 + for _ in range(max_iter): + # compute pairwise difference + diff = pos_arr[:, None] - pos_arr[None] + # compute pairwise distance + distance = np.linalg.norm(diff, axis=-1) + + # linear attraction + if linlog: + attraction = -np.log(1 + distance) / distance + np.fill_diagonal(attraction, 0) + attraction = np.einsum("ij, ij -> ij", attraction, A) + attraction = np.einsum("ijk, ij -> ik", diff, attraction) + + else: + attraction = -np.einsum("ijk, ij -> ik", diff, A) + + if distributed_action: + attraction /= mass[:, None] + + # repulsion + tmp = mass[:, None] @ mass[None] + if adjust_sizes: + distance += -size[:, None] - size[None] + + d2 = distance**2 + # remove self-interaction + np.fill_diagonal(tmp, 0) + np.fill_diagonal(d2, 1) + factor = (tmp / d2) * scaling_ratio + repulsion = np.einsum("ijk, ij -> ik", diff, factor) + + # gravity + pos_centered = pos_arr - np.mean(pos_arr, axis=0) + if strong_gravity: + gravities = -gravity * mass[:, None] * pos_centered + else: + # hide warnings for divide by zero. 
Then change nan to 0
+            with np.errstate(divide="ignore", invalid="ignore"):
+                unit_vec = pos_centered / np.linalg.norm(pos_centered, axis=-1)[:, None]
+            unit_vec = np.nan_to_num(unit_vec, nan=0)
+            gravities = -gravity * mass[:, None] * unit_vec
+
+        # total forces
+        update = attraction + repulsion + gravities
+
+        # compute total swing and traction
+        swing += (mass * np.linalg.norm(pos_arr - update, axis=-1)).sum()
+        traction += (0.5 * mass * np.linalg.norm(pos_arr + update, axis=-1)).sum()
+
+        speed, speed_efficiency = estimate_factor(
+            n,
+            swing,
+            traction,
+            speed,
+            speed_efficiency,
+            jitter_tolerance,
+        )
+
+        # update pos
+        if adjust_sizes:
+            df = np.linalg.norm(update, axis=-1)
+            swinging = mass * df
+            factor = 0.1 * speed / (1 + np.sqrt(speed * swinging))
+            factor = np.minimum(factor * df, 10.0 * np.ones(df.shape)) / df
+        else:
+            swinging = mass * np.linalg.norm(update, axis=-1)
+            factor = speed / (1 + np.sqrt(speed * swinging))
+
+        factored_update = update * factor[:, None]
+        pos_arr += factored_update
+        if abs(factored_update).sum() < 1e-10:
+            break
+
+    pos = dict(zip(G, pos_arr))
+    if store_pos_as is not None:
+        nx.set_node_attributes(G, pos, store_pos_as)
+
+    return pos
+
+
+def rescale_layout(pos, scale=1):
+    """Returns scaled position array to (-scale, scale) in all axes.
+
+    The function acts on NumPy arrays which hold position information.
+    Each position is one row of the array. The dimension of the space
+    equals the number of columns. Each coordinate in one column.
+
+    To rescale, the mean (center) is subtracted from each axis separately.
+    Then all values are scaled so that the largest magnitude value
+    from all axes equals `scale` (thus, the aspect ratio is preserved).
+    The resulting NumPy Array is returned (order of rows unchanged).
+
+    Parameters
+    ----------
+    pos : numpy array
+        positions to be scaled. Each row is a position.
+
+    scale : number (default: 1)
+        The size of the resulting extent in all directions.
+
+    Returns
+    -------
+    pos : numpy array
+        scaled positions. Each row is a position.
+
+    See Also
+    --------
+    rescale_layout_dict
+    """
+    import numpy as np
+
+    # Find max length over all dimensions
+    pos -= pos.mean(axis=0)
+    lim = np.abs(pos).max()  # max coordinate for all axes
+    # rescale to (-scale, scale) in all directions, preserves aspect
+    if lim > 0:
+        pos *= scale / lim
+    return pos
+
+
+def rescale_layout_dict(pos, scale=1):
+    """Return a dictionary of scaled positions keyed by node
+
+    Parameters
+    ----------
+    pos : A dictionary of positions keyed by node
+
+    scale : number (default: 1)
+        The size of the resulting extent in all directions.
+
+    Returns
+    -------
+    pos : A dictionary of positions keyed by node
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> pos = {0: np.array((0, 0)), 1: np.array((1, 1)), 2: np.array((0.5, 0.5))}
+    >>> nx.rescale_layout_dict(pos)
+    {0: array([-1., -1.]), 1: array([1., 1.]), 2: array([0., 0.])}
+
+    >>> pos = {0: np.array((0, 0)), 1: np.array((-1, 1)), 2: np.array((-0.5, 0.5))}
+    >>> nx.rescale_layout_dict(pos, scale=2)
+    {0: array([ 2., -2.]), 1: array([-2.,  2.]), 2: array([0., 0.])}
+
+    See Also
+    --------
+    rescale_layout
+    """
+    import numpy as np
+
+    if not pos:  # empty_graph
+        return {}
+    pos_v = np.array(list(pos.values()))
+    pos_v = rescale_layout(pos_v, scale=scale)
+    return dict(zip(pos, pos_v))
+
+
+def bfs_layout(G, start, *, align="vertical", scale=1, center=None, store_pos_as=None):
+    """Position nodes according to breadth-first search algorithm.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A position will be assigned to every node in G.
+
+    start : node in `G`
+        Starting node for bfs
+
+    align : string (default='vertical')
+        The alignment of nodes. Vertical or horizontal.
+
+    scale : number (default: 1)
+        Scale factor for positions.
+
+    center : array-like or None
+        Coordinate pair around which to center the layout.
+
+    store_pos_as : str, default None
+        If non-None, the position of each node will be stored on the graph as
+        an attribute with this string as its name, which can be accessed with
+        ``G.nodes[...][store_pos_as]``. The function still returns the dictionary.
+
+    Returns
+    -------
+    pos : dict
+        A dictionary of positions keyed by node.
+
+    Examples
+    --------
+    >>> from pprint import pprint
+    >>> G = nx.path_graph(4)
+    >>> pos = nx.bfs_layout(G, 0)
+    >>> # suppress the returned dict and store on the graph directly
+    >>> _ = nx.bfs_layout(G, 0, store_pos_as="pos")
+    >>> pprint(nx.get_node_attributes(G, "pos"))
+    {0: array([-1.,  0.]),
+     1: array([-0.33333333,  0.        ]),
+     2: array([0.33333333, 0.        ]),
+     3: array([1., 0.])}
+
+    Notes
+    -----
+    This algorithm currently only works in two dimensions and does not
+    try to minimize edge crossings.
+
+    """
+    G, center = _process_params(G, center, 2)
+
+    # Compute layers with BFS
+    layers = dict(enumerate(nx.bfs_layers(G, start)))
+
+    if len(G) != sum(len(nodes) for nodes in layers.values()):
+        raise nx.NetworkXError(
+            "bfs_layout didn't include all nodes. Perhaps use input graph:\n"
+            "  G.subgraph(nx.node_connected_component(G, start))"
+        )
+
+    # Compute node positions with multipartite_layout
+    pos = multipartite_layout(
+        G, subset_key=layers, align=align, scale=scale, center=center
+    )
+
+    if store_pos_as is not None:
+        nx.set_node_attributes(G, pos, store_pos_as)
+
+    return pos
diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/nx_agraph.py b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_agraph.py
new file mode 100644
index 0000000..8f1a7be
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_agraph.py
@@ -0,0 +1,464 @@
+"""
+***************
+Graphviz AGraph
+***************
+
+Interface to pygraphviz AGraph class.
+
+Examples
+--------
+>>> G = nx.complete_graph(5)
+>>> A = nx.nx_agraph.to_agraph(G)
+>>> H = nx.nx_agraph.from_agraph(A)
+
+See Also
+--------
+ - Pygraphviz: http://pygraphviz.github.io/
+ - Graphviz: https://www.graphviz.org
+ - DOT Language: http://www.graphviz.org/doc/info/lang.html
+"""
+
+import tempfile
+
+import networkx as nx
+
+__all__ = [
+    "from_agraph",
+    "to_agraph",
+    "write_dot",
+    "read_dot",
+    "graphviz_layout",
+    "pygraphviz_layout",
+    "view_pygraphviz",
+]
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def from_agraph(A, create_using=None):
+    """Returns a NetworkX Graph or DiGraph from a PyGraphviz graph.
+
+    Parameters
+    ----------
+    A : PyGraphviz AGraph
+        A graph created with PyGraphviz
+
+    create_using : NetworkX graph constructor, optional (default=None)
+        Graph type to create. If graph instance, then cleared before populated.
+        If `None`, then the appropriate Graph type is inferred from `A`.
+
+    Examples
+    --------
+    >>> K5 = nx.complete_graph(5)
+    >>> A = nx.nx_agraph.to_agraph(K5)
+    >>> G = nx.nx_agraph.from_agraph(A)
+
+    Notes
+    -----
+    The graph G will have a dictionary ``G.graph["graph"]`` containing
+    the default graphviz attributes for graphs, nodes and edges.
+
+    Default node attributes will be in the dictionary ``G.graph["node"]``,
+    keyed by attribute name, and default edge attributes in
+    ``G.graph["edge"]``.
+
+    Edge attributes will be returned as edge data in G.
+
+    """
+    if create_using is None:
+        if A.is_directed():
+            if A.is_strict():
+                create_using = nx.DiGraph
+            else:
+                create_using = nx.MultiDiGraph
+        else:
+            if A.is_strict():
+                create_using = nx.Graph
+            else:
+                create_using = nx.MultiGraph
+
+    # assign defaults
+    N = nx.empty_graph(0, create_using)
+    if A.name is not None:
+        N.name = A.name
+
+    # add graph attributes
+    N.graph.update(A.graph_attr)
+
+    # add nodes, attributes to N.node_attr
+    for n in A.nodes():
+        str_attr = {str(k): v for k, v in n.attr.items()}
+        N.add_node(str(n), **str_attr)
+
+    # add edges, assign edge data as dictionary of attributes
+    for e in A.edges():
+        u, v = str(e[0]), str(e[1])
+        attr = dict(e.attr)
+        str_attr = {str(k): v for k, v in attr.items()}
+        if not N.is_multigraph():
+            if e.name is not None:
+                str_attr["key"] = e.name
+            N.add_edge(u, v, **str_attr)
+        else:
+            N.add_edge(u, v, key=e.name, **str_attr)
+
+    # add default attributes for graph, nodes, and edges
+    # hang them on N.graph_attr
+    N.graph["graph"] = dict(A.graph_attr)
+    N.graph["node"] = dict(A.node_attr)
+    N.graph["edge"] = dict(A.edge_attr)
+    return N
+
+
+def to_agraph(N):
+    """Returns a pygraphviz graph from a NetworkX graph N.
+
+    Parameters
+    ----------
+    N : NetworkX graph
+        A graph created with NetworkX
+
+    Examples
+    --------
+    >>> K5 = nx.complete_graph(5)
+    >>> A = nx.nx_agraph.to_agraph(K5)
+
+    Notes
+    -----
+    If N has a dict ``N.graph_attr``, an attempt will be made first
+    to copy properties attached to the graph (see from_agraph)
+    and then update them with the calling arguments, if any.
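+
+    For example (a small sketch; requires pygraphviz), graph-level defaults
+    stored in ``N.graph`` become Graphviz default attributes on the AGraph:
+
+    >>> G = nx.Graph()
+    >>> G.graph["node"] = {"color": "red"}
+    >>> A = nx.nx_agraph.to_agraph(G)
+    >>> A.node_attr["color"]
+    'red'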
+ + """ + try: + import pygraphviz + except ImportError as err: + raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err + directed = N.is_directed() + strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() + + A = pygraphviz.AGraph(name=N.name, strict=strict, directed=directed) + + # default graph attributes + A.graph_attr.update(N.graph.get("graph", {})) + A.node_attr.update(N.graph.get("node", {})) + A.edge_attr.update(N.graph.get("edge", {})) + + A.graph_attr.update( + (k, v) for k, v in N.graph.items() if k not in ("graph", "node", "edge") + ) + + # add nodes + for n, nodedata in N.nodes(data=True): + A.add_node(n) + # Add node data + a = A.get_node(n) + for key, val in nodedata.items(): + if key == "pos": + a.attr["pos"] = f"{val[0]},{val[1]}!" + else: + a.attr[key] = str(val) + + # loop over edges + if N.is_multigraph(): + for u, v, key, edgedata in N.edges(data=True, keys=True): + str_edgedata = {k: str(v) for k, v in edgedata.items() if k != "key"} + A.add_edge(u, v, key=str(key)) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) + + else: + for u, v, edgedata in N.edges(data=True): + str_edgedata = {k: str(v) for k, v in edgedata.items()} + A.add_edge(u, v) + # Add edge data + a = A.get_edge(u, v) + a.attr.update(str_edgedata) + + return A + + +def write_dot(G, path): + """Write NetworkX graph G to Graphviz dot format on path. + + Parameters + ---------- + G : graph + A networkx graph + path : filename + Filename or file handle to write + + Notes + ----- + To use a specific graph layout, call ``A.layout`` prior to `write_dot`. + Note that some graphviz layouts are not guaranteed to be deterministic, + see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info. + """ + A = to_agraph(G) + A.write(path) + A.clear() + return + + +@nx._dispatchable(name="agraph_read_dot", graphs=None, returns_graph=True) +def read_dot(path): + """Returns a NetworkX graph from a dot file on path. + + Parameters + ---------- + path : file or string + File name or file handle to read. + """ + try: + import pygraphviz + except ImportError as err: + raise ImportError( + "read_dot() requires pygraphviz http://pygraphviz.github.io/" + ) from err + A = pygraphviz.AGraph(file=path) + gr = from_agraph(A) + A.clear() + return gr + + +def graphviz_layout(G, prog="neato", root=None, args=""): + """Create node positions for G using Graphviz. + + Parameters + ---------- + G : NetworkX graph + A graph created with NetworkX + prog : string + Name of Graphviz layout program + root : string, optional + Root node for twopi layout + args : string, optional + Extra arguments to Graphviz layout program + + Returns + ------- + Dictionary of x, y, positions keyed by node. + + Examples + -------- + >>> G = nx.petersen_graph() + >>> pos = nx.nx_agraph.graphviz_layout(G) + >>> pos = nx.nx_agraph.graphviz_layout(G, prog="dot") + + Notes + ----- + This is a wrapper for pygraphviz_layout. + + Note that some graphviz layouts are not guaranteed to be deterministic, + see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info. + """ + return pygraphviz_layout(G, prog=prog, root=root, args=args) + + +def pygraphviz_layout(G, prog="neato", root=None, args=""): + """Create node positions for G using Graphviz. 
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        A graph created with NetworkX
+    prog : string
+        Name of Graphviz layout program
+    root : string, optional
+        Root node for twopi layout
+    args : string, optional
+        Extra arguments to Graphviz layout program
+
+    Returns
+    -------
+    node_pos : dict
+        Dictionary of x, y, positions keyed by node.
+
+    Examples
+    --------
+    >>> G = nx.petersen_graph()
+    >>> pos = nx.nx_agraph.pygraphviz_layout(G)
+    >>> pos = nx.nx_agraph.pygraphviz_layout(G, prog="dot")
+
+    Notes
+    -----
+    If you use complex node objects, they may have the same string
+    representation and GraphViz could treat them as the same node.
+    The layout may assign both nodes a single location. See Issue #1568.
+    If this occurs in your case, consider relabeling the nodes just
+    for the layout computation using something similar to::
+
+        >>> H = nx.convert_node_labels_to_integers(G, label_attribute="node_label")
+        >>> H_layout = nx.nx_agraph.pygraphviz_layout(H, prog="dot")
+        >>> G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()}
+
+    Note that some graphviz layouts are not guaranteed to be deterministic,
+    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
+    """
+    try:
+        import pygraphviz
+    except ImportError as err:
+        raise ImportError("requires pygraphviz http://pygraphviz.github.io/") from err
+    if root is not None:
+        # leading space keeps the option separate from any user-supplied args
+        args += f" -Groot={root}"
+    A = to_agraph(G)
+    A.layout(prog=prog, args=args)
+    node_pos = {}
+    for n in G:
+        node = pygraphviz.Node(A, n)
+        try:
+            xs = node.attr["pos"].split(",")
+            node_pos[n] = tuple(float(x) for x in xs)
+        except Exception:  # missing or unparsable "pos" attribute
+            print("no position for node", n)
+            node_pos[n] = (0.0, 0.0)
+    return node_pos
+
+
+@nx.utils.open_file(5, "w+b")
+def view_pygraphviz(
+    G, edgelabel=None, prog="dot", args="", suffix="", path=None, show=True
+):
+    """Views the graph G using the specified layout algorithm.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        The graph to draw.
+    edgelabel : str, callable, None
+        If a string, then it specifies the edge attribute to be displayed
+        on the edge labels. If a callable, then it is called for each
+        edge and it should return the string to be displayed on the edges.
+        The function signature of `edgelabel` should be edgelabel(data),
+        where `data` is the edge attribute dictionary.
+    prog : string
+        Name of Graphviz layout program.
+    args : str
+        Additional arguments to pass to the Graphviz layout program.
+    suffix : str
+        If `path` is None, we save to a temporary file. The value of
+        `suffix` will appear at the tail end of the temporary filename.
+    path : str, None
+        The filename used to save the image. If None, save to a temporary
+        file. File formats are the same as those from pygraphviz.agraph.draw.
+        Filenames ending in .gz or .bz2 will be compressed.
+    show : bool, default = True
+        Whether to display the graph with :mod:`PIL.Image.show`,
+        default is `True`. If `False`, the rendered graph is still available
+        at `path`.
+
+    Returns
+    -------
+    path : str
+        The filename of the generated image.
+    A : PyGraphviz graph
+        The PyGraphviz graph instance used to generate the image.
+
+    Notes
+    -----
+    If this function is called in succession too quickly, sometimes the
+    image is not displayed. So you might consider time.sleep(.5) between
+    calls if you experience problems.
+
+    Note that some graphviz layouts are not guaranteed to be deterministic,
+    see https://gitlab.com/graphviz/graphviz/-/issues/1767 for more info.
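+
+    Examples
+    --------
+    A minimal sketch (assumes pygraphviz and Pillow are available); with
+    ``show=False`` the rendered image is only written to the returned path:
+
+    >>> G = nx.wheel_graph(6)  # doctest: +SKIP
+    >>> path, A = nx.nx_agraph.view_pygraphviz(G, show=False)  # doctest: +SKIP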
+ + """ + if not len(G): + raise nx.NetworkXException("An empty graph cannot be drawn.") + + # If we are providing default values for graphviz, these must be set + # before any nodes or edges are added to the PyGraphviz graph object. + # The reason for this is that default values only affect incoming objects. + # If you change the default values after the objects have been added, + # then they inherit no value and are set only if explicitly set. + + # to_agraph() uses these values. + attrs = ["edge", "node", "graph"] + for attr in attrs: + if attr not in G.graph: + G.graph[attr] = {} + + # These are the default values. + edge_attrs = {"fontsize": "10"} + node_attrs = { + "style": "filled", + "fillcolor": "#0000FF40", + "height": "0.75", + "width": "0.75", + "shape": "circle", + } + graph_attrs = {} + + def update_attrs(which, attrs): + # Update graph attributes. Return list of those which were added. + added = [] + for k, v in attrs.items(): + if k not in G.graph[which]: + G.graph[which][k] = v + added.append(k) + + def clean_attrs(which, added): + # Remove added attributes + for attr in added: + del G.graph[which][attr] + if not G.graph[which]: + del G.graph[which] + + # Update all default values + update_attrs("edge", edge_attrs) + update_attrs("node", node_attrs) + update_attrs("graph", graph_attrs) + + # Convert to agraph, so we inherit default values + A = to_agraph(G) + + # Remove the default values we added to the original graph. + clean_attrs("edge", edge_attrs) + clean_attrs("node", node_attrs) + clean_attrs("graph", graph_attrs) + + # If the user passed in an edgelabel, we update the labels for all edges. + if edgelabel is not None: + if not callable(edgelabel): + + def func(data): + return "".join([" ", str(data[edgelabel]), " "]) + + else: + func = edgelabel + + # update all the edge labels + if G.is_multigraph(): + for u, v, key, data in G.edges(keys=True, data=True): + # PyGraphviz doesn't convert the key to a string. See #339 + edge = A.get_edge(u, v, str(key)) + edge.attr["label"] = str(func(data)) + else: + for u, v, data in G.edges(data=True): + edge = A.get_edge(u, v) + edge.attr["label"] = str(func(data)) + + if path is None: + ext = "png" + if suffix: + suffix = f"_{suffix}.{ext}" + else: + suffix = f".{ext}" + path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) + else: + # Assume the decorator worked and it is a file-object. + pass + + # Write graph to file + A.draw(path=path, format=None, prog=prog, args=args) + path.close() + + # Show graph in a new window (depends on platform configuration) + if show: + from PIL import Image + + Image.open(path.name).show() + + return path.name, A diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/nx_latex.py b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_latex.py new file mode 100644 index 0000000..677def8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_latex.py @@ -0,0 +1,570 @@ +r""" +***** +LaTeX +***** + +Export NetworkX graphs in LaTeX format using the TikZ library within TeX/LaTeX. +Usually, you will want the drawing to appear in a figure environment so +you use ``to_latex(G, caption="A caption")``. If you want the raw +drawing commands without a figure environment use :func:`to_latex_raw`. +And if you want to write to a file instead of just returning the latex +code as a string, use ``write_latex(G, "filename.tex", caption="A caption")``. 
+
+To construct a figure with subfigures for each graph to be shown, provide
+``to_latex`` or ``write_latex`` a list of graphs, a list of subcaptions,
+and a number of rows of subfigures inside the figure.
+
+To be able to refer to the figures or subfigures in latex using ``\\ref``,
+the keyword ``latex_label`` is available for figures and ``sub_labels`` for
+a list of labels, one for each subfigure.
+
+We intend to eventually provide an interface to the TikZ Graph
+features which include e.g. layout algorithms.
+
+Let us know via GitHub what you'd like to see available, or better yet
+give us some code to do it, or even better make a GitHub pull request
+to add the feature.
+
+The TikZ approach
+=================
+Drawing options can be stored on the graph as node/edge attributes, or
+can be provided as dicts keyed by node/edge to a string of the options
+for that node/edge. Similarly a label can be shown for each node/edge
+by specifying the labels as graph node/edge attributes or by providing
+a dict keyed by node/edge to the text to be written for that node/edge.
+
+Options for the tikzpicture environment (e.g. "[scale=2]") can be provided
+via a keyword argument. Similarly default node and edge options can be
+provided through keyword arguments. The default node options are applied
+to the single TikZ "path" that draws all nodes (and no edges). The default edge
+options are applied to a TikZ "scope" which contains a path for each edge.
+
+Examples
+========
+>>> G = nx.path_graph(3)
+>>> nx.write_latex(G, "just_my_figure.tex", as_document=True)
+>>> nx.write_latex(G, "my_figure.tex", caption="A path graph", latex_label="fig1")
+>>> latex_code = nx.to_latex(G)  # a string rather than a file
+
+You can change many features of the nodes and edges.
+
+>>> G = nx.path_graph(4, create_using=nx.DiGraph)
+>>> pos = {n: (n, n) for n in G}  # nodes set on a line
+
+>>> G.nodes[0]["style"] = "blue"
+>>> G.nodes[2]["style"] = "line width=3,draw"
+>>> G.nodes[3]["label"] = "Stop"
+>>> G.edges[(0, 1)]["label"] = "1st Step"
+>>> G.edges[(0, 1)]["label_opts"] = "near start"
+>>> G.edges[(1, 2)]["style"] = "line width=3"
+>>> G.edges[(1, 2)]["label"] = "2nd Step"
+>>> G.edges[(2, 3)]["style"] = "green"
+>>> G.edges[(2, 3)]["label"] = "3rd Step"
+>>> G.edges[(2, 3)]["label_opts"] = "near end"
+
+>>> nx.write_latex(G, "latex_graph.tex", pos=pos, as_document=True)
+
+Then compile the LaTeX using something like ``pdflatex latex_graph.tex``
+and view the pdf file created: ``latex_graph.pdf``.
+
+If you want **subfigures** each containing one graph, you can input a list of graphs.
+
+>>> H1 = nx.path_graph(4)
+>>> H2 = nx.complete_graph(4)
+>>> H3 = nx.path_graph(8)
+>>> H4 = nx.complete_graph(8)
+>>> graphs = [H1, H2, H3, H4]
+>>> caps = ["Path 4", "Complete graph 4", "Path 8", "Complete graph 8"]
+>>> lbls = ["fig2a", "fig2b", "fig2c", "fig2d"]
+>>> nx.write_latex(graphs, "subfigs.tex", n_rows=2, sub_captions=caps, sub_labels=lbls)
+>>> latex_code = nx.to_latex(graphs, n_rows=2, sub_captions=caps, sub_labels=lbls)
+
+>>> node_color = {0: "red", 1: "orange", 2: "blue", 3: "gray!90"}
+>>> edge_width = {e: "line width=1.5" for e in H3.edges}
+>>> pos = nx.circular_layout(H3)
+>>> latex_code = nx.to_latex(H3, pos, node_options=node_color, edge_options=edge_width)
+>>> print(latex_code)
+\documentclass{report}
+\usepackage{tikz}
+\usepackage{subcaption}
+
+\begin{document}
+\begin{figure}
+  \begin{tikzpicture}
+      \draw
+        (1.0, 0.0) node[red] (0){0}
+        (0.707, 0.707) node[orange] (1){1}
+        (-0.0, 1.0) node[blue] (2){2}
+        (-0.707, 0.707) node[gray!90] (3){3}
+        (-1.0, -0.0) node (4){4}
+        (-0.707, -0.707) node (5){5}
+        (0.0, -1.0) node (6){6}
+        (0.707, -0.707) node (7){7};
+      \begin{scope}[-]
+        \draw[line width=1.5] (0) to (1);
+        \draw[line width=1.5] (1) to (2);
+        \draw[line width=1.5] (2) to (3);
+        \draw[line width=1.5] (3) to (4);
+        \draw[line width=1.5] (4) to (5);
+        \draw[line width=1.5] (5) to (6);
+        \draw[line width=1.5] (6) to (7);
+      \end{scope}
+  \end{tikzpicture}
+\end{figure}
+\end{document}
+
+Notes
+-----
+If you want to change the preamble/postamble of the figure/document/subfigure
+environment, use the keyword arguments: `figure_wrapper`, `document_wrapper`,
+`subfigure_wrapper`. The default values are stored in private variables
+e.g. ``nx.drawing.nx_latex._DOC_WRAPPER_TIKZ``.
+
+References
+----------
+TikZ: https://tikz.dev/
+
+TikZ options details: https://tikz.dev/tikz-actions
+"""
+
+import networkx as nx
+
+__all__ = [
+    "to_latex_raw",
+    "to_latex",
+    "write_latex",
+]
+
+
+@nx.utils.not_implemented_for("multigraph")
+def to_latex_raw(
+    G,
+    pos="pos",
+    tikz_options="",
+    default_node_options="",
+    node_options="node_options",
+    node_label="label",
+    default_edge_options="",
+    edge_options="edge_options",
+    edge_label="label",
+    edge_label_options="edge_label_options",
+):
+    """Return a string of the LaTeX/TikZ code to draw `G`
+
+    This function produces just the code for the tikzpicture
+    without any enclosing environment.
+
+    Parameters
+    ==========
+    G : NetworkX graph
+        The NetworkX graph to be drawn
+    pos : string or dict (default "pos")
+        The name of the node attribute on `G` that holds the position of each node.
+        Positions can be sequences of length 2 with numbers for (x,y) coordinates.
+        They can also be strings to denote positions in TikZ style, such as (x, y)
+        or (angle:radius).
+        If a dict, it should be keyed by node to a position.
+        If an empty dict, a circular layout is computed by TikZ.
+    tikz_options : string
+        The tikzpicture options description defining the options for the picture.
+        Often large scale options like `[scale=2]`.
+    default_node_options : string
+        The draw options for a path of nodes. Individual node options override these.
+    node_options : string or dict
+        The name of the node attribute on `G` that holds the options for each node.
+        Or a dict keyed by node to a string holding the options for that node.
+    node_label : string or dict
+        The name of the node attribute on `G` that holds the node label (text)
+        displayed for each node. If the attribute is "" or not present, the node
+        itself is drawn as a string.
LaTeX processing such as ``"$A_1$"`` is allowed. + Or a dict keyed by node to a string holding the label for that node. + default_edge_options : string + The options for the scope drawing all edges. The default is "[-]" for + undirected graphs and "[->]" for directed graphs. + edge_options : string or dict + The name of the edge attribute on `G` that holds the options for each edge. + If the edge is a self-loop and ``"loop" not in edge_options`` the option + "loop," is added to the options for the self-loop edge. Hence you can + use "[loop above]" explicitly, but the default is "[loop]". + Or a dict keyed by edge to a string holding the options for that edge. + edge_label : string or dict + The name of the edge attribute on `G` that holds the edge label (text) + displayed for each edge. If the attribute is "" or not present, no edge + label is drawn. + Or a dict keyed by edge to a string holding the label for that edge. + edge_label_options : string or dict + The name of the edge attribute on `G` that holds the label options for + each edge. For example, "[sloped,above,blue]". The default is no options. + Or a dict keyed by edge to a string holding the label options for that edge. + + Returns + ======= + latex_code : string + The text string which draws the desired graph(s) when compiled by LaTeX. + + See Also + ======== + to_latex + write_latex + """ + i4 = "\n " + i8 = "\n " + + # set up position dict + # TODO allow pos to be None and use a nice TikZ default + if not isinstance(pos, dict): + pos = nx.get_node_attributes(G, pos) + if not pos: + # circular layout with radius 2 + pos = {n: f"({round(360.0 * i / len(G), 3)}:2)" for i, n in enumerate(G)} + for node in G: + if node not in pos: + raise nx.NetworkXError(f"node {node} has no specified pos {pos}") + posnode = pos[node] + if not isinstance(posnode, str): + try: + posx, posy = posnode + pos[node] = f"({round(posx, 3)}, {round(posy, 3)})" + except (TypeError, ValueError): + msg = f"position pos[{node}] is not 2-tuple or a string: {posnode}" + raise nx.NetworkXError(msg) + + # set up all the dicts + if not isinstance(node_options, dict): + node_options = nx.get_node_attributes(G, node_options) + if not isinstance(node_label, dict): + node_label = nx.get_node_attributes(G, node_label) + if not isinstance(edge_options, dict): + edge_options = nx.get_edge_attributes(G, edge_options) + if not isinstance(edge_label, dict): + edge_label = nx.get_edge_attributes(G, edge_label) + if not isinstance(edge_label_options, dict): + edge_label_options = nx.get_edge_attributes(G, edge_label_options) + + # process default options (add brackets or not) + topts = "" if tikz_options == "" else f"[{tikz_options.strip('[]')}]" + defn = "" if default_node_options == "" else f"[{default_node_options.strip('[]')}]" + linestyle = f"{'->' if G.is_directed() else '-'}" + if default_edge_options == "": + defe = "[" + linestyle + "]" + elif "-" in default_edge_options: + defe = default_edge_options + else: + defe = f"[{linestyle},{default_edge_options.strip('[]')}]" + + # Construct the string line by line + result = " \\begin{tikzpicture}" + topts + result += i4 + " \\draw" + defn + # load the nodes + for n in G: + # node options goes inside square brackets + nopts = f"[{node_options[n].strip('[]')}]" if n in node_options else "" + # node text goes inside curly brackets {} + ntext = f"{{{node_label[n]}}}" if n in node_label else f"{{{n}}}" + + result += i8 + f"{pos[n]} node{nopts} ({n}){ntext}" + result += ";\n" + + # load the edges + result += " \\begin{scope}" + 
defe + for edge in G.edges: + u, v = edge[:2] + e_opts = f"{edge_options[edge]}".strip("[]") if edge in edge_options else "" + # add loop options for selfloops if not present + if u == v and "loop" not in e_opts: + e_opts = "loop," + e_opts + e_opts = f"[{e_opts}]" if e_opts != "" else "" + # TODO -- handle bending of multiedges + + els = edge_label_options[edge] if edge in edge_label_options else "" + # edge label options goes inside square brackets [] + els = f"[{els.strip('[]')}]" + # edge text is drawn using the TikZ node command inside curly brackets {} + e_label = f" node{els} {{{edge_label[edge]}}}" if edge in edge_label else "" + + result += i8 + f"\\draw{e_opts} ({u}) to{e_label} ({v});" + + result += "\n \\end{scope}\n \\end{tikzpicture}\n" + return result + + +_DOC_WRAPPER_TIKZ = r"""\documentclass{{report}} +\usepackage{{tikz}} +\usepackage{{subcaption}} + +\begin{{document}} +{content} +\end{{document}}""" + + +_FIG_WRAPPER = r"""\begin{{figure}} +{content}{caption}{label} +\end{{figure}}""" + + +_SUBFIG_WRAPPER = r""" \begin{{subfigure}}{{{size}\textwidth}} +{content}{caption}{label} + \end{{subfigure}}""" + + +def to_latex( + Gbunch, + pos="pos", + tikz_options="", + default_node_options="", + node_options="node_options", + node_label="node_label", + default_edge_options="", + edge_options="edge_options", + edge_label="edge_label", + edge_label_options="edge_label_options", + caption="", + latex_label="", + sub_captions=None, + sub_labels=None, + n_rows=1, + as_document=True, + document_wrapper=_DOC_WRAPPER_TIKZ, + figure_wrapper=_FIG_WRAPPER, + subfigure_wrapper=_SUBFIG_WRAPPER, +): + """Return latex code to draw the graph(s) in `Gbunch` + + The TikZ drawing utility in LaTeX is used to draw the graph(s). + If `Gbunch` is a graph, it is drawn in a figure environment. + If `Gbunch` is an iterable of graphs, each is drawn in a subfigure environment + within a single figure environment. + + If `as_document` is True, the figure is wrapped inside a document environment + so that the resulting string is ready to be compiled by LaTeX. Otherwise, + the string is ready for inclusion in a larger tex document using ``\\include`` + or ``\\input`` statements. + + Parameters + ========== + Gbunch : NetworkX graph or iterable of NetworkX graphs + The NetworkX graph to be drawn or an iterable of graphs + to be drawn inside subfigures of a single figure. + pos : string or list of strings + The name of the node attribute on `G` that holds the position of each node. + Positions can be sequences of length 2 with numbers for (x,y) coordinates. + They can also be strings to denote positions in TikZ style, such as (x, y) + or (angle:radius). + If a dict, it should be keyed by node to a position. + If an empty dict, a circular layout is computed by TikZ. + If you are drawing many graphs in subfigures, use a list of position dicts. + tikz_options : string + The tikzpicture options description defining the options for the picture. + Often large scale options like `[scale=2]`. + default_node_options : string + The draw options for a path of nodes. Individual node options override these. + node_options : string or dict + The name of the node attribute on `G` that holds the options for each node. + Or a dict keyed by node to a string holding the options for that node. + node_label : string or dict + The name of the node attribute on `G` that holds the node label (text) + displayed for each node. If the attribute is "" or not present, the node + itself is drawn as a string. 
LaTeX processing such as ``"$A_1$"`` is allowed.
+        Or a dict keyed by node to a string holding the label for that node.
+    default_edge_options : string
+        The options for the scope drawing all edges. The default is "[-]" for
+        undirected graphs and "[->]" for directed graphs.
+    edge_options : string or dict
+        The name of the edge attribute on `G` that holds the options for each edge.
+        If the edge is a self-loop and ``"loop" not in edge_options`` the option
+        "loop," is added to the options for the self-loop edge. Hence you can
+        use "[loop above]" explicitly, but the default is "[loop]".
+        Or a dict keyed by edge to a string holding the options for that edge.
+    edge_label : string or dict
+        The name of the edge attribute on `G` that holds the edge label (text)
+        displayed for each edge. If the attribute is "" or not present, no edge
+        label is drawn.
+        Or a dict keyed by edge to a string holding the label for that edge.
+    edge_label_options : string or dict
+        The name of the edge attribute on `G` that holds the label options for
+        each edge. For example, "[sloped,above,blue]". The default is no options.
+        Or a dict keyed by edge to a string holding the label options for that edge.
+    caption : string
+        The caption string for the figure environment
+    latex_label : string
+        The latex label used for the figure for easy referral from the main text
+    sub_captions : list of strings
+        The sub_caption string for each subfigure in the figure
+    sub_labels : list of strings
+        The latex label for each subfigure in the figure
+    n_rows : int
+        The number of rows of subfigures to arrange for multiple graphs
+    as_document : bool
+        Whether to wrap the latex code in a document environment for compiling
+    document_wrapper : formatted text string with variable ``content``
+        This text is formatted with the ``content`` embedded in a document
+        environment with a preamble setting up TikZ.
+    figure_wrapper : formatted text string
+        This text is formatted with variables ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+    subfigure_wrapper : formatted text string
+        This text is formatted with variables ``size``, ``content``, ``caption`` and ``label``.
+        It wraps the content and if a caption is provided, adds the latex code for
+        that caption, and if a label is provided, adds the latex code for a label.
+        The size is the vertical size of each row of subfigures as a fraction.
+
+    Returns
+    =======
+    latex_code : string
+        The text string which draws the desired graph(s) when compiled by LaTeX.
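+
+    Examples
+    ========
+    A minimal sketch (see the module docstring for fuller examples)::
+
+        >>> G = nx.path_graph(3)
+        >>> doc_code = nx.to_latex(G, caption="A path graph")  # full document
+        >>> fig_code = nx.to_latex(G, as_document=False)  # figure environment only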
+
+    See Also
+    ========
+    write_latex
+    to_latex_raw
+    """
+    if hasattr(Gbunch, "adj"):
+        raw = to_latex_raw(
+            Gbunch,
+            pos,
+            tikz_options,
+            default_node_options,
+            node_options,
+            node_label,
+            default_edge_options,
+            edge_options,
+            edge_label,
+            edge_label_options,
+        )
+    else:  # iterator of graphs
+        sbf = subfigure_wrapper
+        size = 1 / n_rows
+
+        N = len(Gbunch)
+        if isinstance(pos, str | dict):
+            pos = [pos] * N
+        if sub_captions is None:
+            sub_captions = [""] * N
+        if sub_labels is None:
+            sub_labels = [""] * N
+        if not (len(Gbunch) == len(pos) == len(sub_captions) == len(sub_labels)):
+            raise nx.NetworkXError(
+                "length of Gbunch, pos, sub_captions and sub_labels must agree"
+            )
+
+        raw = ""
+        for G, pos, subcap, sublbl in zip(Gbunch, pos, sub_captions, sub_labels):
+            subraw = to_latex_raw(
+                G,
+                pos,
+                tikz_options,
+                default_node_options,
+                node_options,
+                node_label,
+                default_edge_options,
+                edge_options,
+                edge_label,
+                edge_label_options,
+            )
+            cap = f"    \\caption{{{subcap}}}" if subcap else ""
+            lbl = f"\\label{{{sublbl}}}" if sublbl else ""
+            raw += sbf.format(size=size, content=subraw, caption=cap, label=lbl)
+            raw += "\n"
+
+    # put raw latex code into a figure environment and optionally into a document
+    raw = raw[:-1]
+    cap = f"\n  \\caption{{{caption}}}" if caption else ""
+    lbl = f"\\label{{{latex_label}}}" if latex_label else ""
+    fig = figure_wrapper.format(content=raw, caption=cap, label=lbl)
+    if as_document:
+        return document_wrapper.format(content=fig)
+    return fig
+
+
+@nx.utils.open_file(1, mode="w")
+def write_latex(Gbunch, path, **options):
+    """Write the latex code to draw the graph(s) onto `path`.
+
+    This convenience function creates the latex drawing code as a string
+    and writes that to a file ready to be compiled when `as_document` is True,
+    or ready to be brought into your main LaTeX document using ``\\input``
+    or ``\\include`` statements.
+
+    The `path` argument can be a string filename or a file handle to write to.
+
+    Parameters
+    ----------
+    Gbunch : NetworkX graph or iterable of NetworkX graphs
+        If Gbunch is a graph, it is drawn in a figure environment.
+        If Gbunch is an iterable of graphs, each is drawn in a subfigure
+        environment within a single figure environment.
+    path : string or file
+        Filename or file handle to write to.
+        Filenames ending in .gz or .bz2 will be compressed.
+    options : dict
+        By default, TikZ is used with these options (others are ignored)::
+
+            pos : string or dict or list
+                The name of the node attribute on `G` that holds the position of each node.
+                Positions can be sequences of length 2 with numbers for (x,y) coordinates.
+                They can also be strings to denote positions in TikZ style, such as (x, y)
+                or (angle:radius).
+                If a dict, it should be keyed by node to a position.
+                If an empty dict, a circular layout is computed by TikZ.
+                If you are drawing many graphs in subfigures, use a list of position dicts.
+            tikz_options : string
+                The tikzpicture options description defining the options for the picture.
+                Often large scale options like `[scale=2]`.
+            default_node_options : string
+                The draw options for a path of nodes. Individual node options override these.
+            node_options : string or dict
+                The name of the node attribute on `G` that holds the options for each node.
+                Or a dict keyed by node to a string holding the options for that node.
+            node_label : string or dict
+                The name of the node attribute on `G` that holds the node label (text)
+                displayed for each node. If the attribute is "" or not present, the node
+                itself is drawn as a string.
+                LaTeX processing such as ``"$A_1$"`` is allowed.
+                Or a dict keyed by node to a string holding the label for that node.
+            default_edge_options : string
+                The options for the scope drawing all edges. The default is "[-]" for
+                undirected graphs and "[->]" for directed graphs.
+            edge_options : string or dict
+                The name of the edge attribute on `G` that holds the options for each edge.
+                If the edge is a self-loop and ``"loop" not in edge_options`` the option
+                "loop," is added to the options for the self-loop edge. Hence you can
+                use "[loop above]" explicitly, but the default is "[loop]".
+                Or a dict keyed by edge to a string holding the options for that edge.
+            edge_label : string or dict
+                The name of the edge attribute on `G` that holds the edge label (text)
+                displayed for each edge. If the attribute is "" or not present, no edge
+                label is drawn.
+                Or a dict keyed by edge to a string holding the label for that edge.
+            edge_label_options : string or dict
+                The name of the edge attribute on `G` that holds the label options for
+                each edge. For example, "[sloped,above,blue]". The default is no options.
+                Or a dict keyed by edge to a string holding the label options for that edge.
+            caption : string
+                The caption string for the figure environment
+            latex_label : string
+                The latex label used for the figure for easy referral from the main text
+            sub_captions : list of strings
+                The sub_caption string for each subfigure in the figure
+            sub_labels : list of strings
+                The latex label for each subfigure in the figure
+            n_rows : int
+                The number of rows of subfigures to arrange for multiple graphs
+            as_document : bool
+                Whether to wrap the latex code in a document environment for compiling
+            document_wrapper : formatted text string with variable ``content``
+                This text is formatted with the ``content`` embedded in a document
+                environment with a preamble setting up the TikZ syntax.
+            figure_wrapper : formatted text string
+                This text is formatted with variables ``content``, ``caption`` and ``label``.
+                It wraps the content and if a caption is provided, adds the latex code for
+                that caption, and if a label is provided, adds the latex code for a label.
+            subfigure_wrapper : formatted text string
+                This text is formatted with variables ``size``, ``content``, ``caption`` and ``label``.
+                It wraps the content and if a caption is provided, adds the latex code for
+                that caption, and if a label is provided, adds the latex code for a label.
+                The size is the vertical size of each row of subfigures as a fraction.
+
+    See Also
+    ========
+    to_latex
+    """
+    path.write(to_latex(Gbunch, **options))
diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pydot.py b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pydot.py
new file mode 100644
index 0000000..0fe5cee
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pydot.py
@@ -0,0 +1,361 @@
+"""
+*****
+Pydot
+*****
+
+Import and export NetworkX graphs in Graphviz dot format using pydot.
+
+Either this module or nx_agraph can be used to interface with graphviz.
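+
+For example, a dot-file round trip (a minimal sketch; "K5.dot" is a
+hypothetical filename)::
+
+    >>> K5 = nx.complete_graph(5)
+    >>> nx.nx_pydot.write_dot(K5, "K5.dot")  # doctest: +SKIP
+    >>> H = nx.nx_pydot.read_dot("K5.dot")  # doctest: +SKIP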
+ +Examples +-------- +>>> G = nx.complete_graph(5) +>>> PG = nx.nx_pydot.to_pydot(G) +>>> H = nx.nx_pydot.from_pydot(PG) + +See Also +-------- + - pydot: https://github.com/erocarrera/pydot + - Graphviz: https://www.graphviz.org + - DOT Language: http://www.graphviz.org/doc/info/lang.html +""" + +from locale import getpreferredencoding + +import networkx as nx +from networkx.utils import open_file + +__all__ = [ + "write_dot", + "read_dot", + "graphviz_layout", + "pydot_layout", + "to_pydot", + "from_pydot", +] + + +@open_file(1, mode="w") +def write_dot(G, path): + """Write NetworkX graph G to Graphviz dot format on path. + + Parameters + ---------- + G : NetworkX graph + + path : string or file + Filename or file handle for data output. + Filenames ending in .gz or .bz2 will be compressed. + """ + P = to_pydot(G) + path.write(P.to_string()) + return + + +@open_file(0, mode="r") +@nx._dispatchable(name="pydot_read_dot", graphs=None, returns_graph=True) +def read_dot(path): + """Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the + dot file with the passed path. + + If this file contains multiple graphs, only the first such graph is + returned. All graphs _except_ the first are silently ignored. + + Parameters + ---------- + path : str or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + Returns + ------- + G : MultiGraph or MultiDiGraph + A :class:`MultiGraph` or :class:`MultiDiGraph`. + + Notes + ----- + Use `G = nx.Graph(nx.nx_pydot.read_dot(path))` to return a :class:`Graph` instead of a + :class:`MultiGraph`. + """ + import pydot + + data = path.read() + + # List of one or more "pydot.Dot" instances deserialized from this file. + P_list = pydot.graph_from_dot_data(data) + + # Convert only the first such instance into a NetworkX graph. + return from_pydot(P_list[0]) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_pydot(P): + """Returns a NetworkX graph from a Pydot graph. + + Parameters + ---------- + P : Pydot graph + A graph created with Pydot + + Returns + ------- + G : NetworkX multigraph + A MultiGraph or MultiDiGraph. 
+ + Examples + -------- + >>> K5 = nx.complete_graph(5) + >>> A = nx.nx_pydot.to_pydot(K5) + >>> G = nx.nx_pydot.from_pydot(A) # return MultiGraph + + # make a Graph instead of MultiGraph + >>> G = nx.Graph(nx.nx_pydot.from_pydot(A)) + + """ + # NOTE: Pydot v3 expects a dummy argument whereas Pydot v4 doesn't + # Remove the try-except when Pydot v4 becomes the minimum supported version + try: + strict = P.get_strict() + except TypeError: + strict = P.get_strict(None) # pydot bug: get_strict() shouldn't take argument + multiedges = not strict + + if P.get_type() == "graph": # undirected + if multiedges: + N = nx.MultiGraph() + else: + N = nx.Graph() + else: + if multiedges: + N = nx.MultiDiGraph() + else: + N = nx.DiGraph() + + # assign defaults + name = P.get_name().strip('"') + if name != "": + N.name = name + + # add nodes, attributes to N.node_attr + for p in P.get_node_list(): + n = p.get_name().strip('"') + if n in ("node", "graph", "edge"): + continue + N.add_node(n, **p.get_attributes()) + + # add edges + for e in P.get_edge_list(): + u = e.get_source() + v = e.get_destination() + attr = e.get_attributes() + s = [] + d = [] + + if isinstance(u, str): + s.append(u.strip('"')) + else: + for unodes in u["nodes"]: + s.append(unodes.strip('"')) + + if isinstance(v, str): + d.append(v.strip('"')) + else: + for vnodes in v["nodes"]: + d.append(vnodes.strip('"')) + + for source_node in s: + for destination_node in d: + N.add_edge(source_node, destination_node, **attr) + + # add default attributes for graph, nodes, edges + pattr = P.get_attributes() + if pattr: + N.graph["graph"] = pattr + try: + N.graph["node"] = P.get_node_defaults()[0] + except (IndexError, TypeError): + pass # N.graph['node']={} + try: + N.graph["edge"] = P.get_edge_defaults()[0] + except (IndexError, TypeError): + pass # N.graph['edge']={} + return N + + +def to_pydot(N): + """Returns a pydot graph from a NetworkX graph N. + + Parameters + ---------- + N : NetworkX graph + A graph created with NetworkX + + Examples + -------- + >>> K5 = nx.complete_graph(5) + >>> P = nx.nx_pydot.to_pydot(K5) + + Notes + ----- + + """ + import pydot + + # set Graphviz graph type + if N.is_directed(): + graph_type = "digraph" + else: + graph_type = "graph" + strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph() + + name = N.name + graph_defaults = N.graph.get("graph", {}) + if name == "": + P = pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults) + else: + P = pydot.Dot( + f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults + ) + try: + P.set_node_defaults(**N.graph["node"]) + except KeyError: + pass + try: + P.set_edge_defaults(**N.graph["edge"]) + except KeyError: + pass + + for n, nodedata in N.nodes(data=True): + str_nodedata = {str(k): str(v) for k, v in nodedata.items()} + n = str(n) + p = pydot.Node(n, **str_nodedata) + P.add_node(p) + + if N.is_multigraph(): + for u, v, key, edgedata in N.edges(data=True, keys=True): + str_edgedata = {str(k): str(v) for k, v in edgedata.items() if k != "key"} + u, v = str(u), str(v) + edge = pydot.Edge(u, v, key=str(key), **str_edgedata) + P.add_edge(edge) + + else: + for u, v, edgedata in N.edges(data=True): + str_edgedata = {str(k): str(v) for k, v in edgedata.items()} + u, v = str(u), str(v) + edge = pydot.Edge(u, v, **str_edgedata) + P.add_edge(edge) + return P + + +def graphviz_layout(G, prog="neato", root=None): + """Create node positions using Pydot and Graphviz. + + Returns a dictionary of positions keyed by node. 
+ + Parameters + ---------- + G : NetworkX Graph + The graph for which the layout is computed. + prog : string (default: 'neato') + The name of the GraphViz program to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. + + Returns + ------- + Dictionary of (x, y) positions keyed by node. + + Examples + -------- + >>> G = nx.complete_graph(4) + >>> pos = nx.nx_pydot.graphviz_layout(G) + >>> pos = nx.nx_pydot.graphviz_layout(G, prog="dot") + + Notes + ----- + This is a wrapper for pydot_layout. + """ + return pydot_layout(G=G, prog=prog, root=root) + + +def pydot_layout(G, prog="neato", root=None): + """Create node positions using :mod:`pydot` and Graphviz. + + Parameters + ---------- + G : Graph + NetworkX graph to be laid out. + prog : string (default: 'neato') + Name of the GraphViz command to use for layout. + Options depend on GraphViz version but may include: + 'dot', 'twopi', 'fdp', 'sfdp', 'circo' + root : Node from G or None (default: None) + The node of G from which to start some layout algorithms. + + Returns + ------- + dict + Dictionary of positions keyed by node. + + Examples + -------- + >>> G = nx.complete_graph(4) + >>> pos = nx.nx_pydot.pydot_layout(G) + >>> pos = nx.nx_pydot.pydot_layout(G, prog="dot") + + Notes + ----- + If you use complex node objects, they may have the same string + representation and GraphViz could treat them as the same node. + The layout may assign both nodes a single location. See Issue #1568 + If this occurs in your case, consider relabeling the nodes just + for the layout computation using something similar to:: + + H = nx.convert_node_labels_to_integers(G, label_attribute="node_label") + H_layout = nx.nx_pydot.pydot_layout(H, prog="dot") + G_layout = {H.nodes[n]["node_label"]: p for n, p in H_layout.items()} + + """ + import pydot + + P = to_pydot(G) + if root is not None: + P.set("root", str(root)) + + # List of low-level bytes comprising a string in the dot language converted + # from the passed graph with the passed external GraphViz command. + D_bytes = P.create_dot(prog=prog) + + # Unique string decoded from these bytes with the preferred locale encoding + D = str(D_bytes, encoding=getpreferredencoding()) + + if D == "": # no data returned + print(f"Graphviz layout with {prog} failed") + print() + print("To debug what happened try:") + print("P = nx.nx_pydot.to_pydot(G)") + print('P.write_dot("file.dot")') + print(f"And then run {prog} on file.dot") + return + + # List of one or more "pydot.Dot" instances deserialized from this string. + Q_list = pydot.graph_from_dot_data(D) + assert len(Q_list) == 1 + + # The first and only such instance, as guaranteed by the above assertion. 
+    Q = Q_list[0]
+
+    node_pos = {}
+    for n in G.nodes():
+        str_n = str(n)
+        node = Q.get_node(pydot.quote_id_if_necessary(str_n))
+
+        if isinstance(node, list):
+            node = node[0]
+        pos = node.get_pos()
+        if pos is not None:
+            # strip leading and trailing double quotes before parsing "x,y"
+            xx, yy = pos[1:-1].split(",")
+            node_pos[n] = (float(xx), float(yy))
+    return node_pos
diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pylab.py b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pylab.py
new file mode 100644
index 0000000..778d6e8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/drawing/nx_pylab.py
@@ -0,0 +1,3021 @@
+"""
+**********
+Matplotlib
+**********
+
+Draw networks with matplotlib.
+
+Examples
+--------
+>>> G = nx.complete_graph(5)
+>>> nx.draw(G)
+
+See Also
+--------
+ - :doc:`matplotlib <matplotlib:index>`
+ - :func:`matplotlib.pyplot.scatter`
+ - :obj:`matplotlib.patches.FancyArrowPatch`
+"""
+
+import collections
+import itertools
+from numbers import Number
+
+import networkx as nx
+
+__all__ = [
+    "display",
+    "apply_matplotlib_colors",
+    "draw",
+    "draw_networkx",
+    "draw_networkx_nodes",
+    "draw_networkx_edges",
+    "draw_networkx_labels",
+    "draw_networkx_edge_labels",
+    "draw_bipartite",
+    "draw_circular",
+    "draw_kamada_kawai",
+    "draw_random",
+    "draw_spectral",
+    "draw_spring",
+    "draw_planar",
+    "draw_shell",
+    "draw_forceatlas2",
+]
+
+
+def apply_matplotlib_colors(
+    G, src_attr, dest_attr, map, vmin=None, vmax=None, nodes=True
+):
+    """
+    Apply colors from a matplotlib colormap to a graph.
+
+    Reads values from `src_attr` and uses a matplotlib colormap to
+    produce a color, then writes that color to `dest_attr`.
+
+    Parameters
+    ----------
+    G : nx.Graph
+        The graph to read and compute colors for.
+
+    src_attr : str or other attribute name
+        The name of the attribute to read from the graph.
+
+    dest_attr : str or other attribute name
+        The name of the attribute to write to on the graph.
+
+    map : matplotlib colormap
+        The matplotlib colormap to use.
+
+    vmin : float, default None
+        The minimum value for scaling the colormap. If `None`, find the
+        minimum value of `src_attr`.
+
+    vmax : float, default None
+        The maximum value for scaling the colormap. If `None`, find the
+        maximum value of `src_attr`.
+
+    nodes : bool, default True
+        Whether the attribute names refer to node attributes (``True``)
+        or edge attributes (``False``).
+    """
+    import matplotlib as mpl
+
+    if nodes:
+        type_iter = G.nodes()
+    elif G.is_multigraph():
+        type_iter = G.edges(keys=True)
+    else:
+        type_iter = G.edges()
+
+    if vmin is None or vmax is None:
+        vals = [type_iter[a][src_attr] for a in type_iter]
+        if vmin is None:
+            vmin = min(vals)
+        if vmax is None:
+            vmax = max(vals)
+
+    mapper = mpl.cm.ScalarMappable(cmap=map)
+    mapper.set_clim(vmin, vmax)
+
+    def do_map(x):
+        # Cast numpy scalars to float
+        return tuple(float(x) for x in mapper.to_rgba(x))
+
+    if nodes:
+        nx.set_node_attributes(
+            G, {n: do_map(G.nodes[n][src_attr]) for n in G.nodes()}, dest_attr
+        )
+    else:
+        nx.set_edge_attributes(
+            G, {e: do_map(G.edges[e][src_attr]) for e in type_iter}, dest_attr
+        )
+
+
+def display(
+    G,
+    canvas=None,
+    **kwargs,
+):
+    """Draw the graph G.
+
+    Draw the graph as a collection of nodes connected by edges.
+    The exact details of what the graph looks like are controlled by the below
+    attributes. All nodes and nodes at the end of visible edges must have a
+    position set, but nearly all other node and edge attributes are optional;
+    nodes or edges missing the attribute will use the default listed below.
A more + complete discription of each parameter is given below this summary. + + .. list-table:: Default Visualization Attributes + :widths: 25 25 50 + :header-rows: 1 + + * - Parameter + - Default Attribute + - Default Value + * - pos + - `"pos"` + - If there is not position, a layout will be calculated with `nx.spring_layout`. + * - node_visible + - `"visible"` + - True + * - node_color + - `"color"` + - #1f78b4 + * - node_size + - `"size"` + - 300 + * - node_label + - `"label"` + - Dict describing the node label. Defaults create a black text with + the node name as the label. The dict respects these keys and defaults: + + * size : 12 + * color : black + * family : sans serif + * weight : normal + * alpha : 1.0 + * h_align : center + * v_align : center + * bbox : Dict describing a `matplotlib.patches.FancyBboxPatch`. + Default is None. + + * - node_shape + - `"shape"` + - "o" + * - node_alpha + - `"alpha"` + - 1.0 + * - node_border_width + - `"border_width"` + - 1.0 + * - node_border_color + - `"border_color"` + - Matching node_color + * - edge_visible + - `"visible"` + - True + * - edge_width + - `"width"` + - 1.0 + * - edge_color + - `"color"` + - Black (#000000) + * - edge_label + - `"label"` + - Dict describing the edge label. Defaults create black text with a + white bounding box. The dictionary respects these keys and defaults: + + * size : 12 + * color : black + * family : sans serif + * weight : normal + * alpha : 1.0 + * bbox : Dict describing a `matplotlib.patches.FancyBboxPatch`. + Default {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)} + * h_align : "center" + * v_align : "center" + * pos : 0.5 + * rotate : True + + * - edge_style + - `"style"` + - "-" + * - edge_alpha + - `"alpha"` + - 1.0 + * - arrowstyle + - `"arrowstyle"` + - ``"-|>"`` if `G` is directed else ``"-"`` + * - arrowsize + - `"arrowsize"` + - 10 if `G` is directed else 0 + * - edge_curvature + - `"curvature"` + - arc3 + * - edge_source_margin + - `"source_margin"` + - 0 + * - edge_target_margin + - `"target_margin"` + - 0 + + Parameters + ---------- + G : graph + A networkx graph + + canvas : Matplotlib Axes object, optional + Draw the graph in specified Matplotlib axes + + pos : string or function, default "pos" + A string naming the node attribute storing the position of nodes as a tuple. + Or a function to be called with input `G` which returns the layout as a dict keyed + by node to position tuple like the NetworkX layout functions. + If no nodes in the graph has the attribute, a spring layout is calculated. + + node_visible : string or bool, default visible + A string naming the node attribute which stores if a node should be drawn. + If `True`, all nodes will be visible while if `False` no nodes will be visible. + If incomplete, nodes missing this attribute will be shown by default. + + node_color : string, default "color" + A string naming the node attribute which stores the color of each node. + Visible nodes without this attribute will use '#1f78b4' as a default. + + node_size : string or number, default "size" + A string naming the node attribute which stores the size of each node. + Visible nodes without this attribute will use a default size of 300. + + node_label : string or bool, default "label" + A string naming the node attribute which stores the label of each node. + The attribute value can be a string, False (no label for that node), + True (the node is the label) or a dict keyed by node to the label. 
+ + If a dict is specified, these keys are read to further control the label: + + * label : The text of the label; default: name of the node + * size : Font size of the label; default: 12 + * color : Font color of the label; default: black + * family : Font family of the label; default: "sans-serif" + * weight : Font weight of the label; default: "normal" + * alpha : Alpha value of the label; default: 1.0 + * h_align : The horizontal alignment of the label. + one of "left", "center", "right"; default: "center" + * v_align : The vertical alignment of the label. + one of "top", "center", "bottom"; default: "center" + * bbox : A dict of parameters for `matplotlib.patches.FancyBboxPatch`. + + Visible nodes without this attribute will be treated as if the value was True. + + node_shape : string, default "shape" + A string naming the node attribute which stores the label of each node. + The values of this attribute are expected to be one of the matplotlib shapes, + one of 'so^>v"`` for directed graphs. + + See `matplotlib.patches.ArrowStyle` for more options + + arrowsize : string or int, default "arrow_size" + A string naming the edge attribute which stores the size of the arrowhead for each + edge. Visible edges without this attribute will use a default value of 10. + + edge_curvature : string, default "curvature" + A string naming the edge attribute storing the curvature and connection style + of each edge. Visible edges without this attribute will use "arc3" as a default + value, resulting an a straight line between the two nodes. Curvature can be given + as 'arc3,rad=0.2' to specify both the style and radius of curvature. + + Please see `matplotlib.patches.ConnectionStyle` and + `matplotlib.patches.FancyArrowPatch` for more information. + + edge_source_margin : string or int, default "source_margin" + A string naming the edge attribute which stores the minimum margin (gap) between + the source node and the start of the edge. Visible edges without this attribute + will use a default value of 0. + + edge_target_margin : string or int, default "target_margin" + A string naming the edge attribute which stores the minimumm margin (gap) between + the target node and the end of the edge. Visible edges without this attribute + will use a default value of 0. + + hide_ticks : bool, default True + Weather to remove the ticks from the axes of the matplotlib object. + + Raises + ------ + NetworkXError + If a node or edge is missing a required parameter such as `pos` or + if `display` receives an argument not listed above. + + ValueError + If a node or edge has an invalid color format, i.e. not a color string, + rgb tuple or rgba tuple. + + Returns + ------- + The input graph. This is potentially useful for dispatching visualization + functions. 
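+
+    Examples
+    --------
+    A minimal sketch; if no positions are stored on the nodes, a spring
+    layout is computed automatically::
+
+        >>> G = nx.house_graph()
+        >>> nx.display(G)  # doctest: +SKIP
+
+    Node attributes drive the styling (the attribute names below are the
+    documented defaults)::
+
+        >>> nx.set_node_attributes(G, "orange", "color")
+        >>> nx.display(G)  # doctest: +SKIP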
+ """ + from collections import Counter + + import matplotlib as mpl + import matplotlib.pyplot as plt + import numpy as np + + defaults = { + "node_pos": None, + "node_visible": True, + "node_color": "#1f78b4", + "node_size": 300, + "node_label": { + "size": 12, + "color": "#000000", + "family": "sans-serif", + "weight": "normal", + "alpha": 1.0, + "h_align": "center", + "v_align": "center", + "bbox": None, + }, + "node_shape": "o", + "node_alpha": 1.0, + "node_border_width": 1.0, + "node_border_color": "face", + "edge_visible": True, + "edge_width": 1.0, + "edge_color": "#000000", + "edge_label": { + "size": 12, + "color": "#000000", + "family": "sans-serif", + "weight": "normal", + "alpha": 1.0, + "bbox": {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}, + "h_align": "center", + "v_align": "center", + "pos": 0.5, + "rotate": True, + }, + "edge_style": "-", + "edge_alpha": 1.0, + "edge_arrowstyle": "-|>" if G.is_directed() else "-", + "edge_arrowsize": 10 if G.is_directed() else 0, + "edge_curvature": "arc3", + "edge_source_margin": 0, + "edge_target_margin": 0, + "hide_ticks": True, + } + + # Check arguments + for kwarg in kwargs: + if kwarg not in defaults: + raise nx.NetworkXError( + f"Unrecongized visualization keyword argument: {kwarg}" + ) + + if canvas is None: + canvas = plt.gca() + + if kwargs.get("hide_ticks", defaults["hide_ticks"]): + canvas.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + ### Helper methods and classes + + def node_property_sequence(seq, attr): + """Return a list of attribute values for `seq`, using a default if needed""" + + # All node attribute parameters start with "node_" + param_name = f"node_{attr}" + default = defaults[param_name] + attr = kwargs.get(param_name, attr) + + if default is None: + # raise instead of using non-existant default value + for n in seq: + if attr not in node_subgraph.nodes[n]: + raise nx.NetworkXError(f"Attribute '{attr}' missing for node {n}") + + # If `attr` is not a graph attr and was explicitly passed as an argument + # it must be a user-default value. 
Allow attr=None to tell draw to skip + # attributes which are on the graph + if ( + attr is not None + and nx.get_node_attributes(node_subgraph, attr) == {} + and any(attr == v for k, v in kwargs.items() if "node" in k) + ): + return [attr for _ in seq] + + return [node_subgraph.nodes[n].get(attr, default) for n in seq] + + def compute_colors(color, alpha): + if isinstance(color, str): + rgba = mpl.colors.colorConverter.to_rgba(color) + # Using a non-default alpha value overrides any alpha value in the color + if alpha != defaults["node_alpha"]: + return (rgba[0], rgba[1], rgba[2], alpha) + return rgba + + if isinstance(color, tuple) and len(color) == 3: + return (color[0], color[1], color[2], alpha) + + if isinstance(color, tuple) and len(color) == 4: + return color + + raise ValueError(f"Invalid format for color: {color}") + + # Find which edges can be plotted as a line collection + # + # Non-default values for these attributes require fancy arrow patches: + # - any arrow style (including the default -|> for directed graphs) + # - arrow size (by extension of style) + # - connection style + # - min_source_margin + # - min_target_margin + + def collection_compatible(e): + return ( + get_edge_attr(e, "arrowstyle") == "-" + and get_edge_attr(e, "curvature") == "arc3" + and get_edge_attr(e, "source_margin") == 0 + and get_edge_attr(e, "target_margin") == 0 + # Self-loops will use fancy arrow patches + and e[0] != e[1] + ) + + def edge_property_sequence(seq, attr): + """Return a list of attribute values for `seq`, using a default if needed""" + + param_name = f"edge_{attr}" + default = defaults[param_name] + attr = kwargs.get(param_name, attr) + + if default is None: + # raise instead of using non-existant default value + for e in seq: + if attr not in edge_subgraph.edges[e]: + raise nx.NetworkXError(f"Attribute '{attr}' missing for edge {e}") + + if ( + attr is not None + and nx.get_edge_attributes(edge_subgraph, attr) == {} + and any(attr == v for k, v in kwargs.items() if "edge" in k) + ): + return [attr for _ in seq] + + return [edge_subgraph.edges[e].get(attr, default) for e in seq] + + def get_edge_attr(e, attr): + """Return the final edge attribute value, using default if not None""" + + param_name = f"edge_{attr}" + default = defaults[param_name] + attr = kwargs.get(param_name, attr) + + if default is None and attr not in edge_subgraph.edges[e]: + raise nx.NetworkXError(f"Attribute '{attr}' missing from edge {e}") + + if ( + attr is not None + and nx.get_edge_attributes(edge_subgraph, attr) == {} + and attr in kwargs.values() + ): + return attr + + return edge_subgraph.edges[e].get(attr, default) + + def get_node_attr(n, attr, use_edge_subgraph=True): + """Return the final node attribute value, using default if not None""" + subgraph = edge_subgraph if use_edge_subgraph else node_subgraph + + param_name = f"node_{attr}" + default = defaults[param_name] + attr = kwargs.get(param_name, attr) + + if default is None and attr not in subgraph.nodes[n]: + raise nx.NetworkXError(f"Attribute '{attr}' missing from node {n}") + + if ( + attr is not None + and nx.get_node_attributes(subgraph, attr) == {} + and attr in kwargs.values() + ): + return attr + + return subgraph.nodes[n].get(attr, default) + + # Taken from ConnectionStyleFactory + def self_loop(edge_index, node_size): + def self_loop_connection(posA, posB, *args, **kwargs): + if not np.all(posA == posB): + raise nx.NetworkXError( + "`self_loop` connection style method" + "is only to be used for self-loops" + ) + # this is called with 
_screen space_ values + # so convert back to data space + data_loc = canvas.transData.inverted().transform(posA) + # Scale self loop based on the size of the base node + # Size of nodes are given in points ** 2 and each point is 1/72 of an inch + v_shift = np.sqrt(node_size) / 72 + h_shift = v_shift * 0.5 + # put the top of the loop first so arrow is not hidden by node + path = np.asarray( + [ + # 1 + [0, v_shift], + # 4 4 4 + [h_shift, v_shift], + [h_shift, 0], + [0, 0], + # 4 4 4 + [-h_shift, 0], + [-h_shift, v_shift], + [0, v_shift], + ] + ) + # Rotate self loop 90 deg. if more than 1 + # This will allow for maximum of 4 visible self loops + if edge_index % 4: + x, y = path.T + for _ in range(edge_index % 4): + x, y = y, -x + path = np.array([x, y]).T + return mpl.path.Path( + canvas.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4] + ) + + return self_loop_connection + + def to_marker_edge(size, marker): + if marker in "s^>v 90: + angle -= 180 + if angle < -90: + angle += 180 + (x, y) = self.ax.transData.inverted().transform((x, y)) + return x, y, angle + + def draw(self, renderer): + # recalculate the text position and angle + self.x, self.y, self.angle = self._update_text_pos_angle(self.arrow) + self.set_position((self.x, self.y)) + self.set_rotation(self.angle) + # redraw text + super().draw(renderer) + + ### Draw the nodes first + node_visible = kwargs.get("node_visible", "visible") + if isinstance(node_visible, bool): + if node_visible: + visible_nodes = G.nodes() + else: + visible_nodes = [] + else: + visible_nodes = [ + n for n, v in nx.get_node_attributes(G, node_visible, True).items() if v + ] + + node_subgraph = G.subgraph(visible_nodes) + + # Ignore the default dict value since that's for default values to use, not + # default attribute name + pos = kwargs.get("node_pos", "pos") + + default_display_pos_attr = "display's position attribute name" + if callable(pos): + nx.set_node_attributes( + node_subgraph, pos(node_subgraph), default_display_pos_attr + ) + pos = default_display_pos_attr + kwargs["node_pos"] = default_display_pos_attr + elif nx.get_node_attributes(G, pos) == {}: + nx.set_node_attributes( + node_subgraph, nx.spring_layout(node_subgraph), default_display_pos_attr + ) + pos = default_display_pos_attr + kwargs["node_pos"] = default_display_pos_attr + + # Each shape requires a new scatter object since they can't have different + # shapes. + if len(visible_nodes) > 0: + node_shape = kwargs.get("node_shape", "shape") + for shape in Counter( + nx.get_node_attributes( + node_subgraph, node_shape, defaults["node_shape"] + ).values() + ): + # Filter position just on this shape. + nodes_with_shape = [ + n + for n, s in node_subgraph.nodes(data=node_shape) + if s == shape or (s is None and shape == defaults["node_shape"]) + ] + # There are two property sequences to create before hand. + # 1. position, since it is used for x and y parameters to scatter + # 2. edgecolor, since the spaeical 'face' parameter value can only be + # be passed in as the sole string, not part of a list of strings. 
+ position = np.asarray(node_property_sequence(nodes_with_shape, "pos")) + color = np.asarray( + [ + compute_colors(c, a) + for c, a in zip( + node_property_sequence(nodes_with_shape, "color"), + node_property_sequence(nodes_with_shape, "alpha"), + ) + ] + ) + border_color = np.asarray( + [ + ( + c + if ( + c := get_node_attr( + n, + "border_color", + False, + ) + ) + != "face" + else color[i] + ) + for i, n in enumerate(nodes_with_shape) + ] + ) + canvas.scatter( + position[:, 0], + position[:, 1], + s=node_property_sequence(nodes_with_shape, "size"), + c=color, + marker=shape, + linewidths=node_property_sequence(nodes_with_shape, "border_width"), + edgecolors=border_color, + zorder=2, + ) + + ### Draw node labels + node_label = kwargs.get("node_label", "label") + # Plot labels if node_label is not None and not False + if node_label is not None and node_label is not False: + default_dict = {} + if isinstance(node_label, dict): + default_dict = node_label + node_label = None + + for n, lbl in node_subgraph.nodes(data=node_label): + if lbl is False: + continue + + # We work with label dicts down here... + if not isinstance(lbl, dict): + lbl = {"label": lbl if lbl is not None else n} + + lbl_text = lbl.get("label", n) + if not isinstance(lbl_text, str): + lbl_text = str(lbl_text) + + lbl.update(default_dict) + x, y = node_subgraph.nodes[n][pos] + canvas.text( + x, + y, + lbl_text, + size=lbl.get("size", defaults["node_label"]["size"]), + color=lbl.get("color", defaults["node_label"]["color"]), + family=lbl.get("family", defaults["node_label"]["family"]), + weight=lbl.get("weight", defaults["node_label"]["weight"]), + horizontalalignment=lbl.get( + "h_align", defaults["node_label"]["h_align"] + ), + verticalalignment=lbl.get("v_align", defaults["node_label"]["v_align"]), + transform=canvas.transData, + bbox=lbl.get("bbox", defaults["node_label"]["bbox"]), + ) + + ### Draw edges + + edge_visible = kwargs.get("edge_visible", "visible") + if isinstance(edge_visible, bool): + if edge_visible: + visible_edges = G.edges() + else: + visible_edges = [] + else: + visible_edges = [ + e for e, v in nx.get_edge_attributes(G, edge_visible, True).items() if v + ] + + edge_subgraph = G.edge_subgraph(visible_edges) + print(nx.get_node_attributes(node_subgraph, pos)) + nx.set_node_attributes( + edge_subgraph, nx.get_node_attributes(node_subgraph, pos), name=pos + ) + + collection_edges = ( + [e for e in edge_subgraph.edges(keys=True) if collection_compatible(e)] + if edge_subgraph.is_multigraph() + else [e for e in edge_subgraph.edges() if collection_compatible(e)] + ) + non_collection_edges = ( + [e for e in edge_subgraph.edges(keys=True) if not collection_compatible(e)] + if edge_subgraph.is_multigraph() + else [e for e in edge_subgraph.edges() if not collection_compatible(e)] + ) + edge_position = np.asarray( + [ + ( + get_node_attr(u, "pos", use_edge_subgraph=True), + get_node_attr(v, "pos", use_edge_subgraph=True), + ) + for u, v, *_ in collection_edges + ] + ) + + # Only plot a line collection if needed + if len(collection_edges) > 0: + edge_collection = mpl.collections.LineCollection( + edge_position, + colors=edge_property_sequence(collection_edges, "color"), + linewidths=edge_property_sequence(collection_edges, "width"), + linestyle=edge_property_sequence(collection_edges, "style"), + alpha=edge_property_sequence(collection_edges, "alpha"), + antialiaseds=(1,), + zorder=1, + ) + canvas.add_collection(edge_collection) + + fancy_arrows = {} + if len(non_collection_edges) > 0: + for e in 
non_collection_edges: + # Cache results for use in edge labels + fancy_arrows[e] = build_fancy_arrow(e) + canvas.add_patch(fancy_arrows[e]) + + ### Draw edge labels + edge_label = kwargs.get("edge_label", "label") + default_dict = {} + if isinstance(edge_label, dict): + default_dict = edge_label + # Restore the default label attribute key of 'label' + edge_label = "label" + + # Handle multigraphs + edge_label_data = ( + edge_subgraph.edges(data=edge_label, keys=True) + if edge_subgraph.is_multigraph() + else edge_subgraph.edges(data=edge_label) + ) + if edge_label is not None and edge_label is not False: + for *e, lbl in edge_label_data: + e = tuple(e) + # I'm not sure how I want to handle None here... For now it means no label + if lbl is False or lbl is None: + continue + + if not isinstance(lbl, dict): + lbl = {"label": lbl} + + lbl.update(default_dict) + lbl_text = lbl.get("label") + if not isinstance(lbl_text, str): + lbl_text = str(lbl_text) + + # In the old code, every non-self-loop is placed via a fancy arrow patch + # Only compute a new fancy arrow if needed by caching the results from + # edge placement. + try: + arrow = fancy_arrows[e] + except KeyError: + arrow = build_fancy_arrow(e) + + if e[0] == e[1]: + # Taken directly from draw_networkx_edge_labels + connectionstyle_obj = arrow.get_connectionstyle() + posA = canvas.transData.transform(edge_subgraph.nodes[e[0]][pos]) + path_disp = connectionstyle_obj(posA, posA) + path_data = canvas.transData.inverted().transform_path(path_disp) + x, y = path_data.vertices[0] + canvas.text( + x, + y, + lbl_text, + size=lbl.get("size", defaults["edge_label"]["size"]), + color=lbl.get("color", defaults["edge_label"]["color"]), + family=lbl.get("family", defaults["edge_label"]["family"]), + weight=lbl.get("weight", defaults["edge_label"]["weight"]), + alpha=lbl.get("alpha", defaults["edge_label"]["alpha"]), + horizontalalignment=lbl.get( + "h_align", defaults["edge_label"]["h_align"] + ), + verticalalignment=lbl.get( + "v_align", defaults["edge_label"]["v_align"] + ), + rotation=0, + transform=canvas.transData, + bbox=lbl.get("bbox", defaults["edge_label"]["bbox"]), + zorder=1, + ) + continue + + CurvedArrowText( + arrow, + lbl_text, + size=lbl.get("size", defaults["edge_label"]["size"]), + color=lbl.get("color", defaults["edge_label"]["color"]), + family=lbl.get("family", defaults["edge_label"]["family"]), + weight=lbl.get("weight", defaults["edge_label"]["weight"]), + alpha=lbl.get("alpha", defaults["edge_label"]["alpha"]), + bbox=lbl.get("bbox", defaults["edge_label"]["bbox"]), + horizontalalignment=lbl.get( + "h_align", defaults["edge_label"]["h_align"] + ), + verticalalignment=lbl.get("v_align", defaults["edge_label"]["v_align"]), + label_pos=lbl.get("pos", defaults["edge_label"]["pos"]), + labels_horizontal=lbl.get("rotate", defaults["edge_label"]["rotate"]), + transform=canvas.transData, + zorder=1, + ax=canvas, + ) + + # If we had to add an attribute, remove it here + if pos == default_display_pos_attr: + nx.remove_node_attributes(G, default_display_pos_attr) + + return G + + +def draw(G, pos=None, ax=None, **kwds): + """Draw the graph G with Matplotlib. + + Draw the graph as a simple representation with no node + labels or edge labels and using the full Matplotlib figure area + and no axis labels by default. See draw_networkx() for more + full-featured drawing that allows title, axis labels etc. 
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary, optional
+        A dictionary with nodes as keys and positions as values.
+        If not specified a spring layout positioning will be computed.
+        See :py:mod:`networkx.drawing.layout` for functions that
+        compute node positions.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    kwds : optional keywords
+        See networkx.draw_networkx() for a description of optional keywords.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> nx.draw(G)
+    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout
+
+    See Also
+    --------
+    draw_networkx
+    draw_networkx_nodes
+    draw_networkx_edges
+    draw_networkx_labels
+    draw_networkx_edge_labels
+
+    Notes
+    -----
+    This function has the same name as pylab.draw and pyplot.draw,
+    so beware when using `from networkx import *`, since you might
+    overwrite the pylab.draw function.
+
+    With pyplot use
+
+    >>> import matplotlib.pyplot as plt
+    >>> G = nx.dodecahedral_graph()
+    >>> nx.draw(G)  # networkx draw()
+    >>> plt.draw()  # pyplot draw()
+
+    Also see the NetworkX drawing examples at
+    https://networkx.org/documentation/latest/auto_examples/index.html
+    """
+
+    import matplotlib.pyplot as plt
+
+    if ax is None:
+        cf = plt.gcf()
+    else:
+        cf = ax.get_figure()
+    cf.set_facecolor("w")
+    if ax is None:
+        if cf.axes:
+            ax = cf.gca()
+        else:
+            ax = cf.add_axes((0, 0, 1, 1))
+
+    if "with_labels" not in kwds:
+        kwds["with_labels"] = "labels" in kwds
+
+    draw_networkx(G, pos=pos, ax=ax, **kwds)
+    ax.set_axis_off()
+    plt.draw_if_interactive()
+    return
+
+
+def draw_networkx(G, pos=None, arrows=None, with_labels=True, **kwds):
+    r"""Draw the graph G using Matplotlib.
+
+    Draw the graph with Matplotlib with options for node positions,
+    labeling, titles, and many other drawing features.
+    See draw() for simple drawing without labels or axes.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary, optional
+        A dictionary with nodes as keys and positions as values.
+        If not specified a spring layout positioning will be computed.
+        See :py:mod:`networkx.drawing.layout` for functions that
+        compute node positions.
+
+    arrows : bool or None, optional (default=None)
+        If `None`, directed graphs draw arrowheads with
+        `~matplotlib.patches.FancyArrowPatch`, while undirected graphs draw edges
+        via `~matplotlib.collections.LineCollection` for speed.
+        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
+        If `False`, draw edges using LineCollection (linear and fast).
+        Note: Arrows will be the same color as edges.
+
+    arrowstyle : str (default='-\|>' for directed graphs)
+        For directed graphs, choose the style of the arrowheads.
+        For undirected graphs it defaults to '-'.
+
+        See `matplotlib.patches.ArrowStyle` for more options.
+
+    arrowsize : int or list (default=10)
+        For directed graphs, choose the size of the arrow head's length and
+        width. A list of values can be passed in to assign a different size
+        to each arrow head's length and width.
+        See `matplotlib.patches.FancyArrowPatch` for attribute `mutation_scale`
+        for more info.
+
+    with_labels : bool (default=True)
+        Set to True to draw labels on the nodes.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    nodelist : list (default=list(G))
+        Draw only specified nodes
+
+    edgelist : list (default=list(G.edges()))
+        Draw only specified edges
+
+    node_size : scalar or array (default=300)
+        Size of nodes. If an array is specified it must be the
+        same length as nodelist.
+
+    node_color : color or array of colors (default='#1f78b4')
+        Node color. Can be a single color or a sequence of colors with the same
+        length as nodelist. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1. If numeric values are specified they will be
+        mapped to colors using the cmap and vmin,vmax parameters. See
+        matplotlib.scatter for more details.
+
+    node_shape : string (default='o')
+        The shape of the node. Specification is as matplotlib.scatter
+        marker, one of 'so^>v<dph8'.
+
+    alpha : float or None (default=None)
+        The node and edge transparency.
+
+    cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of nodes.
+
+    vmin, vmax : float, optional
+        Minimum and maximum for node colormap scaling.
+
+    linewidths : scalar or sequence (default=1.0)
+        Line width of symbol border.
+
+    width : float or array of floats (default=1.0)
+        Line width of edges.
+
+    edge_color : color or array of colors (default='k')
+        Edge color. Can be a single color or a sequence of colors with the same
+        length as edgelist. If numeric values are specified they will be
+        mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.
+
+    edge_cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of edges.
+
+    edge_vmin, edge_vmax : floats, optional
+        Minimum and maximum for edge colormap scaling.
+
+    style : string (default='solid')
+        Edge line style e.g.: '-', '--', '-.', ':'
+        or words like 'solid' or 'dashed'.
+
+    labels : dictionary (default=None)
+        Node labels in a dictionary of text labels keyed by node.
+
+    font_size : int (default=12 for nodes, 10 for edges)
+        Font size for text labels.
+
+    font_color : color (default='k' black)
+        Font color string. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1.
+
+    font_weight : string (default='normal')
+        Font weight.
+
+    font_family : string (default='sans-serif')
+        Font family.
+
+    label : string, optional
+        Label for graph legend.
+
+    hide_ticks : bool, optional
+        Hide ticks of axes. When `True` (the default), ticks and ticklabels
+        are removed from the axes. To set ticks and tick labels to the pyplot
+        default, use ``hide_ticks=False``.
+
+    kwds : optional keywords
+        See networkx.draw_networkx_nodes(), networkx.draw_networkx_edges(), and
+        networkx.draw_networkx_labels() for a description of optional keywords.
+
+    Notes
+    -----
+    For directed graphs, arrows are drawn at the head end. Arrows can be
+    turned off with keyword arrows=False.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> nx.draw(G)
+    >>> nx.draw(G, pos=nx.spring_layout(G))  # use spring layout
+
+    >>> import matplotlib.pyplot as plt
+    >>> limits = plt.axis("off")  # turn off axis
+
+    Also see the NetworkX drawing examples at
+    https://networkx.org/documentation/latest/auto_examples/index.html
+
+    See Also
+    --------
+    draw
+    draw_networkx_nodes
+    draw_networkx_edges
+    draw_networkx_labels
+    draw_networkx_edge_labels
+    """
+    from inspect import signature
+
+    import matplotlib.pyplot as plt
+
+    # Get all valid keywords by inspecting the signatures of draw_networkx_nodes,
+    # draw_networkx_edges, draw_networkx_labels
+
+    valid_node_kwds = signature(draw_networkx_nodes).parameters.keys()
+    valid_edge_kwds = signature(draw_networkx_edges).parameters.keys()
+    valid_label_kwds = signature(draw_networkx_labels).parameters.keys()
+
+    # Create a set with all valid keywords across the three functions and
+    # remove the arguments of this function (draw_networkx)
+    valid_kwds = (valid_node_kwds | valid_edge_kwds | valid_label_kwds) - {
+        "G",
+        "pos",
+        "arrows",
+        "with_labels",
+    }
+
+    if any(k not in valid_kwds for k in kwds):
+        invalid_args = ", ".join([k for k in kwds if k not in valid_kwds])
+        raise ValueError(f"Received invalid argument(s): {invalid_args}")
+
+    node_kwds = {k: v for k, v in kwds.items() if k in valid_node_kwds}
+    edge_kwds = {k: v for k, v in kwds.items() if k in valid_edge_kwds}
+    label_kwds = {k: v for k, v in kwds.items() if k in valid_label_kwds}
+
+    if pos is None:
+        pos = nx.drawing.spring_layout(G)  # default to spring layout
+
+    draw_networkx_nodes(G, pos, **node_kwds)
+    draw_networkx_edges(G, pos, arrows=arrows, **edge_kwds)
+    if with_labels:
+        draw_networkx_labels(G, pos, **label_kwds)
+    plt.draw_if_interactive()
+
+
+def draw_networkx_nodes(
+    G,
+    pos,
+    nodelist=None,
+    node_size=300,
+    node_color="#1f78b4",
+    node_shape="o",
+    alpha=None,
+    cmap=None,
+    vmin=None,
+    vmax=None,
+    ax=None,
+    linewidths=None,
+    edgecolors=None,
+    label=None,
+    margins=None,
+    hide_ticks=True,
+):
+    """Draw the nodes of the graph G.
+
+    This draws only the nodes of the graph G.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary
+        A dictionary with nodes as keys and positions as values.
+        Positions should be sequences of length 2.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    nodelist : list (default list(G))
+        Draw only specified nodes
+
+    node_size : scalar or array (default=300)
+        Size of nodes. If an array it must be the same length as nodelist.
+
+    node_color : color or array of colors (default='#1f78b4')
+        Node color. Can be a single color or a sequence of colors with the same
+        length as nodelist. Color can be string or rgb (or rgba) tuple of
+        floats from 0-1. If numeric values are specified they will be
+        mapped to colors using the cmap and vmin,vmax parameters. See
+        matplotlib.scatter for more details.
+
+    node_shape : string (default='o')
+        The shape of the node. Specification is as matplotlib.scatter
+        marker, one of 'so^>v<dph8'.
+
+    alpha : float or array of floats (default=None)
+        The node transparency. This can be a single alpha value, in which
+        case it will be applied to all the nodes. Otherwise, if it is an
+        array, the elements of alpha will be applied to the colors in order
+        (cycling through alpha multiple times if necessary).
+
+    cmap : Matplotlib colormap (default=None)
+        Colormap for mapping intensities of nodes.
+
+    vmin, vmax : floats or None (default=None)
+        Minimum and maximum for node colormap scaling.
+
+    linewidths : [None | scalar | sequence] (default=1.0)
+        Line width of symbol border.
+
+    edgecolors : [None | scalar | sequence] (default=node_color)
+        Colors of node borders.
+
+    label : [None | string]
+        Label for legend.
+
+    margins : float or 2-tuple, optional
+        Sets the padding for axis autoscaling. Increase margin to prevent
+        clipping for nodes that are drawn to the edge of the axes. A single
+        float sets both margins, while a 2-tuple (x, y) sets each margin
+        separately. See :meth:`matplotlib.axes.Axes.margins` for details.
+
+    hide_ticks : bool, optional
+        Hide ticks of axes. When `True` (the default), ticks and ticklabels
+        are removed from the axes. To set ticks and tick labels to the pyplot
+        default, use ``hide_ticks=False``.
+
+    Returns
+    -------
+    matplotlib.collections.PathCollection
+        `PathCollection` of the nodes.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> nodes = nx.draw_networkx_nodes(G, pos=nx.spring_layout(G))
+
+    Also see the NetworkX drawing examples at
+    https://networkx.org/documentation/latest/auto_examples/index.html
+
+    See Also
+    --------
+    draw
+    draw_networkx
+    draw_networkx_edges
+    draw_networkx_labels
+    draw_networkx_edge_labels
+    """
+    from collections.abc import Iterable
+
+    import matplotlib as mpl
+    import matplotlib.collections  # call as mpl.collections
+    import matplotlib.pyplot as plt
+    import numpy as np
+
+    if ax is None:
+        ax = plt.gca()
+
+    if nodelist is None:
+        nodelist = list(G)
+
+    if len(nodelist) == 0:  # empty nodelist, no drawing
+        return mpl.collections.PathCollection(None)
+
+    try:
+        xy = np.asarray([pos[v] for v in nodelist])
+    except KeyError as err:
+        raise nx.NetworkXError(f"Node {err} has no position.") from err
+
+    if isinstance(alpha, Iterable):
+        node_color = apply_alpha(node_color, alpha, nodelist, cmap, vmin, vmax)
+        alpha = None
+
+    if not isinstance(node_shape, np.ndarray) and not isinstance(node_shape, list):
+        node_shape = np.array([node_shape for _ in range(len(nodelist))])
+
+    for shape in np.unique(node_shape):
+        node_collection = ax.scatter(
+            xy[node_shape == shape, 0],
+            xy[node_shape == shape, 1],
+            s=node_size,
+            c=node_color,
+            marker=shape,
+            cmap=cmap,
+            vmin=vmin,
+            vmax=vmax,
+            alpha=alpha,
+            linewidths=linewidths,
+            edgecolors=edgecolors,
+            label=label,
+        )
+    if hide_ticks:
+        ax.tick_params(
+            axis="both",
+            which="both",
+            bottom=False,
+            left=False,
+            labelbottom=False,
+            labelleft=False,
+        )
+
+    if margins is not None:
+        if isinstance(margins, Iterable):
+            ax.margins(*margins)
+        else:
+            ax.margins(margins)
+
+    node_collection.set_zorder(2)
+    return node_collection
+
+
+class FancyArrowFactory:
+    """Draw arrows with `matplotlib.patches.FancyArrowPatch`"""
+
+    class ConnectionStyleFactory:
+        def __init__(self, connectionstyles, selfloop_height, ax=None):
+            import matplotlib as mpl
+            import matplotlib.path  # call as mpl.path
+            import numpy as np
+
+            self.ax = ax
+            self.mpl = mpl
+            self.np = np
+            self.base_connection_styles = [
+                mpl.patches.ConnectionStyle(cs) for cs in connectionstyles
+            ]
+            self.n = len(self.base_connection_styles)
+            self.selfloop_height = selfloop_height
+
+        def curved(self, edge_index):
+            return self.base_connection_styles[edge_index % self.n]
+
+        def self_loop(self, edge_index):
+            def self_loop_connection(posA, posB, *args, **kwargs):
+                if not self.np.all(posA == posB):
+                    raise nx.NetworkXError(
+                        "`self_loop` connection style method "
+                        "is only to be used for self-loops"
+                    )
+                # this is called with _screen space_ values
+                # so convert back to data space
+                data_loc = self.ax.transData.inverted().transform(posA)
+                v_shift = 0.1 * self.selfloop_height
+                h_shift = v_shift * 0.5
+                # put the top of the loop first so arrow is not hidden by node
+                path = self.np.asarray(
+                    [
+                        # 1
+                        [0, v_shift],
+                        # 4 4 4
+                        [h_shift, v_shift],
+                        [h_shift, 0],
+                        [0, 0],
+                        # 4 4 4
+                        [-h_shift, 0],
+                        [-h_shift, v_shift],
+                        [0, v_shift],
+                    ]
+                )
+                # Rotate self loop 90 deg.
if more than 1 + # This will allow for maximum of 4 visible self loops + if edge_index % 4: + x, y = path.T + for _ in range(edge_index % 4): + x, y = y, -x + path = self.np.array([x, y]).T + return self.mpl.path.Path( + self.ax.transData.transform(data_loc + path), [1, 4, 4, 4, 4, 4, 4] + ) + + return self_loop_connection + + def __init__( + self, + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle="arc3", + node_shape="o", + arrowstyle="-", + arrowsize=10, + edge_color="k", + alpha=None, + linewidth=1.0, + style="solid", + min_source_margin=0, + min_target_margin=0, + ax=None, + ): + import matplotlib as mpl + import matplotlib.patches # call as mpl.patches + import matplotlib.pyplot as plt + import numpy as np + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "ConnectionStyleFactory arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + self.ax = ax + self.mpl = mpl + self.np = np + self.edge_pos = edge_pos + self.edgelist = edgelist + self.nodelist = nodelist + self.node_shape = node_shape + self.min_source_margin = min_source_margin + self.min_target_margin = min_target_margin + self.edge_indices = edge_indices + self.node_size = node_size + self.connectionstyle_factory = self.ConnectionStyleFactory( + connectionstyle, selfloop_height, ax + ) + self.arrowstyle = arrowstyle + self.arrowsize = arrowsize + self.arrow_colors = mpl.colors.colorConverter.to_rgba_array(edge_color, alpha) + self.linewidth = linewidth + self.style = style + if isinstance(arrowsize, list) and len(arrowsize) != len(edge_pos): + raise ValueError("arrowsize should have the same length as edgelist") + + def __call__(self, i): + (x1, y1), (x2, y2) = self.edge_pos[i] + shrink_source = 0 # space from source to tail + shrink_target = 0 # space from head to target + if ( + self.np.iterable(self.min_source_margin) + and not isinstance(self.min_source_margin, str) + and not isinstance(self.min_source_margin, tuple) + ): + min_source_margin = self.min_source_margin[i] + else: + min_source_margin = self.min_source_margin + + if ( + self.np.iterable(self.min_target_margin) + and not isinstance(self.min_target_margin, str) + and not isinstance(self.min_target_margin, tuple) + ): + min_target_margin = self.min_target_margin[i] + else: + min_target_margin = self.min_target_margin + + if self.np.iterable(self.node_size): # many node sizes + source, target = self.edgelist[i][:2] + source_node_size = self.node_size[self.nodelist.index(source)] + target_node_size = self.node_size[self.nodelist.index(target)] + shrink_source = self.to_marker_edge(source_node_size, self.node_shape) + shrink_target = self.to_marker_edge(target_node_size, self.node_shape) + else: + shrink_source = self.to_marker_edge(self.node_size, self.node_shape) + shrink_target = shrink_source + shrink_source = max(shrink_source, min_source_margin) + shrink_target = max(shrink_target, min_target_margin) + + # scale factor of arrow head + if isinstance(self.arrowsize, list): + mutation_scale = self.arrowsize[i] + else: + mutation_scale = self.arrowsize + + if len(self.arrow_colors) > i: + arrow_color = self.arrow_colors[i] + elif len(self.arrow_colors) == 1: + arrow_color = self.arrow_colors[0] + else: # Cycle through colors + arrow_color = self.arrow_colors[i % len(self.arrow_colors)] + + if self.np.iterable(self.linewidth): + if len(self.linewidth) > i: + linewidth = 
self.linewidth[i]
+            else:
+                linewidth = self.linewidth[i % len(self.linewidth)]
+        else:
+            linewidth = self.linewidth
+
+        if (
+            self.np.iterable(self.style)
+            and not isinstance(self.style, str)
+            and not isinstance(self.style, tuple)
+        ):
+            if len(self.style) > i:
+                linestyle = self.style[i]
+            else:  # Cycle through styles
+                linestyle = self.style[i % len(self.style)]
+        else:
+            linestyle = self.style
+
+        if x1 == x2 and y1 == y2:
+            connectionstyle = self.connectionstyle_factory.self_loop(
+                self.edge_indices[i]
+            )
+        else:
+            connectionstyle = self.connectionstyle_factory.curved(self.edge_indices[i])
+
+        if (
+            self.np.iterable(self.arrowstyle)
+            and not isinstance(self.arrowstyle, str)
+            and not isinstance(self.arrowstyle, tuple)
+        ):
+            arrowstyle = self.arrowstyle[i]
+        else:
+            arrowstyle = self.arrowstyle
+
+        return self.mpl.patches.FancyArrowPatch(
+            (x1, y1),
+            (x2, y2),
+            arrowstyle=arrowstyle,
+            shrinkA=shrink_source,
+            shrinkB=shrink_target,
+            mutation_scale=mutation_scale,
+            color=arrow_color,
+            linewidth=linewidth,
+            connectionstyle=connectionstyle,
+            linestyle=linestyle,
+            zorder=1,  # arrows go behind nodes
+        )
+
+    def to_marker_edge(self, marker_size, marker):
+        if marker in "s^>v<d":  # `large` markers need extra space
+            return marker_size**0.5
+        else:
+            return marker_size**0.5 / 2
+
+
+def draw_networkx_edges(
+    G,
+    pos,
+    edgelist=None,
+    width=1.0,
+    edge_color="k",
+    style="solid",
+    alpha=None,
+    arrowstyle=None,
+    arrowsize=10,
+    edge_cmap=None,
+    edge_vmin=None,
+    edge_vmax=None,
+    ax=None,
+    arrows=None,
+    label=None,
+    node_size=300,
+    nodelist=None,
+    node_shape="o",
+    connectionstyle="arc3",
+    min_source_margin=0,
+    min_target_margin=0,
+    hide_ticks=True,
+):
+    r"""Draw the edges of the graph G.
+
+    This draws only the edges of the graph G.
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    pos : dictionary
+        A dictionary with nodes as keys and positions as values.
+        Positions should be sequences of length 2.
+
+    edgelist : collection of edge tuples (default=G.edges())
+        Draw only specified edges
+
+    width : float or array of floats (default=1.0)
+        Line width of edges
+
+    edge_color : color or array of colors (default='k')
+        Edge color. Can be a single color or a sequence of colors with the same
+        length as edgelist. If numeric values are specified they will be
+        mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters.
+
+    style : string or array of strings (default='solid')
+        Edge line style e.g.: '-', '--', '-.', ':'
+        or words like 'solid' or 'dashed'.
+        Can be a single style or a sequence of styles with the same
+        length as the edge list.
+
+    alpha : float or array of floats (default=None)
+        The edge transparency. This can be a single alpha value, in which
+        case it will be applied to all specified edges. Otherwise, if it is
+        an array, the elements of alpha will be applied to the colors in
+        order (cycling through alpha multiple times if necessary).
+
+    arrows : bool or None, optional (default=None)
+        If `None`, directed graphs draw arrowheads with FancyArrowPatches,
+        while undirected graphs draw edges via LineCollection for speed.
+        If `True`, draw arrowheads with FancyArrowPatches (bendable and stylish).
+        If `False`, draw edges using LineCollection (linear and fast).
+
+    arrowstyle : str (default='-\|>' for directed graphs)
+        For directed graphs and `arrows==True` defaults to '-\|>',
+        for undirected graphs it defaults to '-'.
+
+        See `matplotlib.patches.ArrowStyle` for more options.
+
+    arrowsize : int or list of ints (default=10)
+        For directed graphs, choose the size of the arrow head's length and
+        width. See `matplotlib.patches.FancyArrowPatch` for attribute
+        `mutation_scale` for more info.
+
+    connectionstyle : string or iterable of strings (default="arc3")
+        Pass the connectionstyle parameter to create curved arc of rounding
+        radius rad. For example, connectionstyle='arc3,rad=0.2'.
+        See `matplotlib.patches.ConnectionStyle` and
+        `matplotlib.patches.FancyArrowPatch` for more info.
+        If Iterable, index indicates i'th edge key of MultiGraph
+
+    node_size : scalar or array (default=300)
+        Size of nodes. Though the nodes are not drawn with this function, the
+        node size is used in determining edge positioning.
+
+    nodelist : list, optional (default=G.nodes())
+        This provides the node order for the `node_size` array (if it is an array).
+
+    node_shape : string (default='o')
+        The marker used for nodes, used in determining edge positioning.
+        Specification is as a `matplotlib.markers` marker, e.g. one of 'so^>v<dph8'.
+
+    ax : Matplotlib Axes object, optional
+        Draw the graph in the specified Matplotlib axes.
+
+    edge_cmap : Matplotlib colormap, optional
+        Colormap for mapping intensities of edges.
+
+    edge_vmin, edge_vmax : floats, optional
+        Minimum and maximum for edge colormap scaling.
+
+    label : [None | string]
+        Label for legend.
+
+    min_source_margin : int or list of ints (default=0)
+        The minimum margin (gap) at the source of the edge.
+
+    min_target_margin : int or list of ints (default=0)
+        The minimum margin (gap) at the destination of the edge.
+
+    hide_ticks : bool, optional
+        Hide ticks of axes. When `True` (the default), ticks and ticklabels
+        are removed from the axes. To set ticks and tick labels to the pyplot
+        default, use ``hide_ticks=False``.
+
+    Returns
+    -------
+    matplotlib.collections.LineCollection or list of matplotlib.patches.FancyArrowPatch
+        If ``arrows=True``, a list of FancyArrowPatches is returned.
+        If ``arrows=False``, a LineCollection is returned.
+        If ``arrows=None`` (the default), a LineCollection is returned if
+        `G` is undirected, otherwise a list of FancyArrowPatches.
+
+    Examples
+    --------
+    >>> G = nx.dodecahedral_graph()
+    >>> edges = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
+
+    >>> G = nx.DiGraph()
+    >>> G.add_edges_from([(1, 2), (1, 3), (2, 3)])
+    >>> arcs = nx.draw_networkx_edges(G, pos=nx.spring_layout(G))
+    >>> alphas = [0.3, 0.4, 0.5]
+    >>> for i, arc in enumerate(arcs):  # change alpha values of arcs
+    ...     arc.set_alpha(alphas[i])
+
+    The FancyArrowPatches corresponding to self-loops are not always
+    returned, but can always be accessed via the ``patches`` attribute of the
+    `matplotlib.Axes` object.
+ + >>> import matplotlib.pyplot as plt + >>> fig, ax = plt.subplots() + >>> G = nx.Graph([(0, 1), (0, 0)]) # Self-loop at node 0 + >>> edge_collection = nx.draw_networkx_edges(G, pos=nx.circular_layout(G), ax=ax) + >>> self_loop_fap = ax.patches[0] + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_labels + draw_networkx_edge_labels + + """ + import warnings + + import matplotlib as mpl + import matplotlib.collections # call as mpl.collections + import matplotlib.colors # call as mpl.colors + import matplotlib.pyplot as plt + import numpy as np + + # The default behavior is to use LineCollection to draw edges for + # undirected graphs (for performance reasons) and use FancyArrowPatches + # for directed graphs. + # The `arrows` keyword can be used to override the default behavior + if arrows is None: + use_linecollection = not (G.is_directed() or G.is_multigraph()) + else: + if not isinstance(arrows, bool): + raise TypeError("Argument `arrows` must be of type bool or None") + use_linecollection = not arrows + + if isinstance(connectionstyle, str): + connectionstyle = [connectionstyle] + elif np.iterable(connectionstyle): + connectionstyle = list(connectionstyle) + else: + msg = "draw_networkx_edges arg `connectionstyle` must be str or iterable" + raise nx.NetworkXError(msg) + + # Some kwargs only apply to FancyArrowPatches. Warn users when they use + # non-default values for these kwargs when LineCollection is being used + # instead of silently ignoring the specified option + if use_linecollection: + msg = ( + "\n\nThe {0} keyword argument is not applicable when drawing edges\n" + "with LineCollection.\n\n" + "To make this warning go away, either specify `arrows=True` to\n" + "force FancyArrowPatches or use the default values.\n" + "Note that using FancyArrowPatches may be slow for large graphs.\n" + ) + if arrowstyle is not None: + warnings.warn(msg.format("arrowstyle"), category=UserWarning, stacklevel=2) + if arrowsize != 10: + warnings.warn(msg.format("arrowsize"), category=UserWarning, stacklevel=2) + if min_source_margin != 0: + warnings.warn( + msg.format("min_source_margin"), category=UserWarning, stacklevel=2 + ) + if min_target_margin != 0: + warnings.warn( + msg.format("min_target_margin"), category=UserWarning, stacklevel=2 + ) + if any(cs != "arc3" for cs in connectionstyle): + warnings.warn( + msg.format("connectionstyle"), category=UserWarning, stacklevel=2 + ) + + # NOTE: Arrowstyle modification must occur after the warnings section + if arrowstyle is None: + arrowstyle = "-|>" if G.is_directed() else "-" + + if ax is None: + ax = plt.gca() + + if edgelist is None: + edgelist = list(G.edges) # (u, v, k) for multigraph (u, v) otherwise + + if len(edgelist): + if G.is_multigraph(): + key_count = collections.defaultdict(lambda: itertools.count(0)) + edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist] + else: + edge_indices = [0] * len(edgelist) + else: # no edges! + return [] + + if nodelist is None: + nodelist = list(G.nodes()) + + # FancyArrowPatch handles color=None different from LineCollection + if edge_color is None: + edge_color = "k" + + # set edge positions + edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist]) + + # Check if edge_color is an array of floats and map to edge_cmap. 
+ # This is the only case handled differently from matplotlib + if ( + np.iterable(edge_color) + and (len(edge_color) == len(edge_pos)) + and np.all([isinstance(c, Number) for c in edge_color]) + ): + if edge_cmap is not None: + assert isinstance(edge_cmap, mpl.colors.Colormap) + else: + edge_cmap = plt.get_cmap() + if edge_vmin is None: + edge_vmin = min(edge_color) + if edge_vmax is None: + edge_vmax = max(edge_color) + color_normal = mpl.colors.Normalize(vmin=edge_vmin, vmax=edge_vmax) + edge_color = [edge_cmap(color_normal(e)) for e in edge_color] + + # compute initial view + minx = np.amin(np.ravel(edge_pos[:, :, 0])) + maxx = np.amax(np.ravel(edge_pos[:, :, 0])) + miny = np.amin(np.ravel(edge_pos[:, :, 1])) + maxy = np.amax(np.ravel(edge_pos[:, :, 1])) + w = maxx - minx + h = maxy - miny + + # Self-loops are scaled by view extent, except in cases the extent + # is 0, e.g. for a single node. In this case, fall back to scaling + # by the maximum node size + selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max() + fancy_arrow_factory = FancyArrowFactory( + edge_pos, + edgelist, + nodelist, + edge_indices, + node_size, + selfloop_height, + connectionstyle, + node_shape, + arrowstyle, + arrowsize, + edge_color, + alpha, + width, + style, + min_source_margin, + min_target_margin, + ax=ax, + ) + + # Draw the edges + if use_linecollection: + edge_collection = mpl.collections.LineCollection( + edge_pos, + colors=edge_color, + linewidths=width, + antialiaseds=(1,), + linestyle=style, + alpha=alpha, + ) + edge_collection.set_cmap(edge_cmap) + edge_collection.set_clim(edge_vmin, edge_vmax) + edge_collection.set_zorder(1) # edges go behind nodes + edge_collection.set_label(label) + ax.add_collection(edge_collection) + edge_viz_obj = edge_collection + + # Make sure selfloop edges are also drawn + # --------------------------------------- + selfloops_to_draw = [loop for loop in nx.selfloop_edges(G) if loop in edgelist] + if selfloops_to_draw: + edgelist_tuple = list(map(tuple, edgelist)) + arrow_collection = [] + for loop in selfloops_to_draw: + i = edgelist_tuple.index(loop) + arrow = fancy_arrow_factory(i) + arrow_collection.append(arrow) + ax.add_patch(arrow) + else: + edge_viz_obj = [] + for i in range(len(edgelist)): + arrow = fancy_arrow_factory(i) + ax.add_patch(arrow) + edge_viz_obj.append(arrow) + + # update view after drawing + padx, pady = 0.05 * w, 0.05 * h + corners = (minx - padx, miny - pady), (maxx + padx, maxy + pady) + ax.update_datalim(corners) + ax.autoscale_view() + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + return edge_viz_obj + + +def draw_networkx_labels( + G, + pos, + labels=None, + font_size=12, + font_color="k", + font_family="sans-serif", + font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + clip_on=True, + hide_ticks=True, +): + """Draw node labels on the graph G. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary + A dictionary with nodes as keys and positions as values. + Positions should be sequences of length 2. + + labels : dictionary (default={n: n for n in G}) + Node labels in a dictionary of text labels keyed by node. + Node-keys in labels should appear as keys in `pos`. + If needed use: `{n:lab for n,lab in labels.items() if n in pos}` + + font_size : int or dictionary of nodes to ints (default=12) + Font size for text labels. 
+ + font_color : color or dictionary of nodes to colors (default='k' black) + Font color string. Color can be string or rgb (or rgba) tuple of + floats from 0-1. + + font_weight : string or dictionary of nodes to strings (default='normal') + Font weight. + + font_family : string or dictionary of nodes to strings (default='sans-serif') + Font family. + + alpha : float or None or dictionary of nodes to floats (default=None) + The text transparency. + + bbox : Matplotlib bbox, (default is Matplotlib's ax.text default) + Specify text box properties (e.g. shape, color etc.) for node labels. + + horizontalalignment : string or array of strings (default='center') + Horizontal alignment {'center', 'right', 'left'}. If an array is + specified it must be the same length as `nodelist`. + + verticalalignment : string (default='center') + Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'}. + If an array is specified it must be the same length as `nodelist`. + + ax : Matplotlib Axes object, optional + Draw the graph in the specified Matplotlib axes. + + clip_on : bool (default=True) + Turn on clipping of node labels at axis boundaries + + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + + Returns + ------- + dict + `dict` of labels keyed on the nodes + + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> labels = nx.draw_networkx_labels(G, pos=nx.spring_layout(G)) + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_edges + draw_networkx_edge_labels + """ + import matplotlib.pyplot as plt + + if ax is None: + ax = plt.gca() + + if labels is None: + labels = {n: n for n in G.nodes()} + + individual_params = set() + + def check_individual_params(p_value, p_name): + if isinstance(p_value, dict): + if len(p_value) != len(labels): + raise ValueError(f"{p_name} must have the same length as labels.") + individual_params.add(p_name) + + def get_param_value(node, p_value, p_name): + if p_name in individual_params: + return p_value[node] + return p_value + + check_individual_params(font_size, "font_size") + check_individual_params(font_color, "font_color") + check_individual_params(font_weight, "font_weight") + check_individual_params(font_family, "font_family") + check_individual_params(alpha, "alpha") + + text_items = {} # there is no text collection so we'll fake one + for n, label in labels.items(): + (x, y) = pos[n] + if not isinstance(label, str): + label = str(label) # this makes "1" and 1 labeled the same + t = ax.text( + x, + y, + label, + size=get_param_value(n, font_size, "font_size"), + color=get_param_value(n, font_color, "font_color"), + family=get_param_value(n, font_family, "font_family"), + weight=get_param_value(n, font_weight, "font_weight"), + alpha=get_param_value(n, alpha, "alpha"), + horizontalalignment=horizontalalignment, + verticalalignment=verticalalignment, + transform=ax.transData, + bbox=bbox, + clip_on=clip_on, + ) + text_items[n] = t + + if hide_ticks: + ax.tick_params( + axis="both", + which="both", + bottom=False, + left=False, + labelbottom=False, + labelleft=False, + ) + + return text_items + + +def draw_networkx_edge_labels( + G, + pos, + edge_labels=None, + label_pos=0.5, + font_size=10, + font_color="k", + font_family="sans-serif", + 
font_weight="normal", + alpha=None, + bbox=None, + horizontalalignment="center", + verticalalignment="center", + ax=None, + rotate=True, + clip_on=True, + node_size=300, + nodelist=None, + connectionstyle="arc3", + hide_ticks=True, +): + """Draw edge labels. + + Parameters + ---------- + G : graph + A networkx graph + + pos : dictionary + A dictionary with nodes as keys and positions as values. + Positions should be sequences of length 2. + + edge_labels : dictionary (default=None) + Edge labels in a dictionary of labels keyed by edge two-tuple. + Only labels for the keys in the dictionary are drawn. + + label_pos : float (default=0.5) + Position of edge label along edge (0=head, 0.5=center, 1=tail) + + font_size : int (default=10) + Font size for text labels + + font_color : color (default='k' black) + Font color string. Color can be string or rgb (or rgba) tuple of + floats from 0-1. + + font_weight : string (default='normal') + Font weight + + font_family : string (default='sans-serif') + Font family + + alpha : float or None (default=None) + The text transparency + + bbox : Matplotlib bbox, optional + Specify text box properties (e.g. shape, color etc.) for edge labels. + Default is {boxstyle='round', ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0)}. + + horizontalalignment : string (default='center') + Horizontal alignment {'center', 'right', 'left'} + + verticalalignment : string (default='center') + Vertical alignment {'center', 'top', 'bottom', 'baseline', 'center_baseline'} + + ax : Matplotlib Axes object, optional + Draw the graph in the specified Matplotlib axes. + + rotate : bool (default=True) + Rotate edge labels to lie parallel to edges + + clip_on : bool (default=True) + Turn on clipping of edge labels at axis boundaries + + node_size : scalar or array (default=300) + Size of nodes. If an array it must be the same length as nodelist. + + nodelist : list, optional (default=G.nodes()) + This provides the node order for the `node_size` array (if it is an array). + + connectionstyle : string or iterable of strings (default="arc3") + Pass the connectionstyle parameter to create curved arc of rounding + radius rad. For example, connectionstyle='arc3,rad=0.2'. + See `matplotlib.patches.ConnectionStyle` and + `matplotlib.patches.FancyArrowPatch` for more info. + If Iterable, index indicates i'th edge key of MultiGraph + + hide_ticks : bool, optional + Hide ticks of axes. When `True` (the default), ticks and ticklabels + are removed from the axes. To set ticks and tick labels to the pyplot default, + use ``hide_ticks=False``. + + Returns + ------- + dict + `dict` of labels keyed by edge + + Examples + -------- + >>> G = nx.dodecahedral_graph() + >>> edge_labels = nx.draw_networkx_edge_labels(G, pos=nx.spring_layout(G)) + + Also see the NetworkX drawing examples at + https://networkx.org/documentation/latest/auto_examples/index.html + + See Also + -------- + draw + draw_networkx + draw_networkx_nodes + draw_networkx_edges + draw_networkx_labels + """ + import matplotlib as mpl + import matplotlib.pyplot as plt + import numpy as np + + class CurvedArrowText(mpl.text.Text): + def __init__( + self, + arrow, + *args, + label_pos=0.5, + labels_horizontal=False, + ax=None, + **kwargs, + ): + # Bind to FancyArrowPatch + self.arrow = arrow + # how far along the text should be on the curve, + # 0 is at start, 1 is at end etc. 
+ self.label_pos = label_pos + self.labels_horizontal = labels_horizontal + if ax is None: + ax = plt.gca() + self.ax = ax + self.x, self.y, self.angle = self._update_text_pos_angle(arrow) + + # Create text object + super().__init__(self.x, self.y, *args, rotation=self.angle, **kwargs) + # Bind to axis + self.ax.add_artist(self) + + def _get_arrow_path_disp(self, arrow): + """ + This is part of FancyArrowPatch._get_path_in_displaycoord + It omits the second part of the method where path is converted + to polygon based on width + The transform is taken from ax, not the object, as the object + has not been added yet, and doesn't have transform + """ + dpi_cor = arrow._dpi_cor + trans_data = self.ax.transData + if arrow._posA_posB is None: + raise ValueError( + "Can only draw labels for fancy arrows with " + "posA and posB inputs, not custom path" + ) + posA = arrow._convert_xy_units(arrow._posA_posB[0]) + posB = arrow._convert_xy_units(arrow._posA_posB[1]) + (posA, posB) = trans_data.transform((posA, posB)) + _path = arrow.get_connectionstyle()( + posA, + posB, + patchA=arrow.patchA, + patchB=arrow.patchB, + shrinkA=arrow.shrinkA * dpi_cor, + shrinkB=arrow.shrinkB * dpi_cor, + ) + # Return is in display coordinates + return _path + + def _update_text_pos_angle(self, arrow): + # Fractional label position + # Text position at a proportion t along the line in display coords + # default is 0.5 so text appears at the halfway point + t = self.label_pos + tt = 1 - t + path_disp = self._get_arrow_path_disp(arrow) + is_bar_style = isinstance( + arrow.get_connectionstyle(), mpl.patches.ConnectionStyle.Bar + ) + # 1. Calculate x and y + if is_bar_style: + # Bar Connection Style - straight line + _, (cx1, cy1), (cx2, cy2), _ = path_disp.vertices + x = cx1 * tt + cx2 * t + y = cy1 * tt + cy2 * t + else: + # Arc or Angle type Connection Styles - Bezier curve + (x1, y1), (cx, cy), (x2, y2) = path_disp.vertices + x = tt**2 * x1 + 2 * t * tt * cx + t**2 * x2 + y = tt**2 * y1 + 2 * t * tt * cy + t**2 * y2 + # 2. 
Calculate Angle
+            if self.labels_horizontal:
+                # Horizontal text labels
+                angle = 0
+            else:
+                # Labels parallel to curve
+                if is_bar_style:
+                    change_x = (cx2 - cx1) / 2
+                    change_y = (cy2 - cy1) / 2
+                else:
+                    change_x = 2 * tt * (cx - x1) + 2 * t * (x2 - cx)
+                    change_y = 2 * tt * (cy - y1) + 2 * t * (y2 - cy)
+                angle = np.arctan2(change_y, change_x) / (2 * np.pi) * 360
+                # Text is "right way up"
+                if angle > 90:
+                    angle -= 180
+                elif angle < -90:
+                    angle += 180
+            (x, y) = self.ax.transData.inverted().transform((x, y))
+            return x, y, angle
+
+        def draw(self, renderer):
+            # recalculate the text position and angle
+            self.x, self.y, self.angle = self._update_text_pos_angle(self.arrow)
+            self.set_position((self.x, self.y))
+            self.set_rotation(self.angle)
+            # redraw text
+            super().draw(renderer)
+
+    # use default box of white with white border
+    if bbox is None:
+        bbox = {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}
+
+    if isinstance(connectionstyle, str):
+        connectionstyle = [connectionstyle]
+    elif np.iterable(connectionstyle):
+        connectionstyle = list(connectionstyle)
+    else:
+        raise nx.NetworkXError(
+            "draw_networkx_edge_labels arg `connectionstyle` must be a "
+            "string or iterable of strings"
+        )
+
+    if ax is None:
+        ax = plt.gca()
+
+    if edge_labels is None:
+        kwds = {"keys": True} if G.is_multigraph() else {}
+        edge_labels = {tuple(edge): d for *edge, d in G.edges(data=True, **kwds)}
+    # NOTHING TO PLOT
+    if not edge_labels:
+        return {}
+    edgelist, labels = zip(*edge_labels.items())
+
+    if nodelist is None:
+        nodelist = list(G.nodes())
+
+    # set edge positions
+    edge_pos = np.asarray([(pos[e[0]], pos[e[1]]) for e in edgelist])
+
+    if G.is_multigraph():
+        key_count = collections.defaultdict(lambda: itertools.count(0))
+        edge_indices = [next(key_count[tuple(e[:2])]) for e in edgelist]
+    else:
+        edge_indices = [0] * len(edgelist)
+
+    # Used to determine self loop mid-point.
+    # Note that this will not be accurate if edge_labels are not
+    # drawn for all of the drawn edges.
+    h = 0
+    if edge_labels:
+        miny = np.amin(np.ravel(edge_pos[:, :, 1]))
+        maxy = np.amax(np.ravel(edge_pos[:, :, 1]))
+        h = maxy - miny
+    selfloop_height = h if h != 0 else 0.005 * np.array(node_size).max()
+    fancy_arrow_factory = FancyArrowFactory(
+        edge_pos,
+        edgelist,
+        nodelist,
+        edge_indices,
+        node_size,
+        selfloop_height,
+        connectionstyle,
+        ax=ax,
+    )
+
+    individual_params = {}
+
+    def check_individual_params(p_value, p_name):
+        # TODO should this be list or array (as in a numpy array)?
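+        # Sequence-valued parameters are broadcast per edge: a list aligned
+        # with `edgelist` is registered here as an iterator, and each call to
+        # get_param_value() below consumes the next value for the label drawn.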
+        if isinstance(p_value, list):
+            if len(p_value) != len(edgelist):
+                raise ValueError(f"{p_name} must have the same length as edgelist.")
+            individual_params[p_name] = iter(p_value)
+
+    # Don't need to pass in an edge because these are lists, not dicts
+    def get_param_value(p_value, p_name):
+        if p_name in individual_params:
+            return next(individual_params[p_name])
+        return p_value
+
+    check_individual_params(font_size, "font_size")
+    check_individual_params(font_color, "font_color")
+    check_individual_params(font_family, "font_family")
+    check_individual_params(font_weight, "font_weight")
+    check_individual_params(alpha, "alpha")
+    check_individual_params(horizontalalignment, "horizontalalignment")
+    check_individual_params(verticalalignment, "verticalalignment")
+    check_individual_params(rotate, "rotate")
+    check_individual_params(label_pos, "label_pos")
+
+    text_items = {}
+    for i, (edge, label) in enumerate(zip(edgelist, labels)):
+        if not isinstance(label, str):
+            label = str(label)  # this makes "1" and 1 labeled the same
+
+        n1, n2 = edge[:2]
+        arrow = fancy_arrow_factory(i)
+        if n1 == n2:
+            connectionstyle_obj = arrow.get_connectionstyle()
+            posA = ax.transData.transform(pos[n1])
+            path_disp = connectionstyle_obj(posA, posA)
+            path_data = ax.transData.inverted().transform_path(path_disp)
+            x, y = path_data.vertices[0]
+            text_items[edge] = ax.text(
+                x,
+                y,
+                label,
+                size=get_param_value(font_size, "font_size"),
+                color=get_param_value(font_color, "font_color"),
+                family=get_param_value(font_family, "font_family"),
+                weight=get_param_value(font_weight, "font_weight"),
+                alpha=get_param_value(alpha, "alpha"),
+                horizontalalignment=get_param_value(
+                    horizontalalignment, "horizontalalignment"
+                ),
+                verticalalignment=get_param_value(
+                    verticalalignment, "verticalalignment"
+                ),
+                rotation=0,
+                transform=ax.transData,
+                bbox=bbox,
+                zorder=1,
+                clip_on=clip_on,
+            )
+        else:
+            text_items[edge] = CurvedArrowText(
+                arrow,
+                label,
+                size=get_param_value(font_size, "font_size"),
+                color=get_param_value(font_color, "font_color"),
+                family=get_param_value(font_family, "font_family"),
+                weight=get_param_value(font_weight, "font_weight"),
+                alpha=get_param_value(alpha, "alpha"),
+                horizontalalignment=get_param_value(
+                    horizontalalignment, "horizontalalignment"
+                ),
+                verticalalignment=get_param_value(
+                    verticalalignment, "verticalalignment"
+                ),
+                transform=ax.transData,
+                bbox=bbox,
+                zorder=1,
+                clip_on=clip_on,
+                label_pos=get_param_value(label_pos, "label_pos"),
+                labels_horizontal=not get_param_value(rotate, "rotate"),
+                ax=ax,
+            )
+
+    if hide_ticks:
+        ax.tick_params(
+            axis="both",
+            which="both",
+            bottom=False,
+            left=False,
+            labelbottom=False,
+            labelleft=False,
+        )
+
+    return text_items
+
+
+def draw_bipartite(G, **kwargs):
+    """Draw the graph `G` with a bipartite layout.
+
+    This is a convenience function equivalent to::
+
+        nx.draw(G, pos=nx.bipartite_layout(G), **kwargs)
+
+    Parameters
+    ----------
+    G : graph
+        A networkx graph
+
+    kwargs : optional keywords
+        See `draw_networkx` for a description of optional keywords.
+
+    Raises
+    ------
+    NetworkXError :
+        If `G` is not bipartite.
+
+    Notes
+    -----
+    The layout is computed each time this function is called.
For + repeated drawing it is much more efficient to call + `~networkx.drawing.layout.bipartite_layout` directly and reuse the result:: + + >>> G = nx.complete_bipartite_graph(3, 3) + >>> pos = nx.bipartite_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.complete_bipartite_graph(2, 5) + >>> nx.draw_bipartite(G) + + See Also + -------- + :func:`~networkx.drawing.layout.bipartite_layout` + """ + draw(G, pos=nx.bipartite_layout(G), **kwargs) + + +def draw_circular(G, **kwargs): + """Draw the graph `G` with a circular layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.circular_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. For + repeated drawing it is much more efficient to call + `~networkx.drawing.layout.circular_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.circular_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_circular(G) + + See Also + -------- + :func:`~networkx.drawing.layout.circular_layout` + """ + draw(G, pos=nx.circular_layout(G), **kwargs) + + +def draw_kamada_kawai(G, **kwargs): + """Draw the graph `G` with a Kamada-Kawai force-directed layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.kamada_kawai_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.kamada_kawai_layout` directly and reuse the + result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.kamada_kawai_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_kamada_kawai(G) + + See Also + -------- + :func:`~networkx.drawing.layout.kamada_kawai_layout` + """ + draw(G, pos=nx.kamada_kawai_layout(G), **kwargs) + + +def draw_random(G, **kwargs): + """Draw the graph `G` with a random layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.random_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. 
+ For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.random_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.random_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> nx.draw_random(G) + + See Also + -------- + :func:`~networkx.drawing.layout.random_layout` + """ + draw(G, pos=nx.random_layout(G), **kwargs) + + +def draw_spectral(G, **kwargs): + """Draw the graph `G` with a spectral 2D layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.spectral_layout(G), **kwargs) + + For more information about how node positions are determined, see + `~networkx.drawing.layout.spectral_layout`. + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.spectral_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.spectral_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.draw_spectral(G) + + See Also + -------- + :func:`~networkx.drawing.layout.spectral_layout` + """ + draw(G, pos=nx.spectral_layout(G), **kwargs) + + +def draw_spring(G, **kwargs): + """Draw the graph `G` with a spring layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.spring_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + `~networkx.drawing.layout.spring_layout` is also the default layout for + `draw`, so this function is equivalent to `draw`. + + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.spring_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.spring_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(20) + >>> nx.draw_spring(G) + + See Also + -------- + draw + :func:`~networkx.drawing.layout.spring_layout` + """ + draw(G, pos=nx.spring_layout(G), **kwargs) + + +def draw_shell(G, nlist=None, **kwargs): + """Draw networkx graph `G` with shell layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.shell_layout(G, nlist=nlist), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + nlist : list of list of nodes, optional + A list containing lists of nodes representing the shells. + Default is `None`, meaning all nodes are in a single shell. + See `~networkx.drawing.layout.shell_layout` for details. + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Notes + ----- + The layout is computed each time this function is called. 
+ For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.shell_layout` directly and reuse the result:: + + >>> G = nx.complete_graph(5) + >>> pos = nx.shell_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(4) + >>> shells = [[0], [1, 2, 3]] + >>> nx.draw_shell(G, nlist=shells) + + See Also + -------- + :func:`~networkx.drawing.layout.shell_layout` + """ + draw(G, pos=nx.shell_layout(G, nlist=nlist), **kwargs) + + +def draw_planar(G, **kwargs): + """Draw a planar networkx graph `G` with planar layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.planar_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A planar networkx graph + + kwargs : optional keywords + See `draw_networkx` for a description of optional keywords. + + Raises + ------ + NetworkXException + When `G` is not planar + + Notes + ----- + The layout is computed each time this function is called. + For repeated drawing it is much more efficient to call + `~networkx.drawing.layout.planar_layout` directly and reuse the result:: + + >>> G = nx.path_graph(5) + >>> pos = nx.planar_layout(G) + >>> nx.draw(G, pos=pos) # Draw the original graph + >>> # Draw a subgraph, reusing the same node positions + >>> nx.draw(G.subgraph([0, 1, 2]), pos=pos, node_color="red") + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.draw_planar(G) + + See Also + -------- + :func:`~networkx.drawing.layout.planar_layout` + """ + draw(G, pos=nx.planar_layout(G), **kwargs) + + +def draw_forceatlas2(G, **kwargs): + """Draw a networkx graph with forceatlas2 layout. + + This is a convenience function equivalent to:: + + nx.draw(G, pos=nx.forceatlas2_layout(G), **kwargs) + + Parameters + ---------- + G : graph + A networkx graph + + kwargs : optional keywords + See networkx.draw_networkx() for a description of optional keywords, + with the exception of the pos parameter which is not used by this + function. + """ + draw(G, pos=nx.forceatlas2_layout(G), **kwargs) + + +def apply_alpha(colors, alpha, elem_list, cmap=None, vmin=None, vmax=None): + """Apply an alpha (or list of alphas) to the colors provided. + + Parameters + ---------- + + colors : color string or array of floats (default='r') + Color of element. Can be a single color format string, + or a sequence of colors with the same length as nodelist. + If numeric values are specified they will be mapped to + colors using the cmap and vmin,vmax parameters. See + matplotlib.scatter for more details. + + alpha : float or array of floats + Alpha values for elements. This can be a single alpha value, in + which case it will be applied to all the elements of color. Otherwise, + if it is an array, the elements of alpha will be applied to the colors + in order (cycling through alpha multiple times if necessary). + + elem_list : array of networkx objects + The list of elements which are being colored. These could be nodes, + edges or labels. + + cmap : matplotlib colormap + Color map for use if colors is a list of floats corresponding to points + on a color mapping. + + vmin, vmax : float + Minimum and maximum values for normalizing colors if a colormap is used + + Returns + ------- + + rgba_colors : numpy ndarray + Array containing RGBA format values for each of the node colours. 
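+
+    Examples
+    --------
+    A minimal sketch; `apply_alpha` is an internal helper, so the import
+    below assumes this module is ``networkx.drawing.nx_pylab``.
+
+    >>> from networkx.drawing.nx_pylab import apply_alpha
+    >>> rgba = apply_alpha("r", [0.2, 0.5, 0.8], elem_list=[0, 1, 2])
+    >>> rgba.shape
+    (3, 4)
+    >>> rgba[:, 3].tolist()  # alpha values are cycled into the last column
+    [0.2, 0.5, 0.8]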
+ + """ + from itertools import cycle, islice + + import matplotlib as mpl + import matplotlib.cm # call as mpl.cm + import matplotlib.colors # call as mpl.colors + import numpy as np + + # If we have been provided with a list of numbers as long as elem_list, + # apply the color mapping. + if len(colors) == len(elem_list) and isinstance(colors[0], Number): + mapper = mpl.cm.ScalarMappable(cmap=cmap) + mapper.set_clim(vmin, vmax) + rgba_colors = mapper.to_rgba(colors) + # Otherwise, convert colors to matplotlib's RGB using the colorConverter + # object. These are converted to numpy ndarrays to be consistent with the + # to_rgba method of ScalarMappable. + else: + try: + rgba_colors = np.array([mpl.colors.colorConverter.to_rgba(colors)]) + except ValueError: + rgba_colors = np.array( + [mpl.colors.colorConverter.to_rgba(color) for color in colors] + ) + # Set the final column of the rgba_colors to have the relevant alpha values + try: + # If alpha is longer than the number of colors, resize to the number of + # elements. Also, if rgba_colors.size (the number of elements of + # rgba_colors) is the same as the number of elements, resize the array, + # to avoid it being interpreted as a colormap by scatter() + if len(alpha) > len(rgba_colors) or rgba_colors.size == len(elem_list): + rgba_colors = np.resize(rgba_colors, (len(elem_list), 4)) + rgba_colors[1:, 0] = rgba_colors[0, 0] + rgba_colors[1:, 1] = rgba_colors[0, 1] + rgba_colors[1:, 2] = rgba_colors[0, 2] + rgba_colors[:, 3] = list(islice(cycle(alpha), len(rgba_colors))) + except TypeError: + rgba_colors[:, -1] = alpha + return rgba_colors diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2f6da87 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-312.pyc new file mode 100644 index 0000000..67dfbd2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_agraph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-312.pyc new file mode 100644 index 0000000..2145144 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-312.pyc new file mode 100644 index 0000000..fc6b408 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-312.pyc new file mode 100644 index 0000000..79a2816 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-312.pyc new file mode 100644 index 0000000..515a191 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/__pycache__/test_pylab.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_complex.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_complex.png new file mode 100644 index 0000000..e4b34e8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_complex.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_empty_graph.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_empty_graph.png new file mode 100644 index 0000000..5d2f4e7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_empty_graph.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_house_with_colors.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_house_with_colors.png new file mode 100644 index 0000000..08862d7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_house_with_colors.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_labels_and_colors.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_labels_and_colors.png new file mode 100644 index 0000000..f82737d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_labels_and_colors.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_shortest_path.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_shortest_path.png new file mode 100644 index 0000000..87394a1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_display_shortest_path.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png new file mode 100644 index 0000000..31f4962 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/baseline/test_house_with_colors.png differ diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_agraph.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_agraph.py new file mode 100644 index 0000000..f580ad9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_agraph.py @@ -0,0 +1,240 @@ +"""Unit tests for PyGraphviz interface.""" + +import warnings + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + +pygraphviz = pytest.importorskip("pygraphviz") + + +class TestAGraph: + def 
build_graph(self, G): + edges = [("A", "B"), ("A", "C"), ("A", "C"), ("B", "C"), ("A", "D")] + G.add_edges_from(edges) + G.add_node("E") + G.graph["metal"] = "bronze" + return G + + def assert_equal(self, G1, G2): + assert nodes_equal(G1.nodes(), G2.nodes()) + assert edges_equal(G1.edges(), G2.edges()) + assert G1.graph["metal"] == G2.graph["metal"] + + @pytest.mark.parametrize( + "G", (nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph()) + ) + def test_agraph_roundtripping(self, G, tmp_path): + G = self.build_graph(G) + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + self.assert_equal(G, H) + + fname = tmp_path / "test.dot" + nx.drawing.nx_agraph.write_dot(H, fname) + Hin = nx.nx_agraph.read_dot(fname) + self.assert_equal(H, Hin) + + fname = tmp_path / "fh_test.dot" + with open(fname, "w") as fh: + nx.drawing.nx_agraph.write_dot(H, fh) + + with open(fname) as fh: + Hin = nx.nx_agraph.read_dot(fh) + self.assert_equal(H, Hin) + + def test_from_agraph_name(self): + G = nx.Graph(name="test") + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + assert G.name == "test" + + @pytest.mark.parametrize( + "graph_class", (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) + ) + def test_from_agraph_create_using(self, graph_class): + G = nx.path_graph(3) + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A, create_using=graph_class) + assert isinstance(H, graph_class) + + def test_from_agraph_named_edges(self): + # Create an AGraph from an existing (non-multi) Graph + G = nx.Graph() + G.add_nodes_from([0, 1]) + A = nx.nx_agraph.to_agraph(G) + # Add edge (+ name, given by key) to the AGraph + A.add_edge(0, 1, key="foo") + # Verify a.name roundtrips out to 'key' in from_agraph + H = nx.nx_agraph.from_agraph(A) + assert isinstance(H, nx.Graph) + assert ("0", "1", {"key": "foo"}) in H.edges(data=True) + + def test_to_agraph_with_nodedata(self): + G = nx.Graph() + G.add_node(1, color="red") + A = nx.nx_agraph.to_agraph(G) + assert dict(A.nodes()[0].attr) == {"color": "red"} + + @pytest.mark.parametrize("graph_class", (nx.Graph, nx.MultiGraph)) + def test_to_agraph_with_edgedata(self, graph_class): + G = graph_class() + G.add_nodes_from([0, 1]) + G.add_edge(0, 1, color="yellow") + A = nx.nx_agraph.to_agraph(G) + assert dict(A.edges()[0].attr) == {"color": "yellow"} + + def test_view_pygraphviz_path(self, tmp_path): + G = nx.complete_graph(3) + input_path = str(tmp_path / "graph.png") + out_path, A = nx.nx_agraph.view_pygraphviz(G, path=input_path, show=False) + assert out_path == input_path + # Ensure file is not empty + with open(input_path, "rb") as fh: + data = fh.read() + assert len(data) > 0 + + def test_view_pygraphviz_file_suffix(self, tmp_path): + G = nx.complete_graph(3) + path, A = nx.nx_agraph.view_pygraphviz(G, suffix=1, show=False) + assert path[-6:] == "_1.png" + + def test_view_pygraphviz(self): + G = nx.Graph() # "An empty graph cannot be drawn." 
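+        # view_pygraphviz is expected to raise on a graph with no nodes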
+ pytest.raises(nx.NetworkXException, nx.nx_agraph.view_pygraphviz, G) + G = nx.barbell_graph(4, 6) + nx.nx_agraph.view_pygraphviz(G, show=False) + + def test_view_pygraphviz_edgelabel(self): + G = nx.Graph() + G.add_edge(1, 2, weight=7) + G.add_edge(2, 3, weight=8) + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel="weight", show=False) + for edge in A.edges(): + assert edge.attr["weight"] in ("7", "8") + + def test_view_pygraphviz_callable_edgelabel(self): + G = nx.complete_graph(3) + + def foo_label(data): + return "foo" + + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel=foo_label, show=False) + for edge in A.edges(): + assert edge.attr["label"] == "foo" + + def test_view_pygraphviz_multigraph_edgelabels(self): + G = nx.MultiGraph() + G.add_edge(0, 1, key=0, name="left_fork") + G.add_edge(0, 1, key=1, name="right_fork") + path, A = nx.nx_agraph.view_pygraphviz(G, edgelabel="name", show=False) + edges = A.edges() + assert len(edges) == 2 + for edge in edges: + assert edge.attr["label"].strip() in ("left_fork", "right_fork") + + def test_graph_with_reserved_keywords(self): + # test attribute/keyword clash case for #1582 + # node: n + # edges: u,v + G = nx.Graph() + G = self.build_graph(G) + G.nodes["E"]["n"] = "keyword" + G.edges[("A", "B")]["u"] = "keyword" + G.edges[("A", "B")]["v"] = "keyword" + A = nx.nx_agraph.to_agraph(G) + + def test_view_pygraphviz_no_added_attrs_to_input(self): + G = nx.complete_graph(2) + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + assert G.graph == {} + + @pytest.mark.xfail(reason="known bug in clean_attrs") + def test_view_pygraphviz_leaves_input_graph_unmodified(self): + G = nx.complete_graph(2) + # Add entries to graph dict that to_agraph handles specially + G.graph["node"] = {"width": "0.80"} + G.graph["edge"] = {"fontsize": "14"} + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + assert G.graph == {"node": {"width": "0.80"}, "edge": {"fontsize": "14"}} + + def test_graph_with_AGraph_attrs(self): + G = nx.complete_graph(2) + # Add entries to graph dict that to_agraph handles specially + G.graph["node"] = {"width": "0.80"} + G.graph["edge"] = {"fontsize": "14"} + path, A = nx.nx_agraph.view_pygraphviz(G, show=False) + # Ensure user-specified values are not lost + assert dict(A.node_attr)["width"] == "0.80" + assert dict(A.edge_attr)["fontsize"] == "14" + + def test_round_trip_empty_graph(self): + G = nx.Graph() + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + # assert graphs_equal(G, H) + AA = nx.nx_agraph.to_agraph(H) + HH = nx.nx_agraph.from_agraph(AA) + assert graphs_equal(H, HH) + G.graph["graph"] = {} + G.graph["node"] = {} + G.graph["edge"] = {} + assert graphs_equal(G, HH) + + @pytest.mark.xfail(reason="integer->string node conversion in round trip") + def test_round_trip_integer_nodes(self): + G = nx.complete_graph(3) + A = nx.nx_agraph.to_agraph(G) + H = nx.nx_agraph.from_agraph(A) + assert graphs_equal(G, H) + + def test_graphviz_alias(self): + G = self.build_graph(nx.Graph()) + pos_graphviz = nx.nx_agraph.graphviz_layout(G) + pos_pygraphviz = nx.nx_agraph.pygraphviz_layout(G) + assert pos_graphviz == pos_pygraphviz + + @pytest.mark.parametrize("root", range(5)) + def test_pygraphviz_layout_root(self, root): + # NOTE: test depends on layout prog being deterministic + G = nx.complete_graph(5) + A = nx.nx_agraph.to_agraph(G) + # Get layout with root arg is not None + pygv_layout = nx.nx_agraph.pygraphviz_layout(G, prog="circo", root=root) + # Equivalent layout directly on AGraph + 
A.layout(args=f"-Groot={root}", prog="circo") + # Parse AGraph layout + a1_pos = tuple(float(v) for v in dict(A.get_node("1").attr)["pos"].split(",")) + assert pygv_layout[1] == a1_pos + + def test_2d_layout(self): + G = nx.Graph() + G = self.build_graph(G) + G.graph["dimen"] = 2 + pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato") + pos = list(pos.values()) + assert len(pos) == 5 + assert len(pos[0]) == 2 + + def test_3d_layout(self): + G = nx.Graph() + G = self.build_graph(G) + G.graph["dimen"] = 3 + pos = nx.nx_agraph.pygraphviz_layout(G, prog="neato") + pos = list(pos.values()) + assert len(pos) == 5 + assert len(pos[0]) == 3 + + def test_no_warnings_raised(self): + # Test that no warnings are raised when Networkx graph + # is converted to Pygraphviz graph and 'pos' + # attribute is given + G = nx.Graph() + G.add_node(0, pos=(0, 0)) + G.add_node(1, pos=(1, 1)) + A = nx.nx_agraph.to_agraph(G) + with warnings.catch_warnings(record=True) as record: + A.layout() + assert len(record) == 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_latex.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_latex.py new file mode 100644 index 0000000..6ec9b07 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_latex.py @@ -0,0 +1,285 @@ +import pytest + +import networkx as nx + + +def test_tikz_attributes(): + G = nx.path_graph(4, create_using=nx.DiGraph) + pos = {n: (n, n) for n in G} + + G.add_edge(0, 0) + G.edges[(0, 0)]["label"] = "Loop" + G.edges[(0, 0)]["label_options"] = "midway" + + G.nodes[0]["style"] = "blue" + G.nodes[1]["style"] = "line width=3,draw" + G.nodes[2]["style"] = "circle,draw,blue!50" + G.nodes[3]["label"] = "Stop" + G.edges[(0, 1)]["label"] = "1st Step" + G.edges[(0, 1)]["label_options"] = "near end" + G.edges[(2, 3)]["label"] = "3rd Step" + G.edges[(2, 3)]["label_options"] = "near start" + G.edges[(2, 3)]["style"] = "bend left,green" + G.edges[(1, 2)]["label"] = "2nd" + G.edges[(1, 2)]["label_options"] = "pos=0.5" + G.edges[(1, 2)]["style"] = ">->,bend right,line width=3,green!90" + + output_tex = nx.to_latex( + G, + pos=pos, + as_document=False, + tikz_options="[scale=3]", + node_options="style", + edge_options="style", + node_label="label", + edge_label="label", + edge_label_options="label_options", + ) + expected_tex = r"""\begin{figure} + \begin{tikzpicture}[scale=3] + \draw + (0, 0) node[blue] (0){0} + (1, 1) node[line width=3,draw] (1){1} + (2, 2) node[circle,draw,blue!50] (2){2} + (3, 3) node (3){Stop}; + \begin{scope}[->] + \draw (0) to node[near end] {1st Step} (1); + \draw[loop,] (0) to node[midway] {Loop} (0); + \draw[>->,bend right,line width=3,green!90] (1) to node[pos=0.5] {2nd} (2); + \draw[bend left,green] (2) to node[near start] {3rd Step} (3); + \end{scope} + \end{tikzpicture} +\end{figure}""" + + # First, check for consistency line-by-line - if this fails, the mismatched + # line will be shown explicitly in the failure summary + for expected, actual in zip(expected_tex.split("\n"), output_tex.split("\n")): + assert expected == actual + + assert output_tex == expected_tex + + +def test_basic_multiple_graphs(): + H1 = nx.path_graph(4) + H2 = nx.complete_graph(4) + H3 = nx.path_graph(8) + H4 = nx.complete_graph(8) + captions = [ + "Path on 4 nodes", + "Complete graph on 4 nodes", + "Path on 8 nodes", + "Complete graph on 8 nodes", + ] + labels = ["fig2a", "fig2b", "fig2c", "fig2d"] + latex_code = nx.to_latex( + [H1, H2, H3, H4], + n_rows=2, + sub_captions=captions, + sub_labels=labels, + 
) + assert "begin{document}" in latex_code + assert "begin{figure}" in latex_code + assert latex_code.count("begin{subfigure}") == 4 + assert latex_code.count("tikzpicture") == 8 + assert latex_code.count("[-]") == 4 + + +def test_basic_tikz(): + expected_tex = r"""\documentclass{report} +\usepackage{tikz} +\usepackage{subcaption} + +\begin{document} +\begin{figure} + \begin{subfigure}{0.5\textwidth} + \begin{tikzpicture}[scale=2] + \draw[gray!90] + (0.749, 0.702) node[red!90] (0){0} + (1.0, -0.014) node[red!90] (1){1} + (-0.777, -0.705) node (2){2} + (-0.984, 0.042) node (3){3} + (-0.028, 0.375) node[cyan!90] (4){4} + (-0.412, 0.888) node (5){5} + (0.448, -0.856) node (6){6} + (0.003, -0.431) node[cyan!90] (7){7}; + \begin{scope}[->,gray!90] + \draw (0) to (4); + \draw (0) to (5); + \draw (0) to (6); + \draw (0) to (7); + \draw (1) to (4); + \draw (1) to (5); + \draw (1) to (6); + \draw (1) to (7); + \draw (2) to (4); + \draw (2) to (5); + \draw (2) to (6); + \draw (2) to (7); + \draw (3) to (4); + \draw (3) to (5); + \draw (3) to (6); + \draw (3) to (7); + \end{scope} + \end{tikzpicture} + \caption{My tikz number 1 of 2}\label{tikz_1_2} + \end{subfigure} + \begin{subfigure}{0.5\textwidth} + \begin{tikzpicture}[scale=2] + \draw[gray!90] + (0.749, 0.702) node[green!90] (0){0} + (1.0, -0.014) node[green!90] (1){1} + (-0.777, -0.705) node (2){2} + (-0.984, 0.042) node (3){3} + (-0.028, 0.375) node[purple!90] (4){4} + (-0.412, 0.888) node (5){5} + (0.448, -0.856) node (6){6} + (0.003, -0.431) node[purple!90] (7){7}; + \begin{scope}[->,gray!90] + \draw (0) to (4); + \draw (0) to (5); + \draw (0) to (6); + \draw (0) to (7); + \draw (1) to (4); + \draw (1) to (5); + \draw (1) to (6); + \draw (1) to (7); + \draw (2) to (4); + \draw (2) to (5); + \draw (2) to (6); + \draw (2) to (7); + \draw (3) to (4); + \draw (3) to (5); + \draw (3) to (6); + \draw (3) to (7); + \end{scope} + \end{tikzpicture} + \caption{My tikz number 2 of 2}\label{tikz_2_2} + \end{subfigure} + \caption{A graph generated with python and latex.} +\end{figure} +\end{document}""" + + edges = [ + (0, 4), + (0, 5), + (0, 6), + (0, 7), + (1, 4), + (1, 5), + (1, 6), + (1, 7), + (2, 4), + (2, 5), + (2, 6), + (2, 7), + (3, 4), + (3, 5), + (3, 6), + (3, 7), + ] + G = nx.DiGraph() + G.add_nodes_from(range(8)) + G.add_edges_from(edges) + pos = { + 0: (0.7490296171687696, 0.702353520257394), + 1: (1.0, -0.014221357723796535), + 2: (-0.7765783344161441, -0.7054170966808919), + 3: (-0.9842690223417624, 0.04177547602465483), + 4: (-0.02768523817180917, 0.3745724439551441), + 5: (-0.41154855146767433, 0.8880106515525136), + 6: (0.44780153389148264, -0.8561492709269164), + 7: (0.0032499953371383505, -0.43092436645809945), + } + + rc_node_color = {0: "red!90", 1: "red!90", 4: "cyan!90", 7: "cyan!90"} + gp_node_color = {0: "green!90", 1: "green!90", 4: "purple!90", 7: "purple!90"} + + H = G.copy() + nx.set_node_attributes(G, rc_node_color, "color") + nx.set_node_attributes(H, gp_node_color, "color") + + sub_captions = ["My tikz number 1 of 2", "My tikz number 2 of 2"] + sub_labels = ["tikz_1_2", "tikz_2_2"] + + output_tex = nx.to_latex( + [G, H], + [pos, pos], + tikz_options="[scale=2]", + default_node_options="gray!90", + default_edge_options="gray!90", + node_options="color", + sub_captions=sub_captions, + sub_labels=sub_labels, + caption="A graph generated with python and latex.", + n_rows=2, + as_document=True, + ) + + # First, check for consistency line-by-line - if this fails, the mismatched + # line will be shown explicitly in the failure 
summary + for expected, actual in zip(expected_tex.split("\n"), output_tex.split("\n")): + assert expected == actual + # Double-check for document-level consistency + assert output_tex == expected_tex + + +def test_exception_pos_single_graph(to_latex=nx.to_latex): + # smoke test that pos can be a string + G = nx.path_graph(4) + to_latex(G, pos="pos") + + # must include all nodes + pos = {0: (1, 2), 1: (0, 1), 2: (2, 1)} + with pytest.raises(nx.NetworkXError): + to_latex(G, pos) + + # must have 2 values + pos[3] = (1, 2, 3) + with pytest.raises(nx.NetworkXError): + to_latex(G, pos) + pos[3] = 2 + with pytest.raises(nx.NetworkXError): + to_latex(G, pos) + + # check that passes with 2 values + pos[3] = (3, 2) + to_latex(G, pos) + + +def test_exception_multiple_graphs(to_latex=nx.to_latex): + G = nx.path_graph(3) + pos_bad = {0: (1, 2), 1: (0, 1)} + pos_OK = {0: (1, 2), 1: (0, 1), 2: (2, 1)} + fourG = [G, G, G, G] + fourpos = [pos_OK, pos_OK, pos_OK, pos_OK] + + # input single dict to use for all graphs + to_latex(fourG, pos_OK) + with pytest.raises(nx.NetworkXError): + to_latex(fourG, pos_bad) + + # input list of dicts to use for all graphs + to_latex(fourG, fourpos) + with pytest.raises(nx.NetworkXError): + to_latex(fourG, [pos_bad, pos_bad, pos_bad, pos_bad]) + + # every pos dict must include all nodes + with pytest.raises(nx.NetworkXError): + to_latex(fourG, [pos_OK, pos_OK, pos_bad, pos_OK]) + + # test sub_captions and sub_labels (len must match Gbunch) + with pytest.raises(nx.NetworkXError): + to_latex(fourG, fourpos, sub_captions=["hi", "hi"]) + + with pytest.raises(nx.NetworkXError): + to_latex(fourG, fourpos, sub_labels=["hi", "hi"]) + + # all pass + to_latex(fourG, fourpos, sub_captions=["hi"] * 4, sub_labels=["lbl"] * 4) + + +def test_exception_multigraph(): + G = nx.path_graph(4, create_using=nx.MultiGraph) + G.add_edge(1, 2) + with pytest.raises(nx.NetworkXNotImplemented): + nx.to_latex(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_layout.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_layout.py new file mode 100644 index 0000000..34d71ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_layout.py @@ -0,0 +1,631 @@ +"""Unit tests for layout functions.""" + +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestLayout: + @classmethod + def setup_class(cls): + cls.Gi = nx.grid_2d_graph(5, 5) + cls.Gs = nx.Graph() + nx.add_path(cls.Gs, "abcdef") + cls.bigG = nx.grid_2d_graph(25, 25) # > 500 nodes for sparse + + def test_spring_fixed_without_pos(self): + G = nx.path_graph(4) + # No pos dict at all + with pytest.raises(ValueError, match="nodes are fixed without positions"): + nx.spring_layout(G, fixed=[0]) + + pos = {0: (1, 1), 2: (0, 0)} + # Node 1 not in pos dict + with pytest.raises(ValueError, match="nodes are fixed without positions"): + nx.spring_layout(G, fixed=[0, 1], pos=pos) + + # All fixed nodes in pos dict + out = nx.spring_layout(G, fixed=[0, 2], pos=pos) # No ValueError + assert all(np.array_equal(out[n], pos[n]) for n in (0, 2)) + + def test_spring_init_pos(self): + # Tests GH #2448 + import math + + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)]) + + init_pos = {0: (0.0, 0.0)} + fixed_pos = [0] + pos = nx.fruchterman_reingold_layout(G, pos=init_pos, fixed=fixed_pos) + has_nan = any(math.isnan(c) for coords in pos.values() for c in coords) + assert not has_nan, "values should not be nan" + + def 
test_smoke_empty_graph(self): + G = [] + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.fruchterman_reingold_layout(G) + nx.spectral_layout(G) + nx.shell_layout(G) + nx.bipartite_layout(G, G) + nx.spiral_layout(G) + nx.multipartite_layout(G) + nx.kamada_kawai_layout(G) + + def test_smoke_int(self): + G = self.Gi + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.forceatlas2_layout(G) + nx.fruchterman_reingold_layout(G) + nx.fruchterman_reingold_layout(self.bigG) + nx.spectral_layout(G) + nx.spectral_layout(G.to_directed()) + nx.spectral_layout(self.bigG) + nx.spectral_layout(self.bigG.to_directed()) + nx.shell_layout(G) + nx.spiral_layout(G) + nx.kamada_kawai_layout(G) + nx.kamada_kawai_layout(G, dim=1) + nx.kamada_kawai_layout(G, dim=3) + nx.arf_layout(G) + + def test_smoke_string(self): + G = self.Gs + nx.random_layout(G) + nx.circular_layout(G) + nx.planar_layout(G) + nx.spring_layout(G) + nx.forceatlas2_layout(G) + nx.fruchterman_reingold_layout(G) + nx.spectral_layout(G) + nx.shell_layout(G) + nx.spiral_layout(G) + nx.kamada_kawai_layout(G) + nx.kamada_kawai_layout(G, dim=1) + nx.kamada_kawai_layout(G, dim=3) + nx.arf_layout(G) + + def check_scale_and_center(self, pos, scale, center): + center = np.array(center) + low = center - scale + hi = center + scale + vpos = np.array(list(pos.values())) + length = vpos.max(0) - vpos.min(0) + assert (length <= 2 * scale).all() + assert (vpos >= low).all() + assert (vpos <= hi).all() + + def test_scale_and_center_arg(self): + sc = self.check_scale_and_center + c = (4, 5) + G = nx.complete_graph(9) + G.add_node(9) + sc(nx.random_layout(G, center=c), scale=0.5, center=(4.5, 5.5)) + # rest can have 2*scale length: [-scale, scale] + sc(nx.spring_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.spectral_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.circular_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.shell_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.spiral_layout(G, scale=2, center=c), scale=2, center=c) + sc(nx.kamada_kawai_layout(G, scale=2, center=c), scale=2, center=c) + + c = (2, 3, 5) + sc(nx.kamada_kawai_layout(G, dim=3, scale=2, center=c), scale=2, center=c) + + def test_planar_layout_non_planar_input(self): + G = nx.complete_graph(9) + pytest.raises(nx.NetworkXException, nx.planar_layout, G) + + def test_smoke_planar_layout_embedding_input(self): + embedding = nx.PlanarEmbedding() + embedding.set_data({0: [1, 2], 1: [0, 2], 2: [0, 1]}) + nx.planar_layout(embedding) + + def test_default_scale_and_center(self): + sc = self.check_scale_and_center + c = (0, 0) + G = nx.complete_graph(9) + G.add_node(9) + sc(nx.random_layout(G), scale=0.5, center=(0.5, 0.5)) + sc(nx.spring_layout(G), scale=1, center=c) + sc(nx.spectral_layout(G), scale=1, center=c) + sc(nx.circular_layout(G), scale=1, center=c) + sc(nx.shell_layout(G), scale=1, center=c) + sc(nx.spiral_layout(G), scale=1, center=c) + sc(nx.kamada_kawai_layout(G), scale=1, center=c) + + c = (0, 0, 0) + sc(nx.kamada_kawai_layout(G, dim=3), scale=1, center=c) + + def test_circular_planar_and_shell_dim_error(self): + G = nx.path_graph(4) + pytest.raises(ValueError, nx.circular_layout, G, dim=1) + pytest.raises(ValueError, nx.shell_layout, G, dim=1) + pytest.raises(ValueError, nx.shell_layout, G, dim=3) + pytest.raises(ValueError, nx.planar_layout, G, dim=1) + pytest.raises(ValueError, nx.planar_layout, G, dim=3) + + def test_adjacency_interface_numpy(self): + A = 
nx.to_numpy_array(self.Gs) + pos = nx.drawing.layout._fruchterman_reingold(A) + assert pos.shape == (6, 2) + pos = nx.drawing.layout._fruchterman_reingold(A, dim=3) + assert pos.shape == (6, 3) + pos = nx.drawing.layout._sparse_fruchterman_reingold(A) + assert pos.shape == (6, 2) + + def test_adjacency_interface_scipy(self): + A = nx.to_scipy_sparse_array(self.Gs, dtype="d") + pos = nx.drawing.layout._sparse_fruchterman_reingold(A) + assert pos.shape == (6, 2) + pos = nx.drawing.layout._sparse_spectral(A) + assert pos.shape == (6, 2) + pos = nx.drawing.layout._sparse_fruchterman_reingold(A, dim=3) + assert pos.shape == (6, 3) + + def test_single_nodes(self): + G = nx.path_graph(1) + vpos = nx.shell_layout(G) + assert not vpos[0].any() + G = nx.path_graph(4) + vpos = nx.shell_layout(G, [[0], [1, 2], [3]]) + assert not vpos[0].any() + assert vpos[3].any() # ensure node 3 not at origin (#3188) + assert np.linalg.norm(vpos[3]) <= 1 # ensure node 3 fits (#3753) + vpos = nx.shell_layout(G, [[0], [1, 2], [3]], rotate=0) + assert np.linalg.norm(vpos[3]) <= 1 # ensure node 3 fits (#3753) + + def test_smoke_initial_pos_forceatlas2(self): + pos = nx.circular_layout(self.Gi) + npos = nx.forceatlas2_layout(self.Gi, pos=pos) + + def test_smoke_initial_pos_fruchterman_reingold(self): + pos = nx.circular_layout(self.Gi) + npos = nx.fruchterman_reingold_layout(self.Gi, pos=pos) + + def test_smoke_initial_pos_arf(self): + pos = nx.circular_layout(self.Gi) + npos = nx.arf_layout(self.Gi, pos=pos) + + def test_fixed_node_fruchterman_reingold(self): + # Dense version (numpy based) + pos = nx.circular_layout(self.Gi) + npos = nx.spring_layout(self.Gi, pos=pos, fixed=[(0, 0)]) + assert tuple(pos[(0, 0)]) == tuple(npos[(0, 0)]) + # Sparse version (scipy based) + pos = nx.circular_layout(self.bigG) + npos = nx.spring_layout(self.bigG, pos=pos, fixed=[(0, 0)]) + for axis in range(2): + assert pos[(0, 0)][axis] == pytest.approx(npos[(0, 0)][axis], abs=1e-7) + + def test_center_parameter(self): + G = nx.path_graph(1) + nx.random_layout(G, center=(1, 1)) + vpos = nx.circular_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.planar_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.spring_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.fruchterman_reingold_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.spectral_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.shell_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + vpos = nx.spiral_layout(G, center=(1, 1)) + assert tuple(vpos[0]) == (1, 1) + + def test_center_wrong_dimensions(self): + G = nx.path_graph(1) + assert id(nx.spring_layout) == id(nx.fruchterman_reingold_layout) + pytest.raises(ValueError, nx.random_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.circular_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.planar_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spring_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spring_layout, G, dim=3, center=(1, 1)) + pytest.raises(ValueError, nx.spectral_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spectral_layout, G, dim=3, center=(1, 1)) + pytest.raises(ValueError, nx.shell_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.spiral_layout, G, center=(1, 1, 1)) + pytest.raises(ValueError, nx.kamada_kawai_layout, G, center=(1, 1, 1)) + + def test_empty_graph(self): + G = nx.empty_graph() + vpos = nx.random_layout(G, center=(1, 1)) + 
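+        # Each layout should map an empty graph to an empty position dict.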
assert vpos == {} + vpos = nx.circular_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.planar_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.bipartite_layout(G, G) + assert vpos == {} + vpos = nx.spring_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.fruchterman_reingold_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.spectral_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.shell_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.spiral_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.multipartite_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.kamada_kawai_layout(G, center=(1, 1)) + assert vpos == {} + vpos = nx.forceatlas2_layout(G) + assert vpos == {} + vpos = nx.arf_layout(G) + assert vpos == {} + + def test_bipartite_layout(self): + G = nx.complete_bipartite_graph(3, 5) + top, bottom = nx.bipartite.sets(G) + + vpos = nx.bipartite_layout(G, top) + assert len(vpos) == len(G) + + top_x = vpos[list(top)[0]][0] + bottom_x = vpos[list(bottom)[0]][0] + for node in top: + assert vpos[node][0] == top_x + for node in bottom: + assert vpos[node][0] == bottom_x + + vpos = nx.bipartite_layout( + G, top, align="horizontal", center=(2, 2), scale=2, aspect_ratio=1 + ) + assert len(vpos) == len(G) + + top_y = vpos[list(top)[0]][1] + bottom_y = vpos[list(bottom)[0]][1] + for node in top: + assert vpos[node][1] == top_y + for node in bottom: + assert vpos[node][1] == bottom_y + + pytest.raises(ValueError, nx.bipartite_layout, G, top, align="foo") + + def test_multipartite_layout(self): + sizes = (0, 5, 7, 2, 8) + G = nx.complete_multipartite_graph(*sizes) + + vpos = nx.multipartite_layout(G) + assert len(vpos) == len(G) + + start = 0 + for n in sizes: + end = start + n + assert all(vpos[start][0] == vpos[i][0] for i in range(start + 1, end)) + start += n + + vpos = nx.multipartite_layout(G, align="horizontal", scale=2, center=(2, 2)) + assert len(vpos) == len(G) + + start = 0 + for n in sizes: + end = start + n + assert all(vpos[start][1] == vpos[i][1] for i in range(start + 1, end)) + start += n + + pytest.raises(ValueError, nx.multipartite_layout, G, align="foo") + + def test_kamada_kawai_costfn_1d(self): + costfn = nx.drawing.layout._kamada_kawai_costfn + + pos = np.array([4.0, 7.0]) + invdist = 1 / np.array([[0.1, 2.0], [2.0, 0.3]]) + + cost, grad = costfn(pos, np, invdist, meanweight=0, dim=1) + + assert cost == pytest.approx(((3 / 2.0 - 1) ** 2), abs=1e-7) + assert grad[0] == pytest.approx((-0.5), abs=1e-7) + assert grad[1] == pytest.approx(0.5, abs=1e-7) + + def check_kamada_kawai_costfn(self, pos, invdist, meanwt, dim): + costfn = nx.drawing.layout._kamada_kawai_costfn + + cost, grad = costfn(pos.ravel(), np, invdist, meanweight=meanwt, dim=dim) + + expected_cost = 0.5 * meanwt * np.sum(np.sum(pos, axis=0) ** 2) + for i in range(pos.shape[0]): + for j in range(i + 1, pos.shape[0]): + diff = np.linalg.norm(pos[i] - pos[j]) + expected_cost += (diff * invdist[i][j] - 1.0) ** 2 + + assert cost == pytest.approx(expected_cost, abs=1e-7) + + dx = 1e-4 + for nd in range(pos.shape[0]): + for dm in range(pos.shape[1]): + idx = nd * pos.shape[1] + dm + ps = pos.flatten() + + ps[idx] += dx + cplus = costfn(ps, np, invdist, meanweight=meanwt, dim=pos.shape[1])[0] + + ps[idx] -= 2 * dx + cminus = costfn(ps, np, invdist, meanweight=meanwt, dim=pos.shape[1])[0] + + assert grad[idx] == pytest.approx((cplus - cminus) / (2 * dx), abs=1e-5) + + def test_kamada_kawai_costfn(self): + invdist = 1 / np.array([[0.1, 2.1, 1.7], [2.1, 0.2, 0.6], [1.7, 
0.6, 0.3]])
+        meanwt = 0.3
+
+        # 2d
+        pos = np.array([[1.3, -3.2], [2.7, -0.3], [5.1, 2.5]])
+
+        self.check_kamada_kawai_costfn(pos, invdist, meanwt, 2)
+
+        # 3d
+        pos = np.array([[0.9, 8.6, -8.7], [-10, -0.5, -7.1], [9.1, -8.1, 1.6]])
+
+        self.check_kamada_kawai_costfn(pos, invdist, meanwt, 3)
+
+    def test_spiral_layout(self):
+        G = self.Gs
+
+        # A lower resolution value should produce a more compact layout:
+        # intuitively, the total distance from the start node to the end node,
+        # transiting through each node in between, will be smaller, assuming
+        # the computed node positions are not rescaled.
+        pos_standard = np.array(list(nx.spiral_layout(G, resolution=0.35).values()))
+        pos_tighter = np.array(list(nx.spiral_layout(G, resolution=0.34).values()))
+        distances = np.linalg.norm(pos_standard[:-1] - pos_standard[1:], axis=1)
+        distances_tighter = np.linalg.norm(pos_tighter[:-1] - pos_tighter[1:], axis=1)
+        assert sum(distances) > sum(distances_tighter)
+
+        # return near-equidistant points after the first value if set to true
+        pos_equidistant = np.array(list(nx.spiral_layout(G, equidistant=True).values()))
+        distances_equidistant = np.linalg.norm(
+            pos_equidistant[:-1] - pos_equidistant[1:], axis=1
+        )
+        assert np.allclose(
+            distances_equidistant[1:], distances_equidistant[-1], atol=0.01
+        )
+
+    def test_spiral_layout_equidistant(self):
+        G = nx.path_graph(10)
+        nx.spiral_layout(G, equidistant=True, store_pos_as="pos")
+        pos = nx.get_node_attributes(G, "pos")
+        # Extract individual node positions as an array
+        p = np.array(list(pos.values()))
+        # Element-wise distance between consecutive node positions
+        dist = np.linalg.norm(p[1:] - p[:-1], axis=1)
+        assert np.allclose(np.diff(dist), 0, atol=1e-3)
+
+    def test_forceatlas2_layout_partial_input_test(self):
+        # check whether partial pos input still returns a full, proper position
+        G = self.Gs
+        node = nx.utils.arbitrary_element(G)
+        pos = nx.circular_layout(G)
+        del pos[node]
+        pos = nx.forceatlas2_layout(G, pos=pos)
+        assert len(pos) == len(G)
+
+    def test_rescale_layout_dict(self):
+        G = nx.empty_graph()
+        vpos = nx.random_layout(G, center=(1, 1))
+        assert nx.rescale_layout_dict(vpos) == {}
+
+        G = nx.empty_graph(2)
+        vpos = {0: (0.0, 0.0), 1: (1.0, 1.0)}
+        s_vpos = nx.rescale_layout_dict(vpos)
+        assert np.linalg.norm([sum(x) for x in zip(*s_vpos.values())]) < 1e-6
+
+        G = nx.empty_graph(3)
+        vpos = {0: (0, 0), 1: (1, 1), 2: (0.5, 0.5)}
+        s_vpos = nx.rescale_layout_dict(vpos)
+
+        expectation = {
+            0: np.array((-1, -1)),
+            1: np.array((1, 1)),
+            2: np.array((0, 0)),
+        }
+        for k, v in expectation.items():
+            assert (s_vpos[k] == v).all()
+        s_vpos = nx.rescale_layout_dict(vpos, scale=2)
+        expectation = {
+            0: np.array((-2, -2)),
+            1: np.array((2, 2)),
+            2: np.array((0, 0)),
+        }
+        for k, v in expectation.items():
+            assert (s_vpos[k] == v).all()
+
+    def test_arf_layout_partial_input_test(self):
+        # Checks whether partial pos input still returns a proper position.
+        G = self.Gs
+        node = nx.utils.arbitrary_element(G)
+        pos = nx.circular_layout(G)
+        del pos[node]
+        pos = nx.arf_layout(G, pos=pos)
+        assert len(pos) == len(G)
+
+    def test_arf_layout_negative_a_check(self):
+        """
+        Check that invalid input parameters raise errors.
For example, `a` should be larger than 1 + """ + G = self.Gs + pytest.raises(ValueError, nx.arf_layout, G=G, a=-1) + + def test_smoke_seed_input(self): + G = self.Gs + nx.random_layout(G, seed=42) + nx.spring_layout(G, seed=42) + nx.arf_layout(G, seed=42) + nx.forceatlas2_layout(G, seed=42) + + def test_node_at_center(self): + # see gh-7791 avoid divide by zero + G = nx.path_graph(3) + orig_pos = {i: [i - 1, 0.0] for i in range(3)} + new_pos = nx.forceatlas2_layout(G, pos=orig_pos) + + def test_initial_only_some_pos(self): + G = nx.path_graph(3) + orig_pos = {i: [i - 1, 0.0] for i in range(2)} + new_pos = nx.forceatlas2_layout(G, pos=orig_pos, seed=42) + + +def test_multipartite_layout_nonnumeric_partition_labels(): + """See gh-5123.""" + G = nx.Graph() + G.add_node(0, subset="s0") + G.add_node(1, subset="s0") + G.add_node(2, subset="s1") + G.add_node(3, subset="s1") + G.add_edges_from([(0, 2), (0, 3), (1, 2)]) + pos = nx.multipartite_layout(G) + assert len(pos) == len(G) + + +def test_multipartite_layout_layer_order(): + """Return the layers in sorted order if the layers of the multipartite + graph are sortable. See gh-5691""" + G = nx.Graph() + node_group = dict(zip(("a", "b", "c", "d", "e"), (2, 3, 1, 2, 4))) + for node, layer in node_group.items(): + G.add_node(node, subset=layer) + + # Horizontal alignment, therefore y-coord determines layers + pos = nx.multipartite_layout(G, align="horizontal") + + layers = nx.utils.groups(node_group) + pos_from_layers = nx.multipartite_layout(G, align="horizontal", subset_key=layers) + for (n1, p1), (n2, p2) in zip(pos.items(), pos_from_layers.items()): + assert n1 == n2 and (p1 == p2).all() + + # Nodes "a" and "d" are in the same layer + assert pos["a"][-1] == pos["d"][-1] + # positions should be sorted according to layer + assert pos["c"][-1] < pos["a"][-1] < pos["b"][-1] < pos["e"][-1] + + # Make sure that multipartite_layout still works when layers are not sortable + G.nodes["a"]["subset"] = "layer_0" # Can't sort mixed strs/ints + pos_nosort = nx.multipartite_layout(G) # smoke test: this should not raise + assert pos_nosort.keys() == pos.keys() + + +def _num_nodes_per_bfs_layer(pos): + """Helper function to extract the number of nodes in each layer of bfs_layout""" + x = np.array(list(pos.values()))[:, 0] # node positions in layered dimension + _, layer_count = np.unique(x, return_counts=True) + return layer_count + + +@pytest.mark.parametrize("n", range(2, 7)) +def test_bfs_layout_complete_graph(n): + """The complete graph should result in two layers: the starting node and + a second layer containing all neighbors.""" + G = nx.complete_graph(n) + nx.bfs_layout(G, start=0, store_pos_as="pos") + pos = nx.get_node_attributes(G, "pos") + assert np.array_equal(_num_nodes_per_bfs_layer(pos), [1, n - 1]) + + +def test_bfs_layout_barbell(): + G = nx.barbell_graph(5, 3) + # Start in one of the "bells" + pos = nx.bfs_layout(G, start=0) + # start, bell-1, [1] * len(bar)+1, bell-1 + expected_nodes_per_layer = [1, 4, 1, 1, 1, 1, 4] + assert np.array_equal(_num_nodes_per_bfs_layer(pos), expected_nodes_per_layer) + # Start in the other "bell" - expect same layer pattern + pos = nx.bfs_layout(G, start=12) + assert np.array_equal(_num_nodes_per_bfs_layer(pos), expected_nodes_per_layer) + # Starting in the center of the bar, expect layers to be symmetric + pos = nx.bfs_layout(G, start=6) + # Expected layers: {6 (start)}, {5, 7}, {4, 8}, {8 nodes from remainder of bells} + expected_nodes_per_layer = [1, 2, 2, 8] + assert 
np.array_equal(_num_nodes_per_bfs_layer(pos), expected_nodes_per_layer) + + +def test_bfs_layout_disconnected(): + G = nx.complete_graph(5) + G.add_edges_from([(10, 11), (11, 12)]) + with pytest.raises(nx.NetworkXError, match="bfs_layout didn't include all nodes"): + nx.bfs_layout(G, start=0) + + +def test_bipartite_layout_default_nodes_raises_non_bipartite_input(): + G = nx.complete_graph(5) + with pytest.raises(nx.NetworkXError, match="Graph is not bipartite"): + nx.bipartite_layout(G) + # No exception if nodes are explicitly specified + pos = nx.bipartite_layout(G, nodes=[2, 3]) + + +def test_bipartite_layout_default_nodes(): + G = nx.complete_bipartite_graph(3, 3) + pos = nx.bipartite_layout(G) # no nodes specified + # X coords of nodes should be the same within the bipartite sets + for nodeset in nx.bipartite.sets(G): + xs = [pos[k][0] for k in nodeset] + assert all(x == pytest.approx(xs[0]) for x in xs) + + +@pytest.mark.parametrize( + "layout", + [ + nx.random_layout, + nx.circular_layout, + nx.shell_layout, + nx.spring_layout, + nx.kamada_kawai_layout, + nx.spectral_layout, + nx.planar_layout, + nx.spiral_layout, + nx.forceatlas2_layout, + ], +) +def test_layouts_negative_dim(layout): + """Test all layouts that support dim kwarg handle invalid inputs.""" + G = nx.path_graph(4) + valid_err_msgs = "|".join( + [ + "negative dimensions.*not allowed", + "can only handle 2", + "cannot handle.*2", + ] + ) + with pytest.raises(ValueError, match=valid_err_msgs): + layout(G, dim=-1) + + +@pytest.mark.parametrize( + ("num_nodes", "expected_method"), [(100, "force"), (501, "energy")] +) +@pytest.mark.parametrize( + "extra_layout_kwargs", + [ + {}, # No extra kwargs + {"pos": {0: (0, 0)}, "fixed": [0]}, # Fixed node position + {"dim": 3}, # 3D layout + ], +) +def test_spring_layout_graph_size_heuristic( + num_nodes, expected_method, extra_layout_kwargs +): + """Expect 'force' layout for n < 500 and 'energy' for n >= 500""" + G = nx.cycle_graph(num_nodes) + # Seeded layout to compare explicit method to one determined by "auto" + seed = 163674319 + + # Compare explicit method to auto method + expected = nx.spring_layout( + G, method=expected_method, seed=seed, **extra_layout_kwargs + ) + actual = nx.spring_layout(G, method="auto", seed=seed, **extra_layout_kwargs) + assert np.allclose(list(expected.values()), list(actual.values()), atol=1e-5) diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pydot.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pydot.py new file mode 100644 index 0000000..acf93d7 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pydot.py @@ -0,0 +1,146 @@ +"""Unit tests for pydot drawing functions.""" + +from io import StringIO + +import pytest + +import networkx as nx +from networkx.utils import graphs_equal + +pydot = pytest.importorskip("pydot") + + +class TestPydot: + @pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) + @pytest.mark.parametrize("prog", ("neato", "dot")) + def test_pydot(self, G, prog, tmp_path): + """ + Validate :mod:`pydot`-based usage of the passed NetworkX graph with the + passed basename of an external GraphViz command (e.g., `dot`, `neato`). + """ + + # Set the name of this graph to... "G". Failing to do so will + # subsequently trip an assertion expecting this name. + G.graph["name"] = "G" + + # Add arbitrary nodes and edges to the passed empty graph. 
+ G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("A", "D")]) + G.add_node("E") + + # Validate layout of this graph with the passed GraphViz command. + graph_layout = nx.nx_pydot.pydot_layout(G, prog=prog) + assert isinstance(graph_layout, dict) + + # Convert this graph into a "pydot.Dot" instance. + P = nx.nx_pydot.to_pydot(G) + + # Convert this "pydot.Dot" instance back into a graph of the same type. + G2 = G.__class__(nx.nx_pydot.from_pydot(P)) + + # Validate the original and resulting graphs to be the same. + assert graphs_equal(G, G2) + + fname = tmp_path / "out.dot" + + # Serialize this "pydot.Dot" instance to a temporary file in dot format + P.write_raw(fname) + + # Deserialize a list of new "pydot.Dot" instances back from this file. + Pin_list = pydot.graph_from_dot_file(path=fname, encoding="utf-8") + + # Validate this file to contain only one graph. + assert len(Pin_list) == 1 + + # The single "pydot.Dot" instance deserialized from this file. + Pin = Pin_list[0] + + # Sorted list of all nodes in the original "pydot.Dot" instance. + n1 = sorted(p.get_name() for p in P.get_node_list()) + + # Sorted list of all nodes in the deserialized "pydot.Dot" instance. + n2 = sorted(p.get_name() for p in Pin.get_node_list()) + + # Validate these instances to contain the same nodes. + assert n1 == n2 + + # Sorted list of all edges in the original "pydot.Dot" instance. + e1 = sorted((e.get_source(), e.get_destination()) for e in P.get_edge_list()) + + # Sorted list of all edges in the original "pydot.Dot" instance. + e2 = sorted((e.get_source(), e.get_destination()) for e in Pin.get_edge_list()) + + # Validate these instances to contain the same edges. + assert e1 == e2 + + # Deserialize a new graph of the same type back from this file. + Hin = nx.nx_pydot.read_dot(fname) + Hin = G.__class__(Hin) + + # Validate the original and resulting graphs to be the same. + assert graphs_equal(G, Hin) + + def test_read_write(self): + G = nx.MultiGraph() + G.graph["name"] = "G" + G.add_edge("1", "2", key="0") # read assumes strings + fh = StringIO() + nx.nx_pydot.write_dot(G, fh) + fh.seek(0) + H = nx.nx_pydot.read_dot(fh) + assert graphs_equal(G, H) + + +def test_pydot_issue_7581(tmp_path): + """Validate that `nx_pydot.pydot_layout` handles nodes + with characters like "\n", " ". + + Those characters cause `pydot` to escape and quote them on output, + which caused #7581. + """ + G = nx.Graph() + G.add_edges_from([("A\nbig test", "B"), ("A\nbig test", "C"), ("B", "C")]) + + graph_layout = nx.nx_pydot.pydot_layout(G, prog="dot") + assert isinstance(graph_layout, dict) + + # Convert the graph to pydot and back into a graph. There should be no difference. 
+ P = nx.nx_pydot.to_pydot(G) + G2 = nx.Graph(nx.nx_pydot.from_pydot(P)) + assert graphs_equal(G, G2) + + +@pytest.mark.parametrize( + "graph_type", [nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph] +) +def test_hashable_pydot(graph_type): + # gh-5790 + G = graph_type() + G.add_edge("5", frozenset([1]), t='"Example:A"', l=False) + G.add_edge("1", 2, w=True, t=("node1",), l=frozenset(["node1"])) + G.add_edge("node", (3, 3), w="string") + + assert [ + {"t": '"Example:A"', "l": "False"}, + {"w": "True", "t": "('node1',)", "l": "frozenset({'node1'})"}, + {"w": "string"}, + ] == [ + attr + for _, _, attr in nx.nx_pydot.from_pydot(nx.nx_pydot.to_pydot(G)).edges.data() + ] + + assert {str(i) for i in G.nodes()} == set( + nx.nx_pydot.from_pydot(nx.nx_pydot.to_pydot(G)).nodes + ) + + +def test_pydot_numerical_name(): + G = nx.Graph() + G.add_edges_from([("A", "B"), (0, 1)]) + graph_layout = nx.nx_pydot.pydot_layout(G, prog="dot") + assert isinstance(graph_layout, dict) + assert "0" not in graph_layout + assert 0 in graph_layout + assert "1" not in graph_layout + assert 1 in graph_layout + assert "A" in graph_layout + assert "B" in graph_layout diff --git a/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pylab.py b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pylab.py new file mode 100644 index 0000000..5335dd4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/drawing/tests/test_pylab.py @@ -0,0 +1,1756 @@ +"""Unit tests for matplotlib drawing functions.""" + +import itertools +import os +import warnings + +import pytest + +import networkx as nx + +mpl = pytest.importorskip("matplotlib") +np = pytest.importorskip("numpy") +mpl.use("PS") +plt = pytest.importorskip("matplotlib.pyplot") +plt.rcParams["text.usetex"] = False + + +barbell = nx.barbell_graph(4, 6) + +defaults = { + "node_pos": None, + "node_visible": True, + "node_color": "#1f78b4", + "node_size": 300, + "node_label": { + "size": 12, + "color": "#000000", + "family": "sans-serif", + "weight": "normal", + "alpha": 1.0, + "background_color": None, + "background_alpha": None, + "h_align": "center", + "v_align": "center", + "bbox": None, + }, + "node_shape": "o", + "node_alpha": 1.0, + "node_border_width": 1.0, + "node_border_color": "face", + "edge_visible": True, + "edge_width": 1.0, + "edge_color": "#000000", + "edge_label": { + "size": 12, + "color": "#000000", + "family": "sans-serif", + "weight": "normal", + "alpha": 1.0, + "bbox": {"boxstyle": "round", "ec": (1.0, 1.0, 1.0), "fc": (1.0, 1.0, 1.0)}, + "h_align": "center", + "v_align": "center", + "pos": 0.5, + "rotate": True, + }, + "edge_style": "-", + "edge_alpha": 1.0, + # These are for undirected-graphs. 
Directed graphs should use "-|>" and 10, respectively.
+    "edge_arrowstyle": "-",
+    "edge_arrowsize": 0,
+    "edge_curvature": "arc3",
+    "edge_source_margin": 0,
+    "edge_target_margin": 0,
+}
+
+
+@pytest.mark.parametrize(
+    ("param_name", "param_value", "expected"),
+    (
+        ("node_color", None, defaults["node_color"]),
+        ("node_color", "#FF0000", "red"),
+        ("node_color", "color", "lime"),
+    ),
+)
+def test_display_arg_handling_node_color(param_name, param_value, expected):
+    G = nx.path_graph(4)
+    nx.set_node_attributes(G, "#00FF00", "color")
+    canvas = plt.figure().add_subplot(111)
+    nx.display(G, canvas=canvas, **{param_name: param_value})
+    assert mpl.colors.same_color(canvas.get_children()[0].get_edgecolors()[0], expected)
+    plt.close()
+
+
+@pytest.mark.parametrize(
+    ("param_value", "expected"),
+    (
+        (None, (1, 1, 1, 1)),  # default value
+        (0.5, (0.5, 0.5, 0.5, 0.5)),
+        ("n_alpha", (1.0, 1 / 2, 1 / 3, 0.25)),
+    ),
+)
+def test_display_arg_handling_node_alpha(param_value, expected):
+    G = nx.path_graph(4)
+    nx.set_node_attributes(G, {n: 1 / (n + 1) for n in G.nodes()}, "n_alpha")
+    canvas = plt.figure().add_subplot(111)
+    nx.display(G, canvas=canvas, node_alpha=param_value)
+    # Extract just the alpha channel from the node colors
+    assert all(canvas.get_children()[0].get_fc()[:, 3] == expected)
+    plt.close()
+
+
+def test_display_node_position():
+    G = nx.path_graph(4)
+    nx.set_node_attributes(G, {n: (n, n) for n in G.nodes()}, "pos")
+    canvas = plt.figure().add_subplot(111)
+    nx.display(G, canvas=canvas, node_pos="pos")
+    assert np.all(
+        canvas.get_children()[0].get_offsets().data == [[0, 0], [1, 1], [2, 2], [3, 3]]
+    )
+    plt.close()
+
+
+@pytest.mark.mpl_image_compare
+def test_display_house_with_colors():
+    """
+    Originally, I wanted to use the exact same image as test_house_with_colors,
+    but I can't seem to find the correct value for the margins to get the figures
+    to line up perfectly. To the human eye, these visualizations are basically the
+    same.
+ """ + G = nx.house_graph() + fig, ax = plt.subplots() + nx.set_node_attributes( + G, {0: (0, 0), 1: (1, 0), 2: (0, 1), 3: (1, 1), 4: (0.5, 2.0)}, "pos" + ) + nx.set_node_attributes( + G, + { + n: { + "size": 3000 if n != 4 else 2000, + "color": "tab:blue" if n != 4 else "tab:orange", + } + for n in G.nodes() + }, + ) + nx.display( + G, + node_pos="pos", + edge_alpha=0.5, + edge_width=6, + node_label=None, + node_border_color="k", + ) + ax.margins(0.17) + plt.tight_layout() + plt.axis("off") + return fig + + +def test_display_line_collection(): + G = nx.karate_club_graph() + nx.set_edge_attributes( + G, {(u, v): "-|>" if (u + v) % 2 else "-" for u, v in G.edges()}, "arrowstyle" + ) + canvas = plt.figure().add_subplot(111) + nx.display(G, canvas=canvas, edge_arrowsize=10) + # There should only be one line collection in any given visualization + lc = [ + l + for l in canvas.get_children() + if isinstance(l, mpl.collections.LineCollection) + ][0] + assert len(lc.get_paths()) == sum([1 for u, v in G.edges() if (u + v) % 2]) + plt.close() + + +@pytest.mark.mpl_image_compare +def test_display_labels_and_colors(): + """See 'Labels and Colors' gallery example""" + fig, ax = plt.subplots() + G = nx.cubical_graph() + pos = nx.spring_layout(G, seed=3113794652) # positions for all nodes + nx.set_node_attributes(G, pos, "pos") # Will not be needed after PR 7571 + labels = iter( + [ + r"$a$", + r"$b$", + r"$c$", + r"$d$", + r"$\alpha$", + r"$\beta$", + r"$\gamma$", + r"$\delta$", + ] + ) + nx.set_node_attributes( + G, + { + n: { + "size": 800, + "alpha": 0.9, + "color": "tab:red" if n < 4 else "tab:blue", + "label": {"label": next(labels), "size": 22, "color": "whitesmoke"}, + } + for n in G.nodes() + }, + ) + + nx.display(G, node_pos="pos", edge_color="tab:grey") + + # The tricky bit is the highlighted colors for the edges + edgelist = [(0, 1), (1, 2), (2, 3), (0, 3)] + nx.set_edge_attributes( + G, + { + (u, v): { + "width": 8, + "alpha": 0.5, + "color": "tab:red", + "visible": (u, v) in edgelist, + } + for u, v in G.edges() + }, + ) + nx.display(G, node_pos="pos", node_visible=False) + edgelist = [(4, 5), (5, 6), (6, 7), (4, 7)] + nx.set_edge_attributes( + G, + { + (u, v): { + "color": "tab:blue", + "visible": (u, v) in edgelist, + } + for u, v in G.edges() + }, + ) + nx.display(G, node_pos="pos", node_visible=False) + + plt.tight_layout() + plt.axis("off") + return fig + + +@pytest.mark.mpl_image_compare +def test_display_complex(): + import itertools as it + + fig, ax = plt.subplots() + G = nx.MultiDiGraph() + nodes = "ABC" + prod = list(it.product(nodes, repeat=2)) * 4 + G = nx.MultiDiGraph() + for i, (u, v) in enumerate(prod): + G.add_edge(u, v, w=round(i / 3, 2)) + nx.set_node_attributes(G, nx.spring_layout(G, seed=3113794652), "pos") + csi = it.cycle([f"arc3,rad={r}" for r in it.accumulate([0.15] * 4)]) + nx.set_edge_attributes(G, {e: next(csi) for e in G.edges(keys=True)}, "curvature") + nx.set_edge_attributes( + G, + { + tuple(e): {"label": w, "bbox": {"alpha": 0}} + for *e, w in G.edges(keys=True, data="w") + }, + "label", + ) + nx.apply_matplotlib_colors(G, "w", "color", mpl.colormaps["inferno"], nodes=False) + nx.display(G, canvas=ax, node_pos="pos") + + plt.tight_layout() + plt.axis("off") + return fig + + +@pytest.mark.mpl_image_compare +def test_display_shortest_path(): + fig, ax = plt.subplots() + G = nx.Graph() + G.add_nodes_from(["A", "B", "C", "D", "E", "F", "G", "H"]) + G.add_edge("A", "B", weight=4) + G.add_edge("A", "H", weight=8) + G.add_edge("B", "C", weight=8) + 
G.add_edge("B", "H", weight=11) + G.add_edge("C", "D", weight=7) + G.add_edge("C", "F", weight=4) + G.add_edge("C", "I", weight=2) + G.add_edge("D", "E", weight=9) + G.add_edge("D", "F", weight=14) + G.add_edge("E", "F", weight=10) + G.add_edge("F", "G", weight=2) + G.add_edge("G", "H", weight=1) + G.add_edge("G", "I", weight=6) + G.add_edge("H", "I", weight=7) + + # Find the shortest path from node A to node E + path = nx.shortest_path(G, "A", "E", weight="weight") + + # Create a list of edges in the shortest path + path_edges = list(zip(path, path[1:])) + nx.set_node_attributes(G, nx.spring_layout(G, seed=37), "pos") + nx.set_edge_attributes( + G, + { + (u, v): { + "color": ( + "red" + if (u, v) in path_edges or tuple(reversed((u, v))) in path_edges + else "black" + ), + "label": {"label": d["weight"], "rotate": False}, + } + for u, v, d in G.edges(data=True) + }, + ) + nx.display(G, canvas=ax) + plt.tight_layout() + plt.axis("off") + return fig + + +@pytest.mark.parametrize( + ("edge_color", "expected"), + ( + (None, "black"), + ("r", "red"), + ((1.0, 1.0, 0.0), "yellow"), + ((0, 1, 0, 1), "lime"), + ("color", "blue"), + ("#0000FF", "blue"), + ), +) +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_single_color(edge_color, expected, graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, "#0000FF", "color") + canvas = plt.figure().add_subplot(111) + nx.display(G, edge_color=edge_color, canvas=canvas) + if G.is_directed(): + colors = [ + f.get_fc() + for f in canvas.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + colors = [ + l + for l in canvas.collections + if isinstance(l, mpl.collections.LineCollection) + ][0].get_colors() + assert all(mpl.colors.same_color(c, expected) for c in colors) + plt.close() + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_multiple_colors(graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): "#FF0000", (1, 2): (0, 0, 1)}, "color") + ax = plt.figure().add_subplot(111) + nx.display(G, canvas=ax) + expected = ["red", "blue"] + if G.is_directed(): + colors = [ + f.get_fc() + for f in ax.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + colors = [ + l for l in ax.collections if isinstance(l, mpl.collections.LineCollection) + ][0].get_colors() + assert mpl.colors.same_color(colors, expected) + plt.close() + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_position(graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes(G, {n: (n, n) for n in G.nodes()}, "pos") + ax = plt.figure().add_subplot(111) + nx.display(G, canvas=ax) + if G.is_directed(): + end_points = [ + (f.get_path().vertices[0, :], f.get_path().vertices[-2, :]) + for f in ax.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + line_collection = [ + l for l in ax.collections if isinstance(l, mpl.collections.LineCollection) + ][0] + end_points = [ + (p.vertices[0, :], p.vertices[-1, :]) for p in line_collection.get_paths() + ] + expected = [((0, 0), (1, 1)), ((1, 1), (2, 2))] + # Use the threshold to account for slight shifts in FancyArrowPatch margins to + # avoid covering the arrow head with the node. 
+ threshold = 0.05 + for a, e in zip(end_points, expected): + act_start, act_end = a + exp_start, exp_end = e + assert all(abs(act_start - exp_start) < (threshold, threshold)) and all( + abs(act_end - exp_end) < (threshold, threshold) + ) + plt.close() + + +def test_display_position_function(): + G = nx.karate_club_graph() + + def fixed_layout(G): + return nx.spring_layout(G, seed=314159) + + pos = fixed_layout(G) + ax = plt.figure().add_subplot(111) + nx.display(G, node_pos=fixed_layout, canvas=ax) + # rebuild the position dictionary from the canvas + act_pos = { + n: tuple(p) for n, p in zip(G.nodes(), ax.get_children()[0].get_offsets().data) + } + for n in G.nodes(): + assert all(pos[n] == act_pos[n]) + plt.close() + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_colormaps(graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): 0, (1, 2): 1}, "weight") + cmap = mpl.colormaps["plasma"] + nx.apply_matplotlib_colors(G, "weight", "color", cmap, nodes=False) + canvas = plt.figure().add_subplot(111) + nx.display(G, canvas=canvas) + mapper = mpl.cm.ScalarMappable(cmap=cmap) + mapper.set_clim(0, 1) + expected = [mapper.to_rgba(0), mapper.to_rgba(1)] + if G.is_directed(): + colors = [ + f.get_facecolor() + for f in canvas.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + colors = [ + l + for l in canvas.collections + if isinstance(l, mpl.collections.LineCollection) + ][0].get_colors() + assert mpl.colors.same_color(expected[0], G.edges[0, 1]["color"]) + assert mpl.colors.same_color(expected[1], G.edges[1, 2]["color"]) + assert mpl.colors.same_color(expected, colors) + plt.close() + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_node_colormaps(graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_node_attributes(G, {0: 0, 1: 0.5, 2: 1}, "weight") + cmap = mpl.colormaps["plasma"] + nx.apply_matplotlib_colors(G, "weight", "color", cmap) + canvas = plt.figure().add_subplot(111) + nx.display(G, canvas=canvas) + mapper = mpl.cm.ScalarMappable(cmap=cmap) + mapper.set_clim(0, 1) + expected = [mapper.to_rgba(0), mapper.to_rgba(0.5), mapper.to_rgba(1)] + colors = [ + s for s in canvas.collections if isinstance(s, mpl.collections.PathCollection) + ][0].get_edgecolors() + assert mpl.colors.same_color(expected[0], G.nodes[0]["color"]) + assert mpl.colors.same_color(expected[1], G.nodes[1]["color"]) + assert mpl.colors.same_color(expected[2], G.nodes[2]["color"]) + assert mpl.colors.same_color(expected, colors) + plt.close() + + +@pytest.mark.parametrize( + ("param_value", "expected"), + ( + (None, [defaults["edge_width"], defaults["edge_width"]]), + (5, [5, 5]), + ("width", [5, 10]), + ), +) +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_width(param_value, expected, graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): 5, (1, 2): 10}, "width") + canvas = plt.figure().add_subplot(111) + nx.display(G, edge_width=param_value, canvas=canvas) + if G.is_directed(): + widths = [ + f.get_linewidth() + for f in canvas.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + widths = list( + [ + l + for l in canvas.collections + if isinstance(l, mpl.collections.LineCollection) + ][0].get_linewidths() + ) + assert widths == expected + + +@pytest.mark.parametrize( + ("param_value", "expected"), + ( + (None, [defaults["edge_style"], defaults["edge_style"]]), 
+ (":", [":", ":"]), + ("style", ["-", ":"]), + ), +) +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_display_edge_style(param_value, expected, graph_type): + G = nx.path_graph(3, create_using=graph_type) + nx.set_edge_attributes(G, {(0, 1): "-", (1, 2): ":"}, "style") + canvas = plt.figure().add_subplot(111) + nx.display(G, edge_style=param_value, canvas=canvas) + if G.is_directed(): + styles = [ + f.get_linestyle() + for f in canvas.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + else: + # Convert back from tuple description to character form + linestyles = {(0, None): "-", (0, (1, 1.65)): ":"} + styles = [ + linestyles[(s[0], tuple(s[1]) if s[1] is not None else None)] + for s in [ + l + for l in canvas.collections + if isinstance(l, mpl.collections.LineCollection) + ][0].get_linestyles() + ] + assert styles == expected + plt.close() + + +def test_display_node_labels(): + G = nx.path_graph(4) + canvas = plt.figure().add_subplot(111) + nx.display(G, canvas=canvas, node_label={"size": 20}) + labels = [t for t in canvas.get_children() if isinstance(t, mpl.text.Text)] + for n, l in zip(G.nodes(), labels): + assert l.get_text() == str(n) + assert l.get_size() == 20.0 + plt.close() + + +def test_display_edge_labels(): + G = nx.path_graph(4) + canvas = plt.figure().add_subplot(111) + # While we can pass in dicts for edge label defaults without errors, + # this isn't helpful unless we want one label for all edges. + nx.set_edge_attributes(G, {(u, v): {"label": u + v} for u, v in G.edges()}) + nx.display(G, canvas=canvas, edge_label={"color": "r"}, node_label=None) + labels = [t for t in canvas.get_children() if isinstance(t, mpl.text.Text)] + print(labels) + for e, l in zip(G.edges(), labels): + assert l.get_text() == str(e[0] + e[1]) + assert l.get_color() == "r" + plt.close() + + +@pytest.mark.mpl_image_compare +def test_display_empty_graph(): + G = nx.empty_graph() + fig, ax = plt.subplots() + nx.display(G, canvas=ax) + plt.tight_layout() + plt.axis("off") + return fig + + +def test_display_multigraph_non_integer_keys(): + G = nx.MultiGraph() + G.add_nodes_from(["A", "B", "C", "D"]) + G.add_edges_from( + [ + ("A", "B", "0"), + ("A", "B", "1"), + ("B", "C", "-1"), + ("B", "C", "1"), + ("C", "D", "-1"), + ("C", "D", "0"), + ] + ) + nx.set_edge_attributes( + G, {e: f"arc3,rad={0.2 * int(e[2])}" for e in G.edges(keys=True)}, "curvature" + ) + canvas = plt.figure().add_subplot(111) + nx.display(G, canvas=canvas) + rads = [ + f.get_connectionstyle().rad + for f in canvas.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + assert rads == [0.0, 0.2, -0.2, 0.2, -0.2, 0.0] + plt.close() + + +def test_display_raises_for_bad_arg(): + G = nx.karate_club_graph() + with pytest.raises(nx.NetworkXError): + nx.display(G, bad_arg="bad_arg") + plt.close() + + +def test_display_arrow_size(): + G = nx.path_graph(4, create_using=nx.DiGraph) + nx.set_edge_attributes( + G, {(u, v): (u + v + 2) ** 2 for u, v in G.edges()}, "arrowsize" + ) + ax = plt.axes() + nx.display(G, canvas=ax) + assert [9, 25, 49] == [ + f.get_mutation_scale() + for f in ax.get_children() + if isinstance(f, mpl.patches.FancyArrowPatch) + ] + plt.close() + + +def test_display_mismatched_edge_position(): + """ + This test ensures that a error is raised for incomplete position data. 
+ """ + G = nx.path_graph(5) + # Notice that there is no position for node 3 + nx.set_node_attributes(G, {0: (0, 0), 1: (1, 1), 2: (2, 2), 4: (4, 4)}, "pos") + # But that's not a problem since we don't want to show node 4, right? + nx.set_node_attributes(G, {n: n < 4 for n in G.nodes()}, "visible") + # However, if we try to visualize every edge (including 3 -> 4)... + # That's a problem since node 4 doesn't have a position + with pytest.raises(nx.NetworkXError): + nx.display(G) + + +# NOTE: parametrizing on marker to test both branches of internal +# to_marker_edge function +@pytest.mark.parametrize("node_shape", ("o", "s")) +def test_display_edge_margins(node_shape): + """ + Test that there is a wider gap between the node and the start of an + incident edge when min_source_margin is specified. + + This test checks that the use os min_{source/target}_margin edge + attributes result is shorter (more padding) between the edges and + source and target nodes. + + + As a crude visual example, let 's' and 't' represent source and target + nodes, respectively: + + Default: + s-----------------------------t + + With margins: + s ----------------------- t + + """ + ax = plt.figure().add_subplot(111) + G = nx.DiGraph([(0, 1)]) + nx.set_node_attributes(G, {0: (0, 0), 1: (1, 1)}, "pos") + # Get the default patches from the regular visualization + nx.display(G, canvas=ax, node_shape=node_shape) + default_arrow = [ + f for f in ax.get_children() if isinstance(f, mpl.patches.FancyArrowPatch) + ][0] + default_extent = default_arrow.get_extents().corners()[::2, 0] + # Now plot again with margins + ax = plt.figure().add_subplot(111) + nx.display( + G, + canvas=ax, + edge_source_margin=100, + edge_target_margin=100, + node_shape=node_shape, + ) + padded_arrow = [ + f for f in ax.get_children() if isinstance(f, mpl.patches.FancyArrowPatch) + ][0] + padded_extent = padded_arrow.get_extents().corners()[::2, 0] + + # With padding, the left-most extent of the edge should be further to the right + assert padded_extent[0] > default_extent[0] + # And the rightmost extent of the edge, further to the left + assert padded_extent[1] < default_extent[1] + plt.close() + + +@pytest.mark.parametrize("ticks", [False, True]) +def test_display_hide_ticks(ticks): + G = nx.path_graph(3) + nx.set_node_attributes(G, {n: (n, n) for n in G.nodes()}, "pos") + ax = plt.axes() + nx.display(G, hide_ticks=ticks) + for axis in [ax.xaxis, ax.yaxis]: + assert bool(axis.get_ticklabels()) != ticks + + plt.close() + + +def test_display_self_loop(): + ax = plt.axes() + G = nx.DiGraph() + G.add_node(0) + G.add_edge(0, 0) + nx.set_node_attributes(G, {0: (0, 0)}, "pos") + nx.display(G, canvas=ax) + arrow = [ + f for f in ax.get_children() if isinstance(f, mpl.patches.FancyArrowPatch) + ][0] + bbox = arrow.get_extents() + print(bbox.width) + print(bbox.height) + assert bbox.width > 0 and bbox.height > 0 + + plt.delaxes(ax) + plt.close() + + +def test_display_remove_pos_attr(): + """ + If the pos attribute isn't provided or is a function, display computes the layout + and adds it to the graph. We need to ensure that this new attribute is removed from + the returned graph. 
+ """ + G = nx.karate_club_graph() + nx.display(G) + assert nx.get_node_attributes(G, "display's position attribute name") == {} + + +@pytest.fixture +def subplots(): + fig, ax = plt.subplots() + yield fig, ax + plt.delaxes(ax) + plt.close() + + +def test_draw(): + try: + functions = [ + nx.draw_circular, + nx.draw_kamada_kawai, + nx.draw_planar, + nx.draw_random, + nx.draw_spectral, + nx.draw_spring, + nx.draw_shell, + ] + options = [{"node_color": "black", "node_size": 100, "width": 3}] + for function, option in itertools.product(functions, options): + function(barbell, **option) + plt.savefig("test.ps") + except ModuleNotFoundError: # draw_kamada_kawai requires scipy + pass + finally: + try: + os.unlink("test.ps") + except OSError: + pass + + +def test_draw_shell_nlist(): + try: + nlist = [list(range(4)), list(range(4, 10)), list(range(10, 14))] + nx.draw_shell(barbell, nlist=nlist) + plt.savefig("test.ps") + finally: + try: + os.unlink("test.ps") + except OSError: + pass + + +def test_draw_bipartite(): + try: + G = nx.complete_bipartite_graph(2, 5) + nx.draw_bipartite(G) + plt.savefig("test.ps") + finally: + try: + os.unlink("test.ps") + except OSError: + pass + + +def test_edge_colormap(): + colors = range(barbell.number_of_edges()) + nx.draw_spring( + barbell, edge_color=colors, width=4, edge_cmap=plt.cm.Blues, with_labels=True + ) + # plt.show() + + +def test_arrows(): + nx.draw_spring(barbell.to_directed()) + # plt.show() + + +@pytest.mark.parametrize( + ("edge_color", "expected"), + ( + (None, "black"), # Default + ("r", "red"), # Non-default color string + (["r"], "red"), # Single non-default color in a list + ((1.0, 1.0, 0.0), "yellow"), # single color as rgb tuple + ([(1.0, 1.0, 0.0)], "yellow"), # single color as rgb tuple in list + ((0, 1, 0, 1), "lime"), # single color as rgba tuple + ([(0, 1, 0, 1)], "lime"), # single color as rgba tuple in list + ("#0000ff", "blue"), # single color hex code + (["#0000ff"], "blue"), # hex code in list + ), +) +@pytest.mark.parametrize("edgelist", (None, [(0, 1)])) +def test_single_edge_color_undirected(edge_color, expected, edgelist): + """Tests ways of specifying all edges have a single color for edges + drawn with a LineCollection""" + + G = nx.path_graph(3) + drawn_edges = nx.draw_networkx_edges( + G, pos=nx.random_layout(G), edgelist=edgelist, edge_color=edge_color + ) + assert mpl.colors.same_color(drawn_edges.get_color(), expected) + + +@pytest.mark.parametrize( + ("edge_color", "expected"), + ( + (None, "black"), # Default + ("r", "red"), # Non-default color string + (["r"], "red"), # Single non-default color in a list + ((1.0, 1.0, 0.0), "yellow"), # single color as rgb tuple + ([(1.0, 1.0, 0.0)], "yellow"), # single color as rgb tuple in list + ((0, 1, 0, 1), "lime"), # single color as rgba tuple + ([(0, 1, 0, 1)], "lime"), # single color as rgba tuple in list + ("#0000ff", "blue"), # single color hex code + (["#0000ff"], "blue"), # hex code in list + ), +) +@pytest.mark.parametrize("edgelist", (None, [(0, 1)])) +def test_single_edge_color_directed(edge_color, expected, edgelist): + """Tests ways of specifying all edges have a single color for edges drawn + with FancyArrowPatches""" + + G = nx.path_graph(3, create_using=nx.DiGraph) + drawn_edges = nx.draw_networkx_edges( + G, pos=nx.random_layout(G), edgelist=edgelist, edge_color=edge_color + ) + for fap in drawn_edges: + assert mpl.colors.same_color(fap.get_edgecolor(), expected) + + +def test_edge_color_tuple_interpretation(): + """If edge_color is a sequence with the same length 
as edgelist, then each + value in edge_color is mapped onto each edge via colormap.""" + G = nx.path_graph(6, create_using=nx.DiGraph) + pos = {n: (n, n) for n in range(len(G))} + + # num edges != 3 or 4 --> edge_color interpreted as rgb(a) + for ec in ((0, 0, 1), (0, 0, 1, 1)): + # More than 4 edges + drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=ec) + for fap in drawn_edges: + assert mpl.colors.same_color(fap.get_edgecolor(), ec) + # Fewer than 3 edges + drawn_edges = nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1), (1, 2)], edge_color=ec + ) + for fap in drawn_edges: + assert mpl.colors.same_color(fap.get_edgecolor(), ec) + + # num edges == 3, len(edge_color) == 4: interpreted as rgba + drawn_edges = nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1), (1, 2), (2, 3)], edge_color=(0, 0, 1, 1) + ) + for fap in drawn_edges: + assert mpl.colors.same_color(fap.get_edgecolor(), "blue") + + # num edges == 4, len(edge_color) == 3: interpreted as rgb + drawn_edges = nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1), (1, 2), (2, 3), (3, 4)], edge_color=(0, 0, 1) + ) + for fap in drawn_edges: + assert mpl.colors.same_color(fap.get_edgecolor(), "blue") + + # num edges == len(edge_color) == 3: interpreted with cmap, *not* as rgb + drawn_edges = nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1), (1, 2), (2, 3)], edge_color=(0, 0, 1) + ) + assert mpl.colors.same_color( + drawn_edges[0].get_edgecolor(), drawn_edges[1].get_edgecolor() + ) + for fap in drawn_edges: + assert not mpl.colors.same_color(fap.get_edgecolor(), "blue") + + # num edges == len(edge_color) == 4: interpreted with cmap, *not* as rgba + drawn_edges = nx.draw_networkx_edges( + G, pos, edgelist=[(0, 1), (1, 2), (2, 3), (3, 4)], edge_color=(0, 0, 1, 1) + ) + assert mpl.colors.same_color( + drawn_edges[0].get_edgecolor(), drawn_edges[1].get_edgecolor() + ) + assert mpl.colors.same_color( + drawn_edges[2].get_edgecolor(), drawn_edges[3].get_edgecolor() + ) + for fap in drawn_edges: + assert not mpl.colors.same_color(fap.get_edgecolor(), "blue") + + +def test_fewer_edge_colors_than_num_edges_directed(): + """Test that the edge colors are cycled when there are fewer specified + colors than edges.""" + G = barbell.to_directed() + pos = nx.random_layout(barbell) + edgecolors = ("r", "g", "b") + drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=edgecolors) + for fap, expected in zip(drawn_edges, itertools.cycle(edgecolors)): + assert mpl.colors.same_color(fap.get_edgecolor(), expected) + + +def test_more_edge_colors_than_num_edges_directed(): + """Test that extra edge colors are ignored when there are more specified + colors than edges.""" + G = nx.path_graph(4, create_using=nx.DiGraph) # 3 edges + pos = nx.random_layout(barbell) + edgecolors = ("r", "g", "b", "c") # 4 edge colors + drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=edgecolors) + for fap, expected in zip(drawn_edges, edgecolors[:-1]): + assert mpl.colors.same_color(fap.get_edgecolor(), expected) + + +def test_edge_color_string_with_global_alpha_undirected(): + edge_collection = nx.draw_networkx_edges( + barbell, + pos=nx.random_layout(barbell), + edgelist=[(0, 1), (1, 2)], + edge_color="purple", + alpha=0.2, + ) + ec = edge_collection.get_color().squeeze() # as rgba tuple + assert len(edge_collection.get_paths()) == 2 + assert mpl.colors.same_color(ec[:-1], "purple") + assert ec[-1] == 0.2 + + +def test_edge_color_string_with_global_alpha_directed(): + drawn_edges = nx.draw_networkx_edges( + barbell.to_directed(), + pos=nx.random_layout(barbell), + 
edgelist=[(0, 1), (1, 2)], + edge_color="purple", + alpha=0.2, + ) + assert len(drawn_edges) == 2 + for fap in drawn_edges: + ec = fap.get_edgecolor() # As rgba tuple + assert mpl.colors.same_color(ec[:-1], "purple") + assert ec[-1] == 0.2 + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +def test_edge_width_default_value(graph_type): + """Test the default linewidth for edges drawn either via LineCollection or + FancyArrowPatches.""" + G = nx.path_graph(2, create_using=graph_type) + pos = {n: (n, n) for n in range(len(G))} + drawn_edges = nx.draw_networkx_edges(G, pos) + if isinstance(drawn_edges, list): # directed case: list of FancyArrowPatch + drawn_edges = drawn_edges[0] + assert drawn_edges.get_linewidth() == 1 + + +@pytest.mark.parametrize( + ("edgewidth", "expected"), + ( + (3, 3), # single-value, non-default + ([3], 3), # Single value as a list + ), +) +def test_edge_width_single_value_undirected(edgewidth, expected): + G = nx.path_graph(4) + pos = {n: (n, n) for n in range(len(G))} + drawn_edges = nx.draw_networkx_edges(G, pos, width=edgewidth) + assert len(drawn_edges.get_paths()) == 3 + assert drawn_edges.get_linewidth() == expected + + +@pytest.mark.parametrize( + ("edgewidth", "expected"), + ( + (3, 3), # single-value, non-default + ([3], 3), # Single value as a list + ), +) +def test_edge_width_single_value_directed(edgewidth, expected): + G = nx.path_graph(4, create_using=nx.DiGraph) + pos = {n: (n, n) for n in range(len(G))} + drawn_edges = nx.draw_networkx_edges(G, pos, width=edgewidth) + assert len(drawn_edges) == 3 + for fap in drawn_edges: + assert fap.get_linewidth() == expected + + +@pytest.mark.parametrize( + "edgelist", + ( + [(0, 1), (1, 2), (2, 3)], # one width specification per edge + None, # fewer widths than edges - widths cycle + [(0, 1), (1, 2)], # More widths than edges - unused widths ignored + ), +) +def test_edge_width_sequence(edgelist): + G = barbell.to_directed() + pos = nx.random_layout(G) + widths = (0.5, 2.0, 12.0) + drawn_edges = nx.draw_networkx_edges(G, pos, edgelist=edgelist, width=widths) + for fap, expected_width in zip(drawn_edges, itertools.cycle(widths)): + assert fap.get_linewidth() == expected_width + + +def test_edge_color_with_edge_vmin_vmax(): + """Test that edge_vmin and edge_vmax properly set the dynamic range of the + color map when num edges == len(edge_colors).""" + G = nx.path_graph(3, create_using=nx.DiGraph) + pos = nx.random_layout(G) + # Extract colors from the original (unscaled) colormap + drawn_edges = nx.draw_networkx_edges(G, pos, edge_color=[0, 1.0]) + orig_colors = [e.get_edgecolor() for e in drawn_edges] + # Colors from scaled colormap + drawn_edges = nx.draw_networkx_edges( + G, pos, edge_color=[0.2, 0.8], edge_vmin=0.2, edge_vmax=0.8 + ) + scaled_colors = [e.get_edgecolor() for e in drawn_edges] + assert mpl.colors.same_color(orig_colors, scaled_colors) + + +def test_directed_edges_linestyle_default(): + """Test default linestyle for edges drawn with FancyArrowPatches.""" + G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges + pos = {n: (n, n) for n in range(len(G))} + + # edge with default style + drawn_edges = nx.draw_networkx_edges(G, pos) + assert len(drawn_edges) == 3 + for fap in drawn_edges: + assert fap.get_linestyle() == "solid" + + +@pytest.mark.parametrize( + "style", + ( + "dashed", # edge with string style + "--", # edge with simplified string style + (1, (1, 1)), # edge with (offset, onoffseq) style + ), +) +def test_directed_edges_linestyle_single_value(style): + 
"""Tests support for specifying linestyles with a single value to be applied to + all edges in ``draw_networkx_edges`` for FancyArrowPatch outputs + (e.g. directed edges).""" + + G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges + pos = {n: (n, n) for n in range(len(G))} + + drawn_edges = nx.draw_networkx_edges(G, pos, style=style) + assert len(drawn_edges) == 3 + for fap in drawn_edges: + assert fap.get_linestyle() == style + + +@pytest.mark.parametrize( + "style_seq", + ( + ["dashed"], # edge with string style in list + ["--"], # edge with simplified string style in list + [(1, (1, 1))], # edge with (offset, onoffseq) style in list + ["--", "-", ":"], # edges with styles for each edge + ["--", "-"], # edges with fewer styles than edges (styles cycle) + ["--", "-", ":", "-."], # edges with more styles than edges (extra unused) + ), +) +def test_directed_edges_linestyle_sequence(style_seq): + """Tests support for specifying linestyles with sequences in + ``draw_networkx_edges`` for FancyArrowPatch outputs (e.g. directed edges).""" + + G = nx.path_graph(4, create_using=nx.DiGraph) # Graph with 3 edges + pos = {n: (n, n) for n in range(len(G))} + + drawn_edges = nx.draw_networkx_edges(G, pos, style=style_seq) + assert len(drawn_edges) == 3 + for fap, style in zip(drawn_edges, itertools.cycle(style_seq)): + assert fap.get_linestyle() == style + + +def test_return_types(): + from matplotlib.collections import LineCollection, PathCollection + from matplotlib.patches import FancyArrowPatch + + G = nx.frucht_graph(create_using=nx.Graph) + dG = nx.frucht_graph(create_using=nx.DiGraph) + pos = nx.spring_layout(G) + dpos = nx.spring_layout(dG) + # nodes + nodes = nx.draw_networkx_nodes(G, pos) + assert isinstance(nodes, PathCollection) + # edges + edges = nx.draw_networkx_edges(dG, dpos, arrows=True) + assert isinstance(edges, list) + if len(edges) > 0: + assert isinstance(edges[0], FancyArrowPatch) + edges = nx.draw_networkx_edges(dG, dpos, arrows=False) + assert isinstance(edges, LineCollection) + edges = nx.draw_networkx_edges(G, dpos, arrows=None) + assert isinstance(edges, LineCollection) + edges = nx.draw_networkx_edges(dG, pos, arrows=None) + assert isinstance(edges, list) + if len(edges) > 0: + assert isinstance(edges[0], FancyArrowPatch) + + +def test_labels_and_colors(): + G = nx.cubical_graph() + pos = nx.spring_layout(G) # positions for all nodes + # nodes + nx.draw_networkx_nodes( + G, pos, nodelist=[0, 1, 2, 3], node_color="r", node_size=500, alpha=0.75 + ) + nx.draw_networkx_nodes( + G, + pos, + nodelist=[4, 5, 6, 7], + node_color="b", + node_size=500, + alpha=[0.25, 0.5, 0.75, 1.0], + ) + # edges + nx.draw_networkx_edges(G, pos, width=1.0, alpha=0.5) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(0, 1), (1, 2), (2, 3), (3, 0)], + width=8, + alpha=0.5, + edge_color="r", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + width=8, + alpha=0.5, + edge_color="b", + ) + nx.draw_networkx_edges( + G, + pos, + edgelist=[(4, 5), (5, 6), (6, 7), (7, 4)], + arrows=True, + min_source_margin=0.5, + min_target_margin=0.75, + width=8, + edge_color="b", + ) + # some math labels + labels = {} + labels[0] = r"$a$" + labels[1] = r"$b$" + labels[2] = r"$c$" + labels[3] = r"$d$" + labels[4] = r"$\alpha$" + labels[5] = r"$\beta$" + labels[6] = r"$\gamma$" + labels[7] = r"$\delta$" + colors = {n: "k" if n % 2 == 0 else "r" for n in range(8)} + nx.draw_networkx_labels(G, pos, labels, font_size=16) + nx.draw_networkx_labels(G, pos, labels, 
font_size=16, font_color=colors) + nx.draw_networkx_edge_labels(G, pos, edge_labels=None, rotate=False) + nx.draw_networkx_edge_labels(G, pos, edge_labels={(4, 5): "4-5"}) + # plt.show() + + +@pytest.mark.mpl_image_compare +def test_house_with_colors(): + G = nx.house_graph() + # explicitly set positions + fig, ax = plt.subplots() + pos = {0: (0, 0), 1: (1, 0), 2: (0, 1), 3: (1, 1), 4: (0.5, 2.0)} + + # Plot nodes with different properties for the "wall" and "roof" nodes + nx.draw_networkx_nodes( + G, + pos, + node_size=3000, + nodelist=[0, 1, 2, 3], + node_color="tab:blue", + ) + nx.draw_networkx_nodes( + G, pos, node_size=2000, nodelist=[4], node_color="tab:orange" + ) + nx.draw_networkx_edges(G, pos, alpha=0.5, width=6) + # Customize axes + ax.margins(0.11) + plt.tight_layout() + plt.axis("off") + return fig + + +def test_axes(subplots): + fig, ax = subplots + nx.draw(barbell, ax=ax) + nx.draw_networkx_edge_labels(barbell, nx.circular_layout(barbell), ax=ax) + + +def test_empty_graph(): + G = nx.Graph() + nx.draw(G) + + +def test_draw_empty_nodes_return_values(): + # See Issue #3833 + import matplotlib.collections # call as mpl.collections + + G = nx.Graph([(1, 2), (2, 3)]) + DG = nx.DiGraph([(1, 2), (2, 3)]) + pos = nx.circular_layout(G) + assert isinstance( + nx.draw_networkx_nodes(G, pos, nodelist=[]), mpl.collections.PathCollection + ) + assert isinstance( + nx.draw_networkx_nodes(DG, pos, nodelist=[]), mpl.collections.PathCollection + ) + + # drawing empty edges used to return an empty LineCollection or empty list. + # Now it is always an empty list (because edges are now lists of FancyArrows) + assert nx.draw_networkx_edges(G, pos, edgelist=[], arrows=True) == [] + assert nx.draw_networkx_edges(G, pos, edgelist=[], arrows=False) == [] + assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=False) == [] + assert nx.draw_networkx_edges(DG, pos, edgelist=[], arrows=True) == [] + + +def test_multigraph_edgelist_tuples(): + # See Issue #3295 + G = nx.path_graph(3, create_using=nx.MultiDiGraph) + nx.draw_networkx(G, edgelist=[(0, 1, 0)]) + nx.draw_networkx(G, edgelist=[(0, 1, 0)], node_size=[10, 20, 0]) + + +def test_alpha_iter(): + pos = nx.random_layout(barbell) + fig = plt.figure() + # with fewer alpha elements than nodes + fig.add_subplot(131) # Each test in a new axis object + nx.draw_networkx_nodes(barbell, pos, alpha=[0.1, 0.2]) + # with equal alpha elements and nodes + num_nodes = len(barbell.nodes) + alpha = [x / num_nodes for x in range(num_nodes)] + colors = range(num_nodes) + fig.add_subplot(132) + nx.draw_networkx_nodes(barbell, pos, node_color=colors, alpha=alpha) + # with more alpha elements than nodes + alpha.append(1) + fig.add_subplot(133) + nx.draw_networkx_nodes(barbell, pos, alpha=alpha) + + +def test_multiple_node_shapes(subplots): + fig, ax = subplots + G = nx.path_graph(4) + nx.draw(G, node_shape=["o", "h", "s", "^"], ax=ax) + scatters = [ + s for s in ax.get_children() if isinstance(s, mpl.collections.PathCollection) + ] + assert len(scatters) == 4 + + +def test_individualized_font_attributes(subplots): + G = nx.karate_club_graph() + fig, ax = subplots + nx.draw( + G, + ax=ax, + font_color={n: "k" if n % 2 else "r" for n in G.nodes()}, + font_size={n: int(n / (34 / 15) + 5) for n in G.nodes()}, + ) + for n, t in zip( + G.nodes(), + [ + t + for t in ax.get_children() + if isinstance(t, mpl.text.Text) and len(t.get_text()) > 0 + ], + ): + expected = "black" if n % 2 else "red" + + assert mpl.colors.same_color(t.get_color(), expected) + assert int(n / (34 / 
15) + 5) == t.get_size()
+
+
+def test_individualized_edge_attributes(subplots):
+    G = nx.karate_club_graph()
+    fig, ax = subplots
+    arrowstyles = ["-|>" if (u + v) % 2 == 0 else "-[" for u, v in G.edges()]
+    arrowsizes = [10 * (u % 2 + v % 2) + 10 for u, v in G.edges()]
+    nx.draw(G, ax=ax, arrows=True, arrowstyle=arrowstyles, arrowsize=arrowsizes)
+    arrows = [
+        f for f in ax.get_children() if isinstance(f, mpl.patches.FancyArrowPatch)
+    ]
+    for e, a in zip(G.edges(), arrows):
+        assert a.get_mutation_scale() == 10 * (e[0] % 2 + e[1] % 2) + 10
+        expected = (
+            mpl.patches.ArrowStyle.BracketB
+            if sum(e) % 2
+            else mpl.patches.ArrowStyle.CurveFilledB
+        )
+        assert isinstance(a.get_arrowstyle(), expected)
+
+
+def test_error_invalid_kwds():
+    with pytest.raises(ValueError, match="Received invalid argument"):
+        nx.draw(barbell, foo="bar")
+
+
+def test_draw_networkx_arrowsize_incorrect_size():
+    G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 3)])
+    arrowsize = [1, 2, 3]
+    with pytest.raises(
+        ValueError, match="arrowsize should have the same length as edgelist"
+    ):
+        nx.draw(G, arrowsize=arrowsize)
+
+
+@pytest.mark.parametrize("arrowsize", (30, [10, 20, 30]))
+def test_draw_edges_arrowsize(arrowsize):
+    G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
+    pos = {0: (0, 0), 1: (0, 1), 2: (1, 0)}
+    edges = nx.draw_networkx_edges(G, pos=pos, arrowsize=arrowsize)
+
+    arrowsize = itertools.repeat(arrowsize) if isinstance(arrowsize, int) else arrowsize
+
+    for fap, expected in zip(edges, arrowsize):
+        assert isinstance(fap, mpl.patches.FancyArrowPatch)
+        assert fap.get_mutation_scale() == expected
+
+
+@pytest.mark.parametrize("arrowstyle", ("-|>", ["-|>", "-[", "<|-|>"]))
+def test_draw_edges_arrowstyle(arrowstyle):
+    G = nx.DiGraph([(0, 1), (0, 2), (1, 2)])
+    pos = {0: (0, 0), 1: (0, 1), 2: (1, 0)}
+    edges = nx.draw_networkx_edges(G, pos=pos, arrowstyle=arrowstyle)
+
+    arrowstyle = (
+        itertools.repeat(arrowstyle) if isinstance(arrowstyle, str) else arrowstyle
+    )
+
+    arrow_objects = {
+        "-|>": mpl.patches.ArrowStyle.CurveFilledB,
+        "-[": mpl.patches.ArrowStyle.BracketB,
+        "<|-|>": mpl.patches.ArrowStyle.CurveFilledAB,
+    }
+
+    for fap, expected in zip(edges, arrowstyle):
+        assert isinstance(fap, mpl.patches.FancyArrowPatch)
+        assert isinstance(fap.get_arrowstyle(), arrow_objects[expected])
+
+
+def test_np_edgelist():
+    # see issue #4129
+    nx.draw_networkx(barbell, edgelist=np.array([(0, 2), (0, 3)]))
+
+
+def test_draw_nodes_missing_node_from_position():
+    G = nx.path_graph(3)
+    pos = {0: (0, 0), 1: (1, 1)}  # No position for node 2
+    with pytest.raises(nx.NetworkXError, match="has no position"):
+        nx.draw_networkx_nodes(G, pos)
+
+
+# NOTE: parametrizing on marker to test both branches of internal
+# nx.draw_networkx_edges.to_marker_edge function
+@pytest.mark.parametrize("node_shape", ("o", "s"))
+def test_draw_edges_min_source_target_margins(node_shape, subplots):
+    """Test that there is a wider gap between the node and the start of an
+    incident edge when min_source_margin is specified.
+
+    This test checks that using the min_{source/target}_margin kwargs results
+    in shorter edges, i.e. more padding between each edge and its source and
+    target nodes.
+
+    As a crude visual example, let 's' and 't' represent source and target
+    nodes, respectively:
+
+    Default:
+        s-----------------------------t
+
+    With margins:
+        s ----------------------- t
+
+    """
+    # Create a single axis object to get consistent pixel coords across
+    # multiple draws
+    fig, ax = subplots
+    G = nx.DiGraph([(0, 1)])
+    pos = {0: (0, 0), 1: (1, 0)}  # horizontal layout
+    # Get leftmost and rightmost points of the FancyArrowPatch object
+    # representing the edge between nodes 0 and 1 (in pixel coordinates)
+    default_patch = nx.draw_networkx_edges(G, pos, ax=ax, node_shape=node_shape)[0]
+    default_extent = default_patch.get_extents().corners()[::2, 0]
+    # Now, do the same but with "padding" for the source and target via the
+    # min_{source/target}_margin kwargs
+    padded_patch = nx.draw_networkx_edges(
+        G,
+        pos,
+        ax=ax,
+        node_shape=node_shape,
+        min_source_margin=100,
+        min_target_margin=100,
+    )[0]
+    padded_extent = padded_patch.get_extents().corners()[::2, 0]
+
+    # With padding, the left-most extent of the edge should be further to the
+    # right
+    assert padded_extent[0] > default_extent[0]
+    # And the rightmost extent of the edge, further to the left
+    assert padded_extent[1] < default_extent[1]
+
+
+# NOTE: parametrizing on marker to test both branches of internal
+# nx.draw_networkx_edges.to_marker_edge function
+@pytest.mark.parametrize("node_shape", ("o", "s"))
+def test_draw_edges_min_source_target_margins_individual(node_shape, subplots):
+    """Test that there is a wider gap between the node and the start of an
+    incident edge when min_source_margin is specified.
+
+    This test checks that using per-edge sequences for the
+    min_{source/target}_margin kwargs results in shorter edges, i.e. more
+    padding between each edge and its source and target nodes.
+
+    As a crude visual example, let 's' and 't' represent source and target
+    nodes, respectively:
+
+    Default:
+        s-----------------------------t
+
+    With margins:
+        s ----------------------- t
+
+    """
+    # Create a single axis object to get consistent pixel coords across
+    # multiple draws
+    fig, ax = subplots
+    G = nx.DiGraph([(0, 1), (1, 2)])
+    pos = {0: (0, 0), 1: (1, 0), 2: (2, 0)}  # horizontal layout
+    # Get leftmost and rightmost points of the FancyArrowPatch object
+    # representing the edge between nodes 0 and 1 (in pixel coordinates)
+    default_patch = nx.draw_networkx_edges(G, pos, ax=ax, node_shape=node_shape)
+    default_extent = [d.get_extents().corners()[::2, 0] for d in default_patch]
+    # Now, do the same but with "padding" for the source and target via the
+    # min_{source/target}_margin kwargs
+    padded_patch = nx.draw_networkx_edges(
+        G,
+        pos,
+        ax=ax,
+        node_shape=node_shape,
+        min_source_margin=[98, 102],
+        min_target_margin=[98, 102],
+    )
+    padded_extent = [p.get_extents().corners()[::2, 0] for p in padded_patch]
+    for d, p in zip(default_extent, padded_extent):
+        # With padding, the left-most extent of the edge should be further to the
+        # right
+        assert p[0] > d[0]
+        # And the rightmost extent of the edge, further to the left
+        assert p[1] < d[1]
+
+
+def test_nonzero_selfloop_with_single_node(subplots):
+    """Ensure that selfloop extent is non-zero when there is only one node."""
+    # Create explicit axis object for test
+    fig, ax = subplots
+    # Graph with single node + self loop
+    G = nx.DiGraph()
+    G.add_node(0)
+    G.add_edge(0, 0)
+    # Draw
+    patch = nx.draw_networkx_edges(G, {0: (0, 0)})[0]
+    # The resulting patch must have non-zero extent
+    bbox = patch.get_extents()
+    assert bbox.width > 0 and bbox.height > 0
+
+
+def
test_nonzero_selfloop_with_single_edge_in_edgelist(subplots): + """Ensure that selfloop extent is non-zero when only a single edge is + specified in the edgelist. + """ + # Create explicit axis object for test + fig, ax = subplots + # Graph with selfloop + G = nx.path_graph(2, create_using=nx.DiGraph) + G.add_edge(1, 1) + pos = {n: (n, n) for n in G.nodes} + # Draw only the selfloop edge via the `edgelist` kwarg + patch = nx.draw_networkx_edges(G, pos, edgelist=[(1, 1)])[0] + # The resulting patch must have non-zero extent + bbox = patch.get_extents() + assert bbox.width > 0 and bbox.height > 0 + + +def test_apply_alpha(): + """Test apply_alpha when there is a mismatch between the number of + supplied colors and elements. + """ + nodelist = [0, 1, 2] + colorlist = ["r", "g", "b"] + alpha = 0.5 + rgba_colors = nx.drawing.nx_pylab.apply_alpha(colorlist, alpha, nodelist) + assert all(rgba_colors[:, -1] == alpha) + + +def test_draw_edges_toggling_with_arrows_kwarg(): + """ + The `arrows` keyword argument is used as a 3-way switch to select which + type of object to use for drawing edges: + - ``arrows=None`` -> default (FancyArrowPatches for directed, else LineCollection) + - ``arrows=True`` -> FancyArrowPatches + - ``arrows=False`` -> LineCollection + """ + import matplotlib.collections + import matplotlib.patches + + UG = nx.path_graph(3) + DG = nx.path_graph(3, create_using=nx.DiGraph) + pos = {n: (n, n) for n in UG} + + # Use FancyArrowPatches when arrows=True, regardless of graph type + for G in (UG, DG): + edges = nx.draw_networkx_edges(G, pos, arrows=True) + assert len(edges) == len(G.edges) + assert isinstance(edges[0], mpl.patches.FancyArrowPatch) + + # Use LineCollection when arrows=False, regardless of graph type + for G in (UG, DG): + edges = nx.draw_networkx_edges(G, pos, arrows=False) + assert isinstance(edges, mpl.collections.LineCollection) + + # Default behavior when arrows=None: FAPs for directed, LC's for undirected + edges = nx.draw_networkx_edges(UG, pos) + assert isinstance(edges, mpl.collections.LineCollection) + edges = nx.draw_networkx_edges(DG, pos) + assert len(edges) == len(G.edges) + assert isinstance(edges[0], mpl.patches.FancyArrowPatch) + + +@pytest.mark.parametrize("drawing_func", (nx.draw, nx.draw_networkx)) +def test_draw_networkx_arrows_default_undirected(drawing_func, subplots): + import matplotlib.collections + + G = nx.path_graph(3) + fig, ax = subplots + drawing_func(G, ax=ax) + assert any(isinstance(c, mpl.collections.LineCollection) for c in ax.collections) + assert not ax.patches + + +@pytest.mark.parametrize("drawing_func", (nx.draw, nx.draw_networkx)) +def test_draw_networkx_arrows_default_directed(drawing_func, subplots): + import matplotlib.collections + + G = nx.path_graph(3, create_using=nx.DiGraph) + fig, ax = subplots + drawing_func(G, ax=ax) + assert not any( + isinstance(c, mpl.collections.LineCollection) for c in ax.collections + ) + assert ax.patches + + +def test_edgelist_kwarg_not_ignored(subplots): + # See gh-4994 + G = nx.path_graph(3) + G.add_edge(0, 0) + fig, ax = subplots + nx.draw(G, edgelist=[(0, 1), (1, 2)], ax=ax) # Exclude self-loop from edgelist + assert not ax.patches + + +@pytest.mark.parametrize( + ("G", "expected_n_edges"), + ([nx.DiGraph(), 2], [nx.MultiGraph(), 4], [nx.MultiDiGraph(), 4]), +) +def test_draw_networkx_edges_multiedge_connectionstyle(G, expected_n_edges): + """Draws edges correctly for 3 types of graphs and checks for valid length""" + for i, (u, v) in enumerate([(0, 1), (0, 1), (0, 1), (0, 2)]): + 
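        # Three parallel (0, 1) edges plus one (0, 2) edge are added below;
+        # nx.DiGraph collapses the parallels into a single edge (2 distinct
+        # edges drawn), while the multigraph types keep all four.
+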
G.add_edge(u, v, weight=round(i / 3, 2)) + pos = {n: (n, n) for n in G} + # Raises on insufficient connectionstyle length + for conn_style in [ + "arc3,rad=0.1", + ["arc3,rad=0.1", "arc3,rad=0.1"], + ["arc3,rad=0.1", "arc3,rad=0.1", "arc3,rad=0.2"], + ]: + nx.draw_networkx_edges(G, pos, connectionstyle=conn_style) + arrows = nx.draw_networkx_edges(G, pos, connectionstyle=conn_style) + assert len(arrows) == expected_n_edges + + +@pytest.mark.parametrize( + ("G", "expected_n_edges"), + ([nx.DiGraph(), 2], [nx.MultiGraph(), 4], [nx.MultiDiGraph(), 4]), +) +def test_draw_networkx_edge_labels_multiedge_connectionstyle(G, expected_n_edges): + """Draws labels correctly for 3 types of graphs and checks for valid length and class names""" + for i, (u, v) in enumerate([(0, 1), (0, 1), (0, 1), (0, 2)]): + G.add_edge(u, v, weight=round(i / 3, 2)) + pos = {n: (n, n) for n in G} + # Raises on insufficient connectionstyle length + arrows = nx.draw_networkx_edges( + G, pos, connectionstyle=["arc3,rad=0.1", "arc3,rad=0.1", "arc3,rad=0.1"] + ) + for conn_style in [ + "arc3,rad=0.1", + ["arc3,rad=0.1", "arc3,rad=0.2"], + ["arc3,rad=0.1", "arc3,rad=0.1", "arc3,rad=0.1"], + ]: + text_items = nx.draw_networkx_edge_labels(G, pos, connectionstyle=conn_style) + assert len(text_items) == expected_n_edges + for ti in text_items.values(): + assert ti.__class__.__name__ == "CurvedArrowText" + + +def test_draw_networkx_edge_label_multiedge(): + G = nx.MultiGraph() + G.add_edge(0, 1, weight=10) + G.add_edge(0, 1, weight=20) + edge_labels = nx.get_edge_attributes(G, "weight") # Includes edge keys + pos = {n: (n, n) for n in G} + text_items = nx.draw_networkx_edge_labels( + G, + pos, + edge_labels=edge_labels, + connectionstyle=["arc3,rad=0.1", "arc3,rad=0.2"], + ) + assert len(text_items) == 2 + + +def test_draw_networkx_edge_label_empty_dict(): + """Regression test for draw_networkx_edge_labels with empty dict. See + gh-5372.""" + G = nx.path_graph(3) + pos = {n: (n, n) for n in G.nodes} + assert nx.draw_networkx_edge_labels(G, pos, edge_labels={}) == {} + + +def test_draw_networkx_edges_undirected_selfloop_colors(subplots): + """When an edgelist is supplied along with a sequence of colors, check that + the self-loops have the correct colors.""" + fig, ax = subplots + # Edge list and corresponding colors + edgelist = [(1, 3), (1, 2), (2, 3), (1, 1), (3, 3), (2, 2)] + edge_colors = ["pink", "cyan", "black", "red", "blue", "green"] + + G = nx.Graph(edgelist) + pos = {n: (n, n) for n in G.nodes} + nx.draw_networkx_edges(G, pos, ax=ax, edgelist=edgelist, edge_color=edge_colors) + + # Verify that there are three fancy arrow patches (1 per self loop) + assert len(ax.patches) == 3 + + # These are points that should be contained in the self loops. 
For example, + # sl_points[0] will be (1, 1.1), which is inside the "path" of the first + # self-loop but outside the others + sl_points = np.array(edgelist[-3:]) + np.array([0, 0.1]) + + # Check that the mapping between self-loop locations and their colors is + # correct + for fap, clr, slp in zip(ax.patches, edge_colors[-3:], sl_points): + assert fap.get_path().contains_point(slp) + assert mpl.colors.same_color(fap.get_edgecolor(), clr) + + +@pytest.mark.parametrize( + "fap_only_kwarg", # Non-default values for kwargs that only apply to FAPs + ( + {"arrowstyle": "-"}, + {"arrowsize": 20}, + {"connectionstyle": "arc3,rad=0.2"}, + {"min_source_margin": 10}, + {"min_target_margin": 10}, + ), +) +def test_user_warnings_for_unused_edge_drawing_kwargs(fap_only_kwarg, subplots): + """Users should get a warning when they specify a non-default value for + one of the kwargs that applies only to edges drawn with FancyArrowPatches, + but FancyArrowPatches aren't being used under the hood.""" + G = nx.path_graph(3) + pos = {n: (n, n) for n in G} + fig, ax = subplots + # By default, an undirected graph will use LineCollection to represent + # the edges + kwarg_name = list(fap_only_kwarg.keys())[0] + with pytest.warns( + UserWarning, match=f"\n\nThe {kwarg_name} keyword argument is not applicable" + ): + nx.draw_networkx_edges(G, pos, ax=ax, **fap_only_kwarg) + # FancyArrowPatches are always used when `arrows=True` is specified. + # Check that warnings are *not* raised in this case + with warnings.catch_warnings(): + # Escalate warnings -> errors so tests fail if warnings are raised + warnings.simplefilter("error") + warnings.filterwarnings("ignore", category=DeprecationWarning) + nx.draw_networkx_edges(G, pos, ax=ax, arrows=True, **fap_only_kwarg) + + +@pytest.mark.parametrize("draw_fn", (nx.draw, nx.draw_circular)) +def test_no_warning_on_default_draw_arrowstyle(draw_fn, subplots): + # See gh-7284 + fig, ax = subplots + G = nx.cycle_graph(5) + with warnings.catch_warnings(record=True) as w: + draw_fn(G, ax=ax) + assert len(w) == 0 + + +@pytest.mark.parametrize("hide_ticks", [False, True]) +@pytest.mark.parametrize( + "method", + [ + nx.draw_networkx, + nx.draw_networkx_edge_labels, + nx.draw_networkx_edges, + nx.draw_networkx_labels, + nx.draw_networkx_nodes, + ], +) +def test_hide_ticks(method, hide_ticks, subplots): + G = nx.path_graph(3) + pos = {n: (n, n) for n in G.nodes} + _, ax = subplots + method(G, pos=pos, ax=ax, hide_ticks=hide_ticks) + for axis in [ax.xaxis, ax.yaxis]: + assert bool(axis.get_ticklabels()) != hide_ticks + + +def test_edge_label_bar_connectionstyle(subplots): + """Check that FancyArrowPatches with `bar` connectionstyle are also supported + in edge label rendering. 
See gh-7735."""
+    fig, ax = subplots
+    edge = (0, 1)
+    G = nx.DiGraph([edge])
+    pos = {n: (n, 0) for n in G}  # Edge is horizontal line between (0, 0) and (1, 0)
+
+    style_arc = "arc3,rad=0.0"
+    style_bar = "bar,fraction=0.1"
+
+    arc_lbl = nx.draw_networkx_edge_labels(
+        G, pos, edge_labels={edge: "edge"}, connectionstyle=style_arc
+    )
+    # This would fail prior to gh-7739
+    bar_lbl = nx.draw_networkx_edge_labels(
+        G, pos, edge_labels={edge: "edge"}, connectionstyle=style_bar
+    )
+
+    # For the "arc" style, the label should be at roughly the midpoint.
+    # NOTE: the comparands must be parenthesized as a tuple; without the
+    # parentheses this would parse as ``assert x, msg`` and never fail.
+    assert (arc_lbl[edge].x, arc_lbl[edge].y) == pytest.approx((0.5, 0))
+    # The label should be below the x-axis for the "bar" style
+    assert bar_lbl[edge].y < arc_lbl[edge].y
diff --git a/.venv/lib/python3.12/site-packages/networkx/exception.py b/.venv/lib/python3.12/site-packages/networkx/exception.py
new file mode 100644
index 0000000..c960cf1
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/exception.py
@@ -0,0 +1,131 @@
+"""
+**********
+Exceptions
+**********
+
+Base exceptions and errors for NetworkX.
+"""
+
+__all__ = [
+    "HasACycle",
+    "NodeNotFound",
+    "PowerIterationFailedConvergence",
+    "ExceededMaxIterations",
+    "AmbiguousSolution",
+    "NetworkXAlgorithmError",
+    "NetworkXException",
+    "NetworkXError",
+    "NetworkXNoCycle",
+    "NetworkXNoPath",
+    "NetworkXNotImplemented",
+    "NetworkXPointlessConcept",
+    "NetworkXUnbounded",
+    "NetworkXUnfeasible",
+]
+
+
+class NetworkXException(Exception):
+    """Base class for exceptions in NetworkX."""
+
+
+class NetworkXError(NetworkXException):
+    """Exception for a serious error in NetworkX."""
+
+
+class NetworkXPointlessConcept(NetworkXException):
+    """Raised when a null graph is provided as input to an algorithm
+    that cannot use it.
+
+    The null graph is sometimes considered a pointless concept [1]_,
+    thus the name of the exception.
+
+    Notes
+    -----
+    Null graphs and empty graphs are often used interchangeably, but they
+    are well defined in NetworkX. An ``empty_graph`` is a graph with ``n`` nodes
+    and 0 edges, and a ``null_graph`` is a graph with 0 nodes and 0 edges.
+
+    References
+    ----------
+    .. [1] Harary, F. and Read, R. "Is the Null Graph a Pointless
+       Concept?" In Graphs and Combinatorics Conference, George
+       Washington University. New York: Springer-Verlag, 1973.
+
+    """
+
+
+class NetworkXAlgorithmError(NetworkXException):
+    """Exception for unexpected termination of algorithms."""
+
+
+class NetworkXUnfeasible(NetworkXAlgorithmError):
+    """Exception raised by algorithms trying to solve a problem
+    instance that has no feasible solution."""
+
+
+class NetworkXNoPath(NetworkXUnfeasible):
+    """Exception for algorithms that should return a path when running
+    on graphs where such a path does not exist."""
+
+
+class NetworkXNoCycle(NetworkXUnfeasible):
+    """Exception for algorithms that should return a cycle when running
+    on graphs where such a cycle does not exist."""
+
+
+class HasACycle(NetworkXException):
+    """Raised if a graph has a cycle when an algorithm expects that it
+    will have no cycles.
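+
+    An illustrative doctest (the class is defined just above; nothing here
+    is specific to any one algorithm that raises it):
+
+    >>> import networkx as nx
+    >>> raise nx.HasACycle("graph must be acyclic")
+    Traceback (most recent call last):
+        ...
+    networkx.exception.HasACycle: graph must be acyclic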
+ + """ + + +class NetworkXUnbounded(NetworkXAlgorithmError): + """Exception raised by algorithms trying to solve a maximization + or a minimization problem instance that is unbounded.""" + + +class NetworkXNotImplemented(NetworkXException): + """Exception raised by algorithms not implemented for a type of graph.""" + + +class NodeNotFound(NetworkXException): + """Exception raised if requested node is not present in the graph""" + + +class AmbiguousSolution(NetworkXException): + """Raised if more than one valid solution exists for an intermediary step + of an algorithm. + + In the face of ambiguity, refuse the temptation to guess. + This may occur, for example, when trying to determine the + bipartite node sets in a disconnected bipartite graph when + computing bipartite matchings. + + """ + + +class ExceededMaxIterations(NetworkXException): + """Raised if a loop iterates too many times without breaking. + + This may occur, for example, in an algorithm that computes + progressively better approximations to a value but exceeds an + iteration bound specified by the user. + + """ + + +class PowerIterationFailedConvergence(ExceededMaxIterations): + """Raised when the power iteration method fails to converge within a + specified iteration limit. + + `num_iterations` is the number of iterations that have been + completed when this exception was raised. + + """ + + def __init__(self, num_iterations, *args, **kw): + msg = f"power iteration failed to converge within {num_iterations} iterations" + exception_message = msg + superinit = super().__init__ + superinit(self, exception_message, *args, **kw) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__init__.py b/.venv/lib/python3.12/site-packages/networkx/generators/__init__.py new file mode 100644 index 0000000..6ec027c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/__init__.py @@ -0,0 +1,34 @@ +""" +A package for generating various graphs in networkx. 
+ +""" + +from networkx.generators.atlas import * +from networkx.generators.classic import * +from networkx.generators.cographs import * +from networkx.generators.community import * +from networkx.generators.degree_seq import * +from networkx.generators.directed import * +from networkx.generators.duplication import * +from networkx.generators.ego import * +from networkx.generators.expanders import * +from networkx.generators.geometric import * +from networkx.generators.harary_graph import * +from networkx.generators.internet_as_graphs import * +from networkx.generators.intersection import * +from networkx.generators.interval_graph import * +from networkx.generators.joint_degree_seq import * +from networkx.generators.lattice import * +from networkx.generators.line import * +from networkx.generators.mycielski import * +from networkx.generators.nonisomorphic_trees import * +from networkx.generators.random_clustered import * +from networkx.generators.random_graphs import * +from networkx.generators.small import * +from networkx.generators.social import * +from networkx.generators.spectral_graph_forge import * +from networkx.generators.stochastic import * +from networkx.generators.sudoku import * +from networkx.generators.time_series import * +from networkx.generators.trees import * +from networkx.generators.triads import * diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..9aeff5c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/atlas.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/atlas.cpython-312.pyc new file mode 100644 index 0000000..83dbc28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/atlas.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/classic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/classic.cpython-312.pyc new file mode 100644 index 0000000..eda2ec4 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/classic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/cographs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/cographs.cpython-312.pyc new file mode 100644 index 0000000..4f53c1e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/cographs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/community.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/community.cpython-312.pyc new file mode 100644 index 0000000..4fade40 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/community.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/degree_seq.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/degree_seq.cpython-312.pyc new file mode 100644 index 0000000..5844311 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/degree_seq.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/directed.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/directed.cpython-312.pyc new file mode 100644 index 0000000..0333e6e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/directed.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/duplication.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/duplication.cpython-312.pyc new file mode 100644 index 0000000..317ebcf Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/duplication.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/ego.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/ego.cpython-312.pyc new file mode 100644 index 0000000..6d14b60 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/ego.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/expanders.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/expanders.cpython-312.pyc new file mode 100644 index 0000000..0cb93cc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/expanders.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/geometric.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/geometric.cpython-312.pyc new file mode 100644 index 0000000..10423a0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/geometric.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/harary_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/harary_graph.cpython-312.pyc new file mode 100644 index 0000000..6434fa9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/harary_graph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-312.pyc new file mode 100644 index 0000000..6756f3f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/internet_as_graphs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/intersection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/intersection.cpython-312.pyc new file mode 100644 index 0000000..579b9ba Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/intersection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/interval_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/interval_graph.cpython-312.pyc new file mode 100644 index 0000000..63f1f14 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/interval_graph.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-312.pyc new file mode 100644 index 0000000..bcd1300 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/joint_degree_seq.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/lattice.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/lattice.cpython-312.pyc new file mode 100644 index 0000000..929aa4a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/lattice.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/line.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/line.cpython-312.pyc new file mode 100644 index 0000000..414b254 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/line.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/mycielski.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/mycielski.cpython-312.pyc new file mode 100644 index 0000000..70967dc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/mycielski.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-312.pyc new file mode 100644 index 0000000..b140d91 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/nonisomorphic_trees.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_clustered.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_clustered.cpython-312.pyc new file mode 100644 index 0000000..6f0332a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_clustered.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_graphs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_graphs.cpython-312.pyc new file mode 100644 index 0000000..28961e9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/random_graphs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/small.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/small.cpython-312.pyc new file mode 100644 index 0000000..0766378 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/small.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/social.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/social.cpython-312.pyc new file mode 100644 index 0000000..271647f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/social.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-312.pyc new file mode 100644 index 0000000..c5fd47a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/spectral_graph_forge.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/stochastic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/stochastic.cpython-312.pyc new file mode 100644 index 0000000..6306601 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/stochastic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/sudoku.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/sudoku.cpython-312.pyc new file mode 100644 index 0000000..cdf0c22 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/sudoku.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/time_series.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/time_series.cpython-312.pyc new file mode 100644 index 0000000..ea7e07b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/time_series.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/trees.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/trees.cpython-312.pyc new file mode 100644 index 0000000..71cfb98 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/trees.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/triads.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/triads.cpython-312.pyc new file mode 100644 index 0000000..22d8d6f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/__pycache__/triads.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/atlas.dat.gz b/.venv/lib/python3.12/site-packages/networkx/generators/atlas.dat.gz new file mode 100644 index 0000000..b0a9870 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/atlas.dat.gz differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/atlas.py b/.venv/lib/python3.12/site-packages/networkx/generators/atlas.py new file mode 100644 index 0000000..000d478 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/atlas.py @@ -0,0 +1,227 @@ +""" +Generators for the small graph atlas. +""" + +import gzip +import importlib.resources +from itertools import islice + +import networkx as nx + +__all__ = ["graph_atlas", "graph_atlas_g"] + +#: The total number of graphs in the atlas. +#: +#: The graphs are labeled starting from 0 and extending to (but not +#: including) this number. +NUM_GRAPHS = 1253 + +#: The path to the data file containing the graph edge lists. +#: +#: This is the absolute path of the gzipped text file containing the +#: edge list for each graph in the atlas. The file contains one entry +#: per graph in the atlas, in sequential order, starting from graph +#: number 0 and extending through graph number 1252 (see +#: :data:`NUM_GRAPHS`). Each entry looks like +#: +#: .. 
sourcecode:: text
+#:
+#:     GRAPH 6
+#:     NODES 3
+#:     0 1
+#:     0 2
+#:
+#: where the first two lines are the graph's index in the atlas and the
+#: number of nodes in the graph, and the remaining lines are the edge
+#: list.
+#:
+#: This file was generated from a Python list of graphs via code like
+#: the following::
+#:
+#:     import gzip
+#:     from networkx.generators.atlas import graph_atlas_g
+#:     from networkx.readwrite.edgelist import write_edgelist
+#:
+#:     with gzip.open('atlas.dat.gz', 'wb') as f:
+#:         for i, G in enumerate(graph_atlas_g()):
+#:             f.write(bytes(f'GRAPH {i}\n', encoding='utf-8'))
+#:             f.write(bytes(f'NODES {len(G)}\n', encoding='utf-8'))
+#:             write_edgelist(G, f, data=False)
+#:
+
+# Path to the atlas file
+ATLAS_FILE = importlib.resources.files("networkx.generators") / "atlas.dat.gz"
+
+
+def _generate_graphs():
+    """Sequentially read the file containing the edge list data for the
+    graphs in the atlas and generate the graphs one at a time.
+
+    This function reads the file given in :data:`.ATLAS_FILE`.
+
+    """
+    with gzip.open(ATLAS_FILE, "rb") as f:
+        line = f.readline()
+        while line and line.startswith(b"GRAPH"):
+            # The first two lines of each entry tell us the index of the
+            # graph in the list and the number of nodes in the graph.
+            # They look like this:
+            #
+            #     GRAPH 3
+            #     NODES 2
+            #
+            graph_index = int(line[6:].rstrip())
+            line = f.readline()
+            num_nodes = int(line[6:].rstrip())
+            # The remaining lines contain the edge list, until the next
+            # GRAPH line (or until the end of the file).
+            edgelist = []
+            line = f.readline()
+            while line and not line.startswith(b"GRAPH"):
+                edgelist.append(line.rstrip())
+                line = f.readline()
+            G = nx.Graph()
+            G.name = f"G{graph_index}"
+            G.add_nodes_from(range(num_nodes))
+            G.add_edges_from(tuple(map(int, e.split())) for e in edgelist)
+            yield G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def graph_atlas(i):
+    """Returns graph number `i` from the Graph Atlas.
+
+    For more information, see :func:`.graph_atlas_g`.
+
+    Parameters
+    ----------
+    i : int
+        The index of the graph from the atlas to get. The graph at index
+        0 is assumed to be the null graph.
+
+    Returns
+    -------
+    G : networkx.Graph
+        The single graph with index *i* in the Graph Atlas (see
+        :func:`graph_atlas_g` for the full list).
+
+    See also
+    --------
+    graph_atlas_g
+
+    Notes
+    -----
+    The time required by this function increases linearly with the
+    argument `i`, since it reads a large file sequentially in order to
+    generate the graph [1]_.
+
+    References
+    ----------
+    .. [1] Ronald C. Read and Robin J. Wilson, *An Atlas of Graphs*.
+       Oxford University Press, 1998.
+
+    """
+    if not (0 <= i < NUM_GRAPHS):
+        raise ValueError(f"index must be between 0 and {NUM_GRAPHS}")
+    return next(islice(_generate_graphs(), i, None))
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def graph_atlas_g():
+    """Returns the list of all graphs with up to seven nodes named in the
+    Graph Atlas.
+
+    The graphs are listed in increasing order by
+
+    1. number of nodes,
+    2. number of edges,
+    3. degree sequence (for example 111223 < 112222),
+    4. number of automorphisms,
+
+    in that order, with three exceptions as described in the *Notes*
+    section below. This causes the list to correspond with the index of
+    the graphs in the Graph Atlas [atlas]_, with the first graph,
+    ``G[0]``, being the null graph.
+
+    Returns
+    -------
+    list
+        A list of :class:`~networkx.Graph` objects, the one at index *i*
+        corresponding to the graph *i* in the Graph Atlas.
+
+    Examples
+    --------
+    >>> from pprint import pprint
+    >>> atlas = nx.graph_atlas_g()
+
+    There are 1253 graphs in the atlas
+
+    >>> len(atlas)
+    1253
+
+    The number of graphs with *n* nodes, where *n* ranges from 0 to 7:
+
+    >>> from collections import Counter
+    >>> num_nodes_per_graph = [len(G) for G in atlas]
+    >>> Counter(num_nodes_per_graph)
+    Counter({7: 1044, 6: 156, 5: 34, 4: 11, 3: 4, 2: 2, 0: 1, 1: 1})
+
+    Since the atlas is ordered by the number of nodes in the graph, all graphs
+    with *n* nodes can be obtained by slicing the atlas. For example, all
+    graphs with 5 nodes:
+
+    >>> G5_list = atlas[19:53]
+    >>> all(len(G) == 5 for G in G5_list)
+    True
+
+    Or all graphs with at least 3 nodes but fewer than 7 nodes:
+
+    >>> G3_6_list = atlas[4:209]
+
+    More generally, the indices that partition the atlas by the number of nodes
+    per graph:
+
+    >>> import itertools
+    >>> partition_indices = [0] + list(
+    ...     itertools.accumulate(Counter(num_nodes_per_graph).values())  # cumsum
+    ... )
+    >>> partition_indices
+    [0, 1, 2, 4, 8, 19, 53, 209, 1253]
+    >>> partition_mapping = dict(enumerate(itertools.pairwise(partition_indices)))
+    >>> pprint(partition_mapping)
+    {0: (0, 1),
+     1: (1, 2),
+     2: (2, 4),
+     3: (4, 8),
+     4: (8, 19),
+     5: (19, 53),
+     6: (53, 209),
+     7: (209, 1253)}
+
+    See also
+    --------
+    graph_atlas
+
+    Notes
+    -----
+    This function may be expensive in both time and space, since it
+    reads a large file sequentially in order to populate the list.
+
+    Although the NetworkX atlas functions match the order of graphs
+    given in the "Atlas of Graphs" book, there are (at least) three
+    errors in the ordering described in the book. The following three
+    pairs of graphs violate the lexicographically nondecreasing sorted
+    degree sequence rule:
+
+    - graphs 55 and 56 with degree sequences 001111 and 000112,
+    - graphs 1007 and 1008 with degree sequences 3333444 and 3333336,
+    - graphs 1012 and 1013 with degree sequences 1244555 and 1244456.
+
+    References
+    ----------
+    .. [atlas] Ronald C. Read and Robin J. Wilson,
+       *An Atlas of Graphs*.
+       Oxford University Press, 1998.
+
+    """
+    return list(_generate_graphs())
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/classic.py b/.venv/lib/python3.12/site-packages/networkx/generators/classic.py
new file mode 100644
index 0000000..a461e7b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/classic.py
@@ -0,0 +1,1068 @@
+"""Generators for some classic graphs.
+
+The typical graph builder function is called as follows:
+
+>>> G = nx.complete_graph(100)
+
+returning the complete graph on n nodes labeled 0, ..., 99
+as a simple graph. Except for `empty_graph`, all the functions
+in this module return a Graph class (i.e. a simple, undirected graph).
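+
+For instance (an illustrative doctest; ``path_graph`` is one of the
+generators defined below):
+
+>>> G = nx.path_graph(4)
+>>> list(G.edges)
+[(0, 1), (1, 2), (2, 3)]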
+ +""" + +import itertools +import numbers + +import networkx as nx +from networkx.classes import Graph +from networkx.exception import NetworkXError +from networkx.utils import nodes_or_number, pairwise + +__all__ = [ + "balanced_tree", + "barbell_graph", + "binomial_tree", + "complete_graph", + "complete_multipartite_graph", + "circular_ladder_graph", + "circulant_graph", + "cycle_graph", + "dorogovtsev_goltsev_mendes_graph", + "empty_graph", + "full_rary_tree", + "kneser_graph", + "ladder_graph", + "lollipop_graph", + "null_graph", + "path_graph", + "star_graph", + "tadpole_graph", + "trivial_graph", + "turan_graph", + "wheel_graph", +] + + +# ------------------------------------------------------------------- +# Some Classic Graphs +# ------------------------------------------------------------------- + + +def _tree_edges(n, r): + if n == 0: + return + # helper function for trees + # yields edges in rooted tree at 0 with n nodes and branching ratio r + nodes = iter(range(n)) + parents = [next(nodes)] # stack of max length r + while parents: + source = parents.pop(0) + for i in range(r): + try: + target = next(nodes) + parents.append(target) + yield source, target + except StopIteration: + break + + +@nx._dispatchable(graphs=None, returns_graph=True) +def full_rary_tree(r, n, create_using=None): + """Creates a full r-ary tree of `n` nodes. + + Sometimes called a k-ary, n-ary, or m-ary tree. + "... all non-leaf nodes have exactly r children and all levels + are full except for some rightmost position of the bottom level + (if a leaf at the bottom level is missing, then so are all of the + leaves to its right." [1]_ + + .. plot:: + + >>> nx.draw(nx.full_rary_tree(2, 10)) + + Parameters + ---------- + r : int + branching factor of the tree + n : int + Number of nodes in the tree + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + An r-ary tree with n nodes + + References + ---------- + .. [1] An introduction to data structures and algorithms, + James Andrew Storer, Birkhauser Boston 2001, (page 225). + """ + G = empty_graph(n, create_using) + G.add_edges_from(_tree_edges(n, r)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def kneser_graph(n, k): + """Returns the Kneser Graph with parameters `n` and `k`. + + The Kneser Graph has nodes that are k-tuples (subsets) of the integers + between 0 and ``n-1``. Nodes are adjacent if their corresponding sets are disjoint. + + Parameters + ---------- + n: int + Number of integers from which to make node subsets. + Subsets are drawn from ``set(range(n))``. + k: int + Size of the subsets. 
+
+ Returns
+ -------
+ G : NetworkX Graph
+
+ Examples
+ --------
+ >>> G = nx.kneser_graph(5, 2)
+ >>> G.number_of_nodes()
+ 10
+ >>> G.number_of_edges()
+ 15
+ >>> nx.is_isomorphic(G, nx.petersen_graph())
+ True
+ """
+ if n <= 0:
+ raise NetworkXError("n should be greater than zero")
+ if k <= 0 or k > n:
+ raise NetworkXError("k should be greater than zero and at most n")
+
+ G = nx.Graph()
+ # Create all k-subsets of [0, 1, ..., n-1]
+ subsets = list(itertools.combinations(range(n), k))
+
+ if 2 * k > n:
+ G.add_nodes_from(subsets)
+
+ universe = set(range(n))
+ comb = itertools.combinations # only to make it all fit on one line
+ G.add_edges_from((s, t) for s in subsets for t in comb(universe - set(s), k))
+ return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def balanced_tree(r, h, create_using=None):
+ """Returns the perfectly balanced `r`-ary tree of height `h`.
+
+ .. plot::
+
+ >>> nx.draw(nx.balanced_tree(2, 3))
+
+ Parameters
+ ----------
+ r : int
+ Branching factor of the tree; each node will have `r`
+ children.
+
+ h : int
+ Height of the tree.
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+
+ Returns
+ -------
+ G : NetworkX graph
+ A balanced `r`-ary tree of height `h`.
+
+ Notes
+ -----
+ This is the rooted tree where all leaves are at distance `h` from
+ the root. The root has degree `r` and all other internal nodes
+ have degree `r + 1`.
+
+ Node labels are integers, starting from zero.
+
+ A balanced tree is also known as a *complete r-ary tree*.
+
+ """
+ # The number of nodes in the balanced tree is `1 + r + ... + r^h`,
+ # which is computed by using the closed-form formula for a geometric
+ # sum with ratio `r`. In the special case that `r` is 1, the number
+ # of nodes is simply `h + 1` (since the tree is actually a path
+ # graph).
+ if r == 1:
+ n = h + 1
+ else:
+ # This must be an integer if both `r` and `h` are integers. If
+ # they are not, we force integer division anyway.
+ n = (1 - r ** (h + 1)) // (1 - r)
+ return full_rary_tree(r, n, create_using=create_using)
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def barbell_graph(m1, m2, create_using=None):
+ """Returns the Barbell Graph: two complete graphs connected by a path.
+
+ .. plot::
+
+ >>> nx.draw(nx.barbell_graph(4, 2))
+
+ Parameters
+ ----------
+ m1 : int
+ Size of the left and right barbells, must be at least 2.
+
+ m2 : int
+ Length of the path connecting the barbells; must be non-negative.
+
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+ Only undirected Graphs are supported.
+
+ Returns
+ -------
+ G : NetworkX graph
+ A barbell graph.
+
+ Notes
+ -----
+ Two identical complete graphs $K_{m1}$ form the left and right bells,
+ and are connected by a path $P_{m2}$.
+
+ The `2*m1+m2` nodes are numbered
+ `0, ..., m1-1` for the left barbell,
+ `m1, ..., m1+m2-1` for the path,
+ and `m1+m2, ..., 2*m1+m2-1` for the right barbell.
+
+ The 3 subgraphs are joined via the edges `(m1-1, m1)` and
+ `(m1+m2-1, m1+m2)`. If `m2=0`, this is merely two complete
+ graphs joined together.
+
+ This graph is an extremal example in David Aldous
+ and Jim Fill's e-text on Random Walks on Graphs.
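+
+ Examples
+ --------
+ A small sanity check of the construction described above (doctest added
+ for illustration): with ``m1 = 3`` and ``m2 = 2`` there are ``2*3 + 2 = 8``
+ nodes and ``3 + 3 + 1 + 2 = 9`` edges (two triangles, one path edge, and
+ two joining edges).
+
+ >>> G = nx.barbell_graph(3, 2)
+ >>> G.number_of_nodes()
+ 8
+ >>> G.number_of_edges()
+ 9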
+ + """ + if m1 < 2: + raise NetworkXError("Invalid graph description, m1 should be >=2") + if m2 < 0: + raise NetworkXError("Invalid graph description, m2 should be >=0") + + # left barbell + G = complete_graph(m1, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + + # connecting path + G.add_nodes_from(range(m1, m1 + m2 - 1)) + if m2 > 1: + G.add_edges_from(pairwise(range(m1, m1 + m2))) + + # right barbell + G.add_edges_from( + (u, v) for u in range(m1 + m2, 2 * m1 + m2) for v in range(u + 1, 2 * m1 + m2) + ) + + # connect it up + G.add_edge(m1 - 1, m1) + if m2 > 0: + G.add_edge(m1 + m2 - 1, m1 + m2) + + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def binomial_tree(n, create_using=None): + """Returns the Binomial Tree of order n. + + The binomial tree of order 0 consists of a single node. A binomial tree of order k + is defined recursively by linking two binomial trees of order k-1: the root of one is + the leftmost child of the root of the other. + + .. plot:: + + >>> nx.draw(nx.binomial_tree(3)) + + Parameters + ---------- + n : int + Order of the binomial tree. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : NetworkX graph + A binomial tree of $2^n$ nodes and $2^n - 1$ edges. + + """ + G = nx.empty_graph(1, create_using) + + N = 1 + for i in range(n): + # Use G.edges() to ensure 2-tuples. G.edges is 3-tuple for MultiGraph + edges = [(u + N, v + N) for (u, v) in G.edges()] + G.add_edges_from(edges) + G.add_edge(0, N) + N *= 2 + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def complete_graph(n, create_using=None): + """Return the complete graph `K_n` with n nodes. + + A complete graph on `n` nodes means that all pairs + of distinct nodes have an edge connecting them. + + .. plot:: + + >>> nx.draw(nx.complete_graph(5)) + + Parameters + ---------- + n : int or iterable container of nodes + If n is an integer, nodes are from range(n). + If n is a container of nodes, those nodes appear in the graph. + Warning: n is not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Examples + -------- + >>> G = nx.complete_graph(9) + >>> len(G) + 9 + >>> G.size() + 36 + >>> G = nx.complete_graph(range(11, 14)) + >>> list(G.nodes()) + [11, 12, 13] + >>> G = nx.complete_graph(4, nx.DiGraph()) + >>> G.is_directed() + True + + """ + _, nodes = n + G = empty_graph(nodes, create_using) + if len(nodes) > 1: + if G.is_directed(): + edges = itertools.permutations(nodes, 2) + else: + edges = itertools.combinations(nodes, 2) + G.add_edges_from(edges) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def circular_ladder_graph(n, create_using=None): + """Returns the circular ladder graph $CL_n$ of length n. + + $CL_n$ consists of two concentric n-cycles in which + each of the n pairs of concentric nodes are joined by an edge. + + Node labels are the integers 0 to n-1 + + .. 
plot:: + + >>> nx.draw(nx.circular_ladder_graph(5)) + + """ + G = ladder_graph(n, create_using) + G.add_edge(0, n - 1) + G.add_edge(n, 2 * n - 1) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def circulant_graph(n, offsets, create_using=None): + r"""Returns the circulant graph $Ci_n(x_1, x_2, ..., x_m)$ with $n$ nodes. + + The circulant graph $Ci_n(x_1, ..., x_m)$ consists of $n$ nodes $0, ..., n-1$ + such that node $i$ is connected to nodes $(i + x) \mod n$ and $(i - x) \mod n$ + for all $x$ in $x_1, ..., x_m$. Thus $Ci_n(1)$ is a cycle graph. + + .. plot:: + + >>> nx.draw(nx.circulant_graph(10, [1])) + + Parameters + ---------- + n : integer + The number of nodes in the graph. + offsets : list of integers + A list of node offsets, $x_1$ up to $x_m$, as described above. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + NetworkX Graph of type create_using + + Examples + -------- + Many well-known graph families are subfamilies of the circulant graphs; + for example, to create the cycle graph on n points, we connect every + node to nodes on either side (with offset plus or minus one). For n = 10, + + >>> G = nx.circulant_graph(10, [1]) + >>> edges = [ + ... (0, 9), + ... (0, 1), + ... (1, 2), + ... (2, 3), + ... (3, 4), + ... (4, 5), + ... (5, 6), + ... (6, 7), + ... (7, 8), + ... (8, 9), + ... ] + >>> sorted(edges) == sorted(G.edges()) + True + + Similarly, we can create the complete graph + on 5 points with the set of offsets [1, 2]: + + >>> G = nx.circulant_graph(5, [1, 2]) + >>> edges = [ + ... (0, 1), + ... (0, 2), + ... (0, 3), + ... (0, 4), + ... (1, 2), + ... (1, 3), + ... (1, 4), + ... (2, 3), + ... (2, 4), + ... (3, 4), + ... ] + >>> sorted(edges) == sorted(G.edges()) + True + + """ + G = empty_graph(n, create_using) + for i in range(n): + for j in offsets: + G.add_edge(i, (i - j) % n) + G.add_edge(i, (i + j) % n) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def cycle_graph(n, create_using=None): + """Returns the cycle graph $C_n$ of cyclically connected nodes. + + $C_n$ is a path with its two end-nodes connected. + + .. plot:: + + >>> nx.draw(nx.cycle_graph(5)) + + Parameters + ---------- + n : int or iterable container of nodes + If n is an integer, nodes are from `range(n)`. + If n is a container of nodes, those nodes appear in the graph. + Warning: n is not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Notes + ----- + If create_using is directed, the direction is in increasing order. + + """ + _, nodes = n + G = empty_graph(nodes, create_using) + G.add_edges_from(pairwise(nodes, cyclic=True)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def dorogovtsev_goltsev_mendes_graph(n, create_using=None): + """Returns the hierarchically constructed Dorogovtsev--Goltsev--Mendes graph. + + The Dorogovtsev--Goltsev--Mendes [1]_ procedure deterministically produces a + scale-free graph with ``3/2 * (3**(n-1) + 1)`` nodes + and ``3**n`` edges for a given `n`. + + Note that `n` denotes the number of times the state transition is applied, + starting from the base graph with ``n = 0`` (no transitions), as in [2]_. 
+ This is different from the parameter ``t = n - 1`` in [1]_. + + .. plot:: + + >>> nx.draw(nx.dorogovtsev_goltsev_mendes_graph(3)) + + Parameters + ---------- + n : integer + The generation number. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. Directed graphs and multigraphs are not supported. + + Returns + ------- + G : NetworkX `Graph` + + Raises + ------ + NetworkXError + If `n` is less than zero. + + If `create_using` is a directed graph or multigraph. + + Examples + -------- + >>> G = nx.dorogovtsev_goltsev_mendes_graph(3) + >>> G.number_of_nodes() + 15 + >>> G.number_of_edges() + 27 + >>> nx.is_planar(G) + True + + References + ---------- + .. [1] S. N. Dorogovtsev, A. V. Goltsev and J. F. F. Mendes, + "Pseudofractal scale-free web", Physical Review E 65, 066122, 2002. + https://arxiv.org/pdf/cond-mat/0112143.pdf + .. [2] Weisstein, Eric W. "Dorogovtsev--Goltsev--Mendes Graph". + From MathWorld--A Wolfram Web Resource. + https://mathworld.wolfram.com/Dorogovtsev-Goltsev-MendesGraph.html + """ + if n < 0: + raise NetworkXError("n must be greater than or equal to 0") + + G = empty_graph(0, create_using) + if G.is_directed(): + raise NetworkXError("directed graph not supported") + if G.is_multigraph(): + raise NetworkXError("multigraph not supported") + + G.add_edge(0, 1) + new_node = 2 # next node to be added + for _ in range(n): # iterate over number of generations. + new_edges = [] + for u, v in G.edges(): + new_edges.append((u, new_node)) + new_edges.append((v, new_node)) + new_node += 1 + + G.add_edges_from(new_edges) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def empty_graph(n=0, create_using=None, default=Graph): + """Returns the empty graph with n nodes and zero edges. + + .. plot:: + + >>> nx.draw(nx.empty_graph(5)) + + Parameters + ---------- + n : int or iterable container of nodes (default = 0) + If n is an integer, nodes are from `range(n)`. + If n is a container of nodes, those nodes appear in the graph. + create_using : Graph Instance, Constructor or None + Indicator of type of graph to return. + If a Graph-type instance, then clear and use it. + If None, use the `default` constructor. + If a constructor, call it to create an empty graph. + default : Graph constructor (optional, default = nx.Graph) + The constructor to use if create_using is None. + If None, then nx.Graph is used. + This is used when passing an unknown `create_using` value + through your home-grown function to `empty_graph` and + you want a default constructor other than nx.Graph. + + Examples + -------- + >>> G = nx.empty_graph(10) + >>> G.number_of_nodes() + 10 + >>> G.number_of_edges() + 0 + >>> G = nx.empty_graph("ABC") + >>> G.number_of_nodes() + 3 + >>> sorted(G) + ['A', 'B', 'C'] + + Notes + ----- + The variable create_using should be a Graph Constructor or a + "graph"-like object. Constructors, e.g. `nx.Graph` or `nx.MultiGraph` + will be used to create the returned graph. "graph"-like objects + will be cleared (nodes and edges will be removed) and refitted as + an empty "graph" with nodes specified in n. This capability + is useful for specifying the class-nature of the resulting empty + "graph" (i.e. Graph, DiGraph, MyWeirdGraphClass, etc.). + + The variable create_using has three main uses: + Firstly, the variable create_using can be used to create an + empty digraph, multigraph, etc. 
For example, + + >>> n = 10 + >>> G = nx.empty_graph(n, create_using=nx.DiGraph) + + will create an empty digraph on n nodes. + + Secondly, one can pass an existing graph (digraph, multigraph, + etc.) via create_using. For example, if G is an existing graph + (resp. digraph, multigraph, etc.), then empty_graph(n, create_using=G) + will empty G (i.e. delete all nodes and edges using G.clear()) + and then add n nodes and zero edges, and return the modified graph. + + Thirdly, when constructing your home-grown graph creation function + you can use empty_graph to construct the graph by passing a user + defined create_using to empty_graph. In this case, if you want the + default constructor to be other than nx.Graph, specify `default`. + + >>> def mygraph(n, create_using=None): + ... G = nx.empty_graph(n, create_using, nx.MultiGraph) + ... G.add_edges_from([(0, 1), (0, 1)]) + ... return G + >>> G = mygraph(3) + >>> G.is_multigraph() + True + >>> G = mygraph(3, nx.Graph) + >>> G.is_multigraph() + False + + See also create_empty_copy(G). + + """ + if create_using is None: + G = default() + elif isinstance(create_using, type): + G = create_using() + elif not hasattr(create_using, "adj"): + raise TypeError("create_using is not a valid NetworkX graph type or instance") + else: + # create_using is a NetworkX style Graph + create_using.clear() + G = create_using + + _, nodes = n + G.add_nodes_from(nodes) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def ladder_graph(n, create_using=None): + """Returns the Ladder graph of length n. + + This is two paths of n nodes, with + each pair connected by a single edge. + + Node labels are the integers 0 to 2*n - 1. + + .. plot:: + + >>> nx.draw(nx.ladder_graph(5)) + + """ + G = empty_graph(2 * n, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + G.add_edges_from(pairwise(range(n))) + G.add_edges_from(pairwise(range(n, 2 * n))) + G.add_edges_from((v, v + n) for v in range(n)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number([0, 1]) +def lollipop_graph(m, n, create_using=None): + """Returns the Lollipop Graph; ``K_m`` connected to ``P_n``. + + This is the Barbell Graph without the right barbell. + + .. plot:: + + >>> nx.draw(nx.lollipop_graph(3, 4)) + + Parameters + ---------- + m, n : int or iterable container of nodes + If an integer, nodes are from ``range(m)`` and ``range(m, m+n)``. + If a container of nodes, those nodes appear in the graph. + Warning: `m` and `n` are not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + + The nodes for `m` appear in the complete graph $K_m$ and the nodes + for `n` appear in the path $P_n$ + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + Networkx graph + A complete graph with `m` nodes connected to a path of length `n`. + + Notes + ----- + The 2 subgraphs are joined via an edge ``(m-1, m)``. + If ``n=0``, this is merely a complete graph. + + (This graph is an extremal example in David Aldous and Jim + Fill's etext on Random Walks on Graphs.) 
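+
+ Examples
+ --------
+ A quick count check (doctest added for illustration): ``m = 3`` and
+ ``n = 4`` give ``3 + 4 = 7`` nodes and ``3 + 3 + 1 = 7`` edges (the
+ triangle, the path, and the joining edge).
+
+ >>> G = nx.lollipop_graph(3, 4)
+ >>> G.number_of_nodes()
+ 7
+ >>> G.number_of_edges()
+ 7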
+ + """ + m, m_nodes = m + M = len(m_nodes) + if M < 2: + raise NetworkXError("Invalid description: m should indicate at least 2 nodes") + + n, n_nodes = n + if isinstance(m, numbers.Integral) and isinstance(n, numbers.Integral): + n_nodes = list(range(M, M + n)) + N = len(n_nodes) + + # the ball + G = complete_graph(m_nodes, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + + # the stick + G.add_nodes_from(n_nodes) + if N > 1: + G.add_edges_from(pairwise(n_nodes)) + + if len(G) != M + N: + raise NetworkXError("Nodes must be distinct in containers m and n") + + # connect ball to stick + if M > 0 and N > 0: + G.add_edge(m_nodes[-1], n_nodes[0]) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def null_graph(create_using=None): + """Returns the Null graph with no nodes or edges. + + See empty_graph for the use of create_using. + + """ + G = empty_graph(0, create_using) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def path_graph(n, create_using=None): + """Returns the Path graph `P_n` of linearly connected nodes. + + .. plot:: + + >>> nx.draw(nx.path_graph(5)) + + Parameters + ---------- + n : int or iterable + If an integer, nodes are 0 to n - 1. + If an iterable of nodes, in the order they appear in the path. + Warning: n is not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + """ + _, nodes = n + G = empty_graph(nodes, create_using) + G.add_edges_from(pairwise(nodes)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def star_graph(n, create_using=None): + """Return the star graph + + The star graph consists of one center node connected to n outer nodes. + + .. plot:: + + >>> nx.draw(nx.star_graph(6)) + + Parameters + ---------- + n : int or iterable + If an integer, node labels are 0 to n with center 0. + If an iterable of nodes, the center is the first. + Warning: n is not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Notes + ----- + The graph has n+1 nodes for integer n. + So star_graph(3) is the same as star_graph(range(4)). + """ + n, nodes = n + if isinstance(n, numbers.Integral): + nodes.append(int(n)) # there should be n+1 nodes + G = empty_graph(nodes, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + + if len(nodes) > 1: + hub, *spokes = nodes + G.add_edges_from((hub, node) for node in spokes) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number([0, 1]) +def tadpole_graph(m, n, create_using=None): + """Returns the (m,n)-tadpole graph; ``C_m`` connected to ``P_n``. + + This graph on m+n nodes connects a cycle of size `m` to a path of length `n`. + It looks like a tadpole. It is also called a kite graph or a dragon graph. + + .. plot:: + + >>> nx.draw(nx.tadpole_graph(3, 5)) + + Parameters + ---------- + m, n : int or iterable container of nodes + If an integer, nodes are from ``range(m)`` and ``range(m,m+n)``. + If a container of nodes, those nodes appear in the graph. 
+ Warning: `m` and `n` are not checked for duplicates and if present the + resulting graph may not be as desired. + + The nodes for `m` appear in the cycle graph $C_m$ and the nodes + for `n` appear in the path $P_n$. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + Networkx graph + A cycle of size `m` connected to a path of length `n`. + + Raises + ------ + NetworkXError + If ``m < 2``. The tadpole graph is undefined for ``m<2``. + + Notes + ----- + The 2 subgraphs are joined via an edge ``(m-1, m)``. + If ``n=0``, this is a cycle graph. + `m` and/or `n` can be a container of nodes instead of an integer. + + """ + m, m_nodes = m + M = len(m_nodes) + if M < 2: + raise NetworkXError("Invalid description: m should indicate at least 2 nodes") + + n, n_nodes = n + if isinstance(m, numbers.Integral) and isinstance(n, numbers.Integral): + n_nodes = list(range(M, M + n)) + + # the circle + G = cycle_graph(m_nodes, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + + # the stick + nx.add_path(G, [m_nodes[-1]] + list(n_nodes)) + + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def trivial_graph(create_using=None): + """Return the Trivial graph with one node (with label 0) and no edges. + + .. plot:: + + >>> nx.draw(nx.trivial_graph(), with_labels=True) + + """ + G = empty_graph(1, create_using) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def turan_graph(n, r): + r"""Return the Turan Graph + + The Turan Graph is a complete multipartite graph on $n$ nodes + with $r$ disjoint subsets. That is, edges connect each node to + every node not in its subset. + + Given $n$ and $r$, we create a complete multipartite graph with + $r-(n \mod r)$ partitions of size $n/r$, rounded down, and + $n \mod r$ partitions of size $n/r+1$, rounded down. + + .. plot:: + + >>> nx.draw(nx.turan_graph(6, 2)) + + Parameters + ---------- + n : int + The number of nodes. + r : int + The number of partitions. + Must be less than or equal to n. + + Notes + ----- + Must satisfy $1 <= r <= n$. + The graph has $(r-1)(n^2)/(2r)$ edges, rounded down. + """ + + if not 1 <= r <= n: + raise NetworkXError("Must satisfy 1 <= r <= n") + + partitions = [n // r] * (r - (n % r)) + [n // r + 1] * (n % r) + G = complete_multipartite_graph(*partitions) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number(0) +def wheel_graph(n, create_using=None): + """Return the wheel graph + + The wheel graph consists of a hub node connected to a cycle of (n-1) nodes. + + .. plot:: + + >>> nx.draw(nx.wheel_graph(5)) + + Parameters + ---------- + n : int or iterable + If an integer, node labels are 0 to n with center 0. + If an iterable of nodes, the center is the first. + Warning: n is not checked for duplicates and if present the + resulting graph may not be as desired. Make sure you have no duplicates. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Node labels are the integers 0 to n - 1. 
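+
+ Examples
+ --------
+ A quick count check (doctest added for illustration): the wheel on 5
+ nodes has 4 spokes plus a rim cycle of 4 edges.
+
+ >>> G = nx.wheel_graph(5)
+ >>> G.number_of_nodes()
+ 5
+ >>> G.number_of_edges()
+ 8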
+ """ + _, nodes = n + G = empty_graph(nodes, create_using) + if G.is_directed(): + raise NetworkXError("Directed Graph not supported") + + if len(nodes) > 1: + hub, *rim = nodes + G.add_edges_from((hub, node) for node in rim) + if len(rim) > 1: + G.add_edges_from(pairwise(rim, cyclic=True)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def complete_multipartite_graph(*subset_sizes): + """Returns the complete multipartite graph with the specified subset sizes. + + .. plot:: + + >>> nx.draw(nx.complete_multipartite_graph(1, 2, 3)) + + Parameters + ---------- + subset_sizes : tuple of integers or tuple of node iterables + The arguments can either all be integer number of nodes or they + can all be iterables of nodes. If integers, they represent the + number of nodes in each subset of the multipartite graph. + If iterables, each is used to create the nodes for that subset. + The length of subset_sizes is the number of subsets. + + Returns + ------- + G : NetworkX Graph + Returns the complete multipartite graph with the specified subsets. + + For each node, the node attribute 'subset' is an integer + indicating which subset contains the node. + + Examples + -------- + Creating a complete tripartite graph, with subsets of one, two, and three + nodes, respectively. + + >>> G = nx.complete_multipartite_graph(1, 2, 3) + >>> [G.nodes[u]["subset"] for u in G] + [0, 1, 1, 2, 2, 2] + >>> list(G.edges(0)) + [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)] + >>> list(G.edges(2)) + [(2, 0), (2, 3), (2, 4), (2, 5)] + >>> list(G.edges(4)) + [(4, 0), (4, 1), (4, 2)] + + >>> G = nx.complete_multipartite_graph("a", "bc", "def") + >>> [G.nodes[u]["subset"] for u in sorted(G)] + [0, 1, 1, 2, 2, 2] + + Notes + ----- + This function generalizes several other graph builder functions. + + - If no subset sizes are given, this returns the null graph. + - If a single subset size `n` is given, this returns the empty graph on + `n` nodes. + - If two subset sizes `m` and `n` are given, this returns the complete + bipartite graph on `m + n` nodes. + - If subset sizes `1` and `n` are given, this returns the star graph on + `n + 1` nodes. + + See also + -------- + complete_bipartite_graph + """ + # The complete multipartite graph is an undirected simple graph. + G = Graph() + + if len(subset_sizes) == 0: + return G + + # set up subsets of nodes + try: + extents = pairwise(itertools.accumulate((0,) + subset_sizes)) + subsets = [range(start, end) for start, end in extents] + except TypeError: + subsets = subset_sizes + else: + if any(size < 0 for size in subset_sizes): + raise NetworkXError(f"Negative number of nodes not valid: {subset_sizes}") + + # add nodes with subset attribute + # while checking that ints are not mixed with iterables + try: + for i, subset in enumerate(subsets): + G.add_nodes_from(subset, subset=i) + except TypeError as err: + raise NetworkXError("Arguments must be all ints or all iterables") from err + + # Across subsets, all nodes should be adjacent. + # We can use itertools.combinations() because undirected. 
+ for subset1, subset2 in itertools.combinations(subsets, 2): + G.add_edges_from(itertools.product(subset1, subset2)) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/cographs.py b/.venv/lib/python3.12/site-packages/networkx/generators/cographs.py new file mode 100644 index 0000000..6635b32 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/cographs.py @@ -0,0 +1,68 @@ +r"""Generators for cographs + +A cograph is a graph containing no path on four vertices. +Cographs or $P_4$-free graphs can be obtained from a single vertex +by disjoint union and complementation operations. + +References +---------- +.. [0] D.G. Corneil, H. Lerchs, L.Stewart Burlingham, + "Complement reducible graphs", + Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174, + ISSN 0166-218X. +""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["random_cograph"] + + +@py_random_state(1) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_cograph(n, seed=None): + r"""Returns a random cograph with $2 ^ n$ nodes. + + A cograph is a graph containing no path on four vertices. + Cographs or $P_4$-free graphs can be obtained from a single vertex + by disjoint union and complementation operations. + + This generator starts off from a single vertex and performs disjoint + union and full join operations on itself. + The decision on which operation will take place is random. + + Parameters + ---------- + n : int + The order of the cograph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : A random graph containing no path on four vertices. + + See Also + -------- + full_join + union + + References + ---------- + .. [1] D.G. Corneil, H. Lerchs, L.Stewart Burlingham, + "Complement reducible graphs", + Discrete Applied Mathematics, Volume 3, Issue 3, 1981, Pages 163-174, + ISSN 0166-218X. + """ + R = nx.empty_graph(1) + + for i in range(n): + RR = nx.relabel_nodes(R.copy(), lambda x: x + len(R)) + + if seed.randint(0, 1) == 0: + R = nx.full_join(R, RR) + else: + R = nx.disjoint_union(R, RR) + + return R diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/community.py b/.venv/lib/python3.12/site-packages/networkx/generators/community.py new file mode 100644 index 0000000..a7f2294 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/community.py @@ -0,0 +1,1070 @@ +"""Generators for classes of graphs used in studying social networks.""" + +import itertools +import math + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = [ + "caveman_graph", + "connected_caveman_graph", + "relaxed_caveman_graph", + "random_partition_graph", + "planted_partition_graph", + "gaussian_random_partition_graph", + "ring_of_cliques", + "windmill_graph", + "stochastic_block_model", + "LFR_benchmark_graph", +] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def caveman_graph(l, k): + """Returns a caveman graph of `l` cliques of size `k`. + + Parameters + ---------- + l : int + Number of cliques + k : int + Size of cliques + + Returns + ------- + G : NetworkX Graph + caveman graph + + Notes + ----- + This returns an undirected graph, it can be converted to a directed + graph using :func:`nx.to_directed`, or a multigraph using + ``nx.MultiGraph(nx.caveman_graph(l, k))``. 
Only the undirected version is
+ described in [1]_ and it is unclear which of the directed
+ generalizations is most useful.
+
+ Examples
+ --------
+ >>> G = nx.caveman_graph(3, 3)
+
+ See also
+ --------
+ connected_caveman_graph
+
+ References
+ ----------
+ .. [1] Watts, D. J. 'Networks, Dynamics, and the Small-World Phenomenon.'
+ Amer. J. Soc. 105, 493-527, 1999.
+ """
+ # l disjoint cliques of size k
+ G = nx.empty_graph(l * k)
+ if k > 1:
+ for start in range(0, l * k, k):
+ edges = itertools.combinations(range(start, start + k), 2)
+ G.add_edges_from(edges)
+ return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def connected_caveman_graph(l, k):
+ """Returns a connected caveman graph of `l` cliques of size `k`.
+
+ The connected caveman graph is formed by creating `l` cliques of size
+ `k`, then a single edge in each clique is rewired to a node in an
+ adjacent clique.
+
+ Parameters
+ ----------
+ l : int
+ number of cliques
+ k : int
+ size of cliques (k at least 2 or NetworkXError is raised)
+
+ Returns
+ -------
+ G : NetworkX Graph
+ connected caveman graph
+
+ Raises
+ ------
+ NetworkXError
+ If the size of cliques `k` is smaller than 2.
+
+ Notes
+ -----
+ This returns an undirected graph, it can be converted to a directed
+ graph using :func:`nx.to_directed`, or a multigraph using
+ ``nx.MultiGraph(nx.caveman_graph(l, k))``. Only the undirected version is
+ described in [1]_ and it is unclear which of the directed
+ generalizations is most useful.
+
+ Examples
+ --------
+ >>> G = nx.connected_caveman_graph(3, 3)
+
+ References
+ ----------
+ .. [1] Watts, D. J. 'Networks, Dynamics, and the Small-World Phenomenon.'
+ Amer. J. Soc. 105, 493-527, 1999.
+ """
+ if k < 2:
+ raise nx.NetworkXError(
+ "The size of cliques in a connected caveman graph must be at least 2."
+ )
+
+ G = nx.caveman_graph(l, k)
+ for start in range(0, l * k, k):
+ G.remove_edge(start, start + 1)
+ G.add_edge(start, (start - 1) % (l * k))
+ return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def relaxed_caveman_graph(l, k, p, seed=None):
+ """Returns a relaxed caveman graph.
+
+ A relaxed caveman graph starts with `l` cliques of size `k`. Edges are
+ then randomly rewired with probability `p` to link different cliques.
+
+ Parameters
+ ----------
+ l : int
+ Number of groups
+ k : int
+ Size of cliques
+ p : float
+ Probability of rewiring each edge.
+ seed : integer, random_state, or None (default)
+ Indicator of random number generation state.
+ See :ref:`Randomness`.
+
+ Returns
+ -------
+ G : NetworkX Graph
+ Relaxed Caveman Graph
+
+ Raises
+ ------
+ NetworkXError
+ If p is not in [0,1]
+
+ Examples
+ --------
+ >>> G = nx.relaxed_caveman_graph(2, 3, 0.1, seed=42)
+
+ References
+ ----------
+ .. [1] Santo Fortunato, Community Detection in Graphs,
+ Physics Reports Volume 486, Issues 3-5, February 2010, Pages 75-174.
+ https://arxiv.org/abs/0906.0612
+ """
+ G = nx.caveman_graph(l, k)
+ nodes = list(G)
+ for u, v in G.edges():
+ if seed.random() < p: # rewire the edge
+ x = seed.choice(nodes)
+ if G.has_edge(u, x):
+ continue
+ G.remove_edge(u, v)
+ G.add_edge(u, x)
+ return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_partition_graph(sizes, p_in, p_out, seed=None, directed=False):
+ """Returns the random partition graph with a partition of sizes.
+
+ A partition graph is a graph of communities with sizes given by
+ the list `sizes`. 
Nodes in the same group are connected with probability
+ p_in and nodes of different groups are connected with probability
+ p_out.
+
+ Parameters
+ ----------
+ sizes : list of ints
+ Sizes of groups
+ p_in : float
+ probability of edges within groups
+ p_out : float
+ probability of edges between groups
+ directed : boolean optional, default=False
+ Whether to create a directed graph
+ seed : integer, random_state, or None (default)
+ Indicator of random number generation state.
+ See :ref:`Randomness`.
+
+ Returns
+ -------
+ G : NetworkX Graph or DiGraph
+ random partition graph of size sum(sizes)
+
+ Raises
+ ------
+ NetworkXError
+ If p_in or p_out is not in [0,1]
+
+ Examples
+ --------
+ >>> G = nx.random_partition_graph([10, 10, 10], 0.25, 0.01)
+ >>> len(G)
+ 30
+ >>> partition = G.graph["partition"]
+ >>> len(partition)
+ 3
+
+ Notes
+ -----
+ This is a generalization of the planted-l-partition described in
+ [1]_. It allows for the creation of groups of any size.
+
+ The partition is stored as a graph attribute 'partition'.
+
+ References
+ ----------
+ .. [1] Santo Fortunato 'Community Detection in Graphs' Physics Reports
+ Volume 486, Issue 3-5 p. 75-174. https://arxiv.org/abs/0906.0612
+ """
+ # Use geometric method for O(n+m) complexity algorithm
+ # partition = nx.community_sets(nx.get_node_attributes(G, 'affiliation'))
+ if not 0.0 <= p_in <= 1.0:
+ raise nx.NetworkXError("p_in must be in [0,1]")
+ if not 0.0 <= p_out <= 1.0:
+ raise nx.NetworkXError("p_out must be in [0,1]")
+
+ # create connection matrix
+ num_blocks = len(sizes)
+ p = [[p_out for s in range(num_blocks)] for r in range(num_blocks)]
+ for r in range(num_blocks):
+ p[r][r] = p_in
+
+ return stochastic_block_model(
+ sizes,
+ p,
+ nodelist=None,
+ seed=seed,
+ directed=directed,
+ selfloops=False,
+ sparse=True,
+ )
+
+
+@py_random_state(4)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def planted_partition_graph(l, k, p_in, p_out, seed=None, directed=False):
+ """Returns the planted l-partition graph.
+
+ This model partitions a graph with n=l*k vertices into
+ l groups with k vertices each. Vertices of the same
+ group are linked with a probability p_in, and vertices
+ of different groups are linked with probability p_out.
+
+ Parameters
+ ----------
+ l : int
+ Number of groups
+ k : int
+ Number of vertices in each group
+ p_in : float
+ probability of connecting vertices within a group
+ p_out : float
+ probability of connecting vertices between groups
+ seed : integer, random_state, or None (default)
+ Indicator of random number generation state.
+ See :ref:`Randomness`.
+ directed : bool, optional (default=False)
+ If True return a directed graph
+
+ Returns
+ -------
+ G : NetworkX Graph or DiGraph
+ planted l-partition graph
+
+ Raises
+ ------
+ NetworkXError
+ If `p_in`, `p_out` are not in `[0, 1]`
+
+ Examples
+ --------
+ >>> G = nx.planted_partition_graph(4, 3, 0.5, 0.1, seed=42)
+
+ See Also
+ --------
+ random_partition_graph
+
+ References
+ ----------
+ .. [1] A. Condon, R.M. Karp, Algorithms for graph partitioning
+ on the planted partition model,
+ Random Struct. Algor. 18 (2001) 116-140.
+
+ .. [2] Santo Fortunato 'Community Detection in Graphs' Physics Reports
+ Volume 486, Issue 3-5 p. 75-174. 
https://arxiv.org/abs/0906.0612
+ """
+ return random_partition_graph([k] * l, p_in, p_out, seed=seed, directed=directed)
+
+
+@py_random_state(6)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def gaussian_random_partition_graph(n, s, v, p_in, p_out, directed=False, seed=None):
+ """Generate a Gaussian random partition graph.
+
+ A Gaussian random partition graph is created by generating k partitions,
+ each with a size drawn from a normal distribution with mean s and variance
+ s/v. Nodes are connected within clusters with probability p_in and
+ between clusters with probability p_out [1]_.
+
+ Parameters
+ ----------
+ n : int
+ Number of nodes in the graph
+ s : float
+ Mean cluster size
+ v : float
+ Shape parameter. The variance of cluster size distribution is s/v.
+ p_in : float
+ Probability of intra cluster connection.
+ p_out : float
+ Probability of inter cluster connection.
+ directed : boolean, optional (default=False)
+ Whether to create a directed graph or not
+ seed : integer, random_state, or None (default)
+ Indicator of random number generation state.
+ See :ref:`Randomness`.
+
+ Returns
+ -------
+ G : NetworkX Graph or DiGraph
+ gaussian random partition graph
+
+ Raises
+ ------
+ NetworkXError
+ If s > n
+ If p_in or p_out is not in [0,1]
+
+ Notes
+ -----
+ Note that the number of partitions depends on s, v and n, and that the
+ last partition may be considerably smaller, as it is sized to simply
+ fill out the nodes [1]_.
+
+ See Also
+ --------
+ random_partition_graph
+
+ Examples
+ --------
+ >>> G = nx.gaussian_random_partition_graph(100, 10, 10, 0.25, 0.1)
+ >>> len(G)
+ 100
+
+ References
+ ----------
+ .. [1] Ulrik Brandes, Marco Gaertler, Dorothea Wagner,
+ Experiments on Graph Clustering Algorithms,
+ In the proceedings of the 11th Europ. Symp. Algorithms, 2003.
+ """
+ if s > n:
+ raise nx.NetworkXError("s must be <= n")
+ assigned = 0
+ sizes = []
+ while True:
+ size = int(seed.gauss(s, s / v + 0.5))
+ if size < 1: # how to handle 0 or negative sizes?
+ continue
+ if assigned + size >= n:
+ sizes.append(n - assigned)
+ break
+ assigned += size
+ sizes.append(size)
+ return random_partition_graph(sizes, p_in, p_out, seed=seed, directed=directed)
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def ring_of_cliques(num_cliques, clique_size):
+ """Defines a "ring of cliques" graph.
+
+ A ring of cliques graph consists of cliques connected through single
+ links. Each clique is a complete graph.
+
+ Parameters
+ ----------
+ num_cliques : int
+ Number of cliques
+ clique_size : int
+ Size of cliques
+
+ Returns
+ -------
+ G : NetworkX Graph
+ ring of cliques graph
+
+ Raises
+ ------
+ NetworkXError
+ If the number of cliques is lower than 2 or
+ if the size of cliques is smaller than 2.
+
+ Examples
+ --------
+ >>> G = nx.ring_of_cliques(8, 4)
+
+ See Also
+ --------
+ connected_caveman_graph
+
+ Notes
+ -----
+ The `connected_caveman_graph` graph removes a link from each clique to
+ connect it with the next clique. Instead, the `ring_of_cliques` graph
+ simply adds the link without removing any link from the cliques.
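+
+ As a consequence, the size of the graph is easy to predict (doctest
+ added for illustration): ``num_cliques * binomial(clique_size, 2)``
+ clique edges plus one ring edge per clique, e.g. ``8 * 6 + 8 = 56``
+ edges on ``8 * 4 = 32`` nodes.
+
+ >>> G = nx.ring_of_cliques(8, 4)
+ >>> G.number_of_nodes()
+ 32
+ >>> G.number_of_edges()
+ 56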
+ """ + if num_cliques < 2: + raise nx.NetworkXError("A ring of cliques must have at least two cliques") + if clique_size < 2: + raise nx.NetworkXError("The cliques must have at least two nodes") + + G = nx.Graph() + for i in range(num_cliques): + edges = itertools.combinations( + range(i * clique_size, i * clique_size + clique_size), 2 + ) + G.add_edges_from(edges) + G.add_edge( + i * clique_size + 1, (i + 1) * clique_size % (num_cliques * clique_size) + ) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def windmill_graph(n, k): + """Generate a windmill graph. + A windmill graph is a graph of `n` cliques each of size `k` that are all + joined at one node. + It can be thought of as taking a disjoint union of `n` cliques of size `k`, + selecting one point from each, and contracting all of the selected points. + Alternatively, one could generate `n` cliques of size `k-1` and one node + that is connected to all other nodes in the graph. + + Parameters + ---------- + n : int + Number of cliques + k : int + Size of cliques + + Returns + ------- + G : NetworkX Graph + windmill graph with n cliques of size k + + Raises + ------ + NetworkXError + If the number of cliques is less than two + If the size of the cliques are less than two + + Examples + -------- + >>> G = nx.windmill_graph(4, 5) + + Notes + ----- + The node labeled `0` will be the node connected to all other nodes. + Note that windmill graphs are usually denoted `Wd(k,n)`, so the parameters + are in the opposite order as the parameters of this method. + """ + if n < 2: + msg = "A windmill graph must have at least two cliques" + raise nx.NetworkXError(msg) + if k < 2: + raise nx.NetworkXError("The cliques must have at least two nodes") + + G = nx.disjoint_union_all( + itertools.chain( + [nx.complete_graph(k)], (nx.complete_graph(k - 1) for _ in range(n - 1)) + ) + ) + G.add_edges_from((0, i) for i in range(k, G.number_of_nodes())) + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def stochastic_block_model( + sizes, p, nodelist=None, seed=None, directed=False, selfloops=False, sparse=True +): + """Returns a stochastic block model graph. + + This model partitions the nodes in blocks of arbitrary sizes, and places + edges between pairs of nodes independently, with a probability that depends + on the blocks. + + Parameters + ---------- + sizes : list of ints + Sizes of blocks + p : list of list of floats + Element (r,s) gives the density of edges going from the nodes + of group r to nodes of group s. + p must match the number of groups (len(sizes) == len(p)), + and it must be symmetric if the graph is undirected. + nodelist : list, optional + The block tags are assigned according to the node identifiers + in nodelist. If nodelist is None, then the ordering is the + range [0,sum(sizes)-1]. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : boolean optional, default=False + Whether to create a directed graph or not. + selfloops : boolean optional, default=False + Whether to include self-loops or not. + sparse: boolean optional, default=True + Use the sparse heuristic to speed up the generator. + + Returns + ------- + g : NetworkX Graph or DiGraph + Stochastic block model graph of size sum(sizes) + + Raises + ------ + NetworkXError + If probabilities are not in [0,1]. + If the probability matrix is not square (directed case). + If the probability matrix is not symmetric (undirected case). 
+ If the sizes list does not match nodelist or the probability matrix. + If nodelist contains duplicate. + + Examples + -------- + >>> sizes = [75, 75, 300] + >>> probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + >>> g = nx.stochastic_block_model(sizes, probs, seed=0) + >>> len(g) + 450 + >>> H = nx.quotient_graph(g, g.graph["partition"], relabel=True) + >>> for v in H.nodes(data=True): + ... print(round(v[1]["density"], 3)) + 0.245 + 0.348 + 0.405 + >>> for v in H.edges(data=True): + ... print(round(1.0 * v[2]["weight"] / (sizes[v[0]] * sizes[v[1]]), 3)) + 0.051 + 0.022 + 0.07 + + See Also + -------- + random_partition_graph + planted_partition_graph + gaussian_random_partition_graph + gnp_random_graph + + References + ---------- + .. [1] Holland, P. W., Laskey, K. B., & Leinhardt, S., + "Stochastic blockmodels: First steps", + Social networks, 5(2), 109-137, 1983. + """ + # Check if dimensions match + if len(sizes) != len(p): + raise nx.NetworkXException("'sizes' and 'p' do not match.") + # Check for probability symmetry (undirected) and shape (directed) + for row in p: + if len(p) != len(row): + raise nx.NetworkXException("'p' must be a square matrix.") + if not directed: + p_transpose = [list(i) for i in zip(*p)] + for i in zip(p, p_transpose): + for j in zip(i[0], i[1]): + if abs(j[0] - j[1]) > 1e-08: + raise nx.NetworkXException("'p' must be symmetric.") + # Check for probability range + for row in p: + for prob in row: + if prob < 0 or prob > 1: + raise nx.NetworkXException("Entries of 'p' not in [0,1].") + # Check for nodelist consistency + if nodelist is not None: + if len(nodelist) != sum(sizes): + raise nx.NetworkXException("'nodelist' and 'sizes' do not match.") + if len(nodelist) != len(set(nodelist)): + raise nx.NetworkXException("nodelist contains duplicate.") + else: + nodelist = range(sum(sizes)) + + # Setup the graph conditionally to the directed switch. + block_range = range(len(sizes)) + if directed: + g = nx.DiGraph() + block_iter = itertools.product(block_range, block_range) + else: + g = nx.Graph() + block_iter = itertools.combinations_with_replacement(block_range, 2) + # Split nodelist in a partition (list of sets). 
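+ # size_cumsum[i] is the index in nodelist at which block i starts,
+ # so consecutive entries delimit the blocks.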
+ size_cumsum = [sum(sizes[0:x]) for x in range(len(sizes) + 1)] + g.graph["partition"] = [ + set(nodelist[size_cumsum[x] : size_cumsum[x + 1]]) + for x in range(len(size_cumsum) - 1) + ] + # Setup nodes and graph name + for block_id, nodes in enumerate(g.graph["partition"]): + for node in nodes: + g.add_node(node, block=block_id) + + g.name = "stochastic_block_model" + + # Test for edge existence + parts = g.graph["partition"] + for i, j in block_iter: + if i == j: + if directed: + if selfloops: + edges = itertools.product(parts[i], parts[i]) + else: + edges = itertools.permutations(parts[i], 2) + else: + edges = itertools.combinations(parts[i], 2) + if selfloops: + edges = itertools.chain(edges, zip(parts[i], parts[i])) + for e in edges: + if seed.random() < p[i][j]: + g.add_edge(*e) + else: + edges = itertools.product(parts[i], parts[j]) + if sparse: + if p[i][j] == 1: # Test edges cases p_ij = 0 or 1 + for e in edges: + g.add_edge(*e) + elif p[i][j] > 0: + while True: + try: + logrand = math.log(seed.random()) + skip = math.floor(logrand / math.log(1 - p[i][j])) + # consume "skip" edges + next(itertools.islice(edges, skip, skip), None) + e = next(edges) + g.add_edge(*e) # __safe + except StopIteration: + break + else: + for e in edges: + if seed.random() < p[i][j]: + g.add_edge(*e) # __safe + return g + + +def _zipf_rv_below(gamma, xmin, threshold, seed): + """Returns a random value chosen from the bounded Zipf distribution. + + Repeatedly draws values from the Zipf distribution until the + threshold is met, then returns that value. + """ + result = nx.utils.zipf_rv(gamma, xmin, seed) + while result > threshold: + result = nx.utils.zipf_rv(gamma, xmin, seed) + return result + + +def _powerlaw_sequence(gamma, low, high, condition, length, max_iters, seed): + """Returns a list of numbers obeying a constrained power law distribution. + + ``gamma`` and ``low`` are the parameters for the Zipf distribution. + + ``high`` is the maximum allowed value for values draw from the Zipf + distribution. For more information, see :func:`_zipf_rv_below`. + + ``condition`` and ``length`` are Boolean-valued functions on + lists. While generating the list, random values are drawn and + appended to the list until ``length`` is satisfied by the created + list. Once ``condition`` is satisfied, the sequence generated in + this way is returned. + + ``max_iters`` indicates the number of times to generate a list + satisfying ``length``. If the number of iterations exceeds this + value, :exc:`~networkx.exception.ExceededMaxIterations` is raised. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + """ + for i in range(max_iters): + seq = [] + while not length(seq): + seq.append(_zipf_rv_below(gamma, low, high, seed)) + if condition(seq): + return seq + raise nx.ExceededMaxIterations("Could not create power law sequence") + + +def _hurwitz_zeta(x, q, tolerance): + """The Hurwitz zeta function, or the Riemann zeta function of two arguments. + + ``x`` must be greater than one and ``q`` must be positive. + + This function repeatedly computes subsequent partial sums until + convergence, as decided by ``tolerance``. 
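+
+ In series form, the quantity being approximated is the standard definition
+
+ .. math::
+
+ \zeta(x, q) = \sum_{k=0}^{\infty} \frac{1}{(k + q)^x}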
+ """ + z = 0 + z_prev = -float("inf") + k = 0 + while abs(z - z_prev) > tolerance: + z_prev = z + z += 1 / ((k + q) ** x) + k += 1 + return z + + +def _generate_min_degree(gamma, average_degree, max_degree, tolerance, max_iters): + """Returns a minimum degree from the given average degree.""" + # Defines zeta function whether or not Scipy is available + try: + from scipy.special import zeta + except ImportError: + + def zeta(x, q): + return _hurwitz_zeta(x, q, tolerance) + + min_deg_top = max_degree + min_deg_bot = 1 + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + itrs = 0 + mid_avg_deg = 0 + while abs(mid_avg_deg - average_degree) > tolerance: + if itrs > max_iters: + raise nx.ExceededMaxIterations("Could not match average_degree") + mid_avg_deg = 0 + for x in range(int(min_deg_mid), max_degree + 1): + mid_avg_deg += (x ** (-gamma + 1)) / zeta(gamma, min_deg_mid) + if mid_avg_deg > average_degree: + min_deg_top = min_deg_mid + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + else: + min_deg_bot = min_deg_mid + min_deg_mid = (min_deg_top - min_deg_bot) / 2 + min_deg_bot + itrs += 1 + # return int(min_deg_mid + 0.5) + return round(min_deg_mid) + + +def _generate_communities(degree_seq, community_sizes, mu, max_iters, seed): + """Returns a list of sets, each of which represents a community. + + ``degree_seq`` is the degree sequence that must be met by the + graph. + + ``community_sizes`` is the community size distribution that must be + met by the generated list of sets. + + ``mu`` is a float in the interval [0, 1] indicating the fraction of + intra-community edges incident to each node. + + ``max_iters`` is the number of times to try to add a node to a + community. This must be greater than the length of + ``degree_seq``, otherwise this function will always fail. If + the number of iterations exceeds this value, + :exc:`~networkx.exception.ExceededMaxIterations` is raised. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + The communities returned by this are sets of integers in the set {0, + ..., *n* - 1}, where *n* is the length of ``degree_seq``. + + """ + # This assumes the nodes in the graph will be natural numbers. + result = [set() for _ in community_sizes] + n = len(degree_seq) + free = list(range(n)) + for i in range(max_iters): + v = free.pop() + c = seed.choice(range(len(community_sizes))) + # s = int(degree_seq[v] * (1 - mu) + 0.5) + s = round(degree_seq[v] * (1 - mu)) + # If the community is large enough, add the node to the chosen + # community. Otherwise, return it to the list of unaffiliated + # nodes. + if s < community_sizes[c]: + result[c].add(v) + else: + free.append(v) + # If the community is too big, remove a node from it. + if len(result[c]) > community_sizes[c]: + free.append(result[c].pop()) + if not free: + return result + msg = "Could not assign communities; try increasing min_community" + raise nx.ExceededMaxIterations(msg) + + +@py_random_state(11) +@nx._dispatchable(graphs=None, returns_graph=True) +def LFR_benchmark_graph( + n, + tau1, + tau2, + mu, + average_degree=None, + min_degree=None, + max_degree=None, + min_community=None, + max_community=None, + tol=1.0e-7, + max_iters=500, + seed=None, +): + r"""Returns the LFR benchmark graph. + + This algorithm proceeds as follows: + + 1) Find a degree sequence with a power law distribution, and minimum + value ``min_degree``, which has approximate average degree + ``average_degree``. 
This is accomplished by either + + a) specifying ``min_degree`` and not ``average_degree``, + b) specifying ``average_degree`` and not ``min_degree``, in which + case a suitable minimum degree will be found. + + ``max_degree`` can also be specified, otherwise it will be set to + ``n``. Each node *u* will have $\mu \mathrm{deg}(u)$ edges + joining it to nodes in communities other than its own and $(1 - + \mu) \mathrm{deg}(u)$ edges joining it to nodes in its own + community. + 2) Generate community sizes according to a power law distribution + with exponent ``tau2``. If ``min_community`` and + ``max_community`` are not specified they will be selected to be + ``min_degree`` and ``max_degree``, respectively. Community sizes + are generated until the sum of their sizes equals ``n``. + 3) Each node will be randomly assigned a community with the + condition that the community is large enough for the node's + intra-community degree, $(1 - \mu) \mathrm{deg}(u)$ as + described in step 2. If a community grows too large, a random node + will be selected for reassignment to a new community, until all + nodes have been assigned a community. + 4) Each node *u* then adds $(1 - \mu) \mathrm{deg}(u)$ + intra-community edges and $\mu \mathrm{deg}(u)$ inter-community + edges. + + Parameters + ---------- + n : int + Number of nodes in the created graph. + + tau1 : float + Power law exponent for the degree distribution of the created + graph. This value must be strictly greater than one. + + tau2 : float + Power law exponent for the community size distribution in the + created graph. This value must be strictly greater than one. + + mu : float + Fraction of inter-community edges incident to each node. This + value must be in the interval [0, 1]. + + average_degree : float + Desired average degree of nodes in the created graph. This value + must be in the interval [0, *n*]. Exactly one of this and + ``min_degree`` must be specified, otherwise a + :exc:`NetworkXError` is raised. + + min_degree : int + Minimum degree of nodes in the created graph. This value must be + in the interval [0, *n*]. Exactly one of this and + ``average_degree`` must be specified, otherwise a + :exc:`NetworkXError` is raised. + + max_degree : int + Maximum degree of nodes in the created graph. If not specified, + this is set to ``n``, the total number of nodes in the graph. + + min_community : int + Minimum size of communities in the graph. If not specified, this + is set to ``min_degree``. + + max_community : int + Maximum size of communities in the graph. If not specified, this + is set to ``n``, the total number of nodes in the graph. + + tol : float + Tolerance when comparing floats, specifically when comparing + average degree values. + + max_iters : int + Maximum number of iterations to try to create the community sizes, + degree distribution, and community affiliations. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : NetworkX graph + The LFR benchmark graph generated according to the specified + parameters. + + Each node in the graph has a node attribute ``'community'`` that + stores the community (that is, the set of nodes) that includes + it. + + Raises + ------ + NetworkXError + If any of the parameters do not meet their upper and lower bounds: + + - ``tau1`` and ``tau2`` must be strictly greater than 1. + - ``mu`` must be in [0, 1]. + - ``max_degree`` must be in {1, ..., *n*}. 
+ - ``min_community`` and ``max_community`` must be in {0, ..., + *n*}. + + If not exactly one of ``average_degree`` and ``min_degree`` is + specified. + + If ``min_degree`` is not specified and a suitable ``min_degree`` + cannot be found. + + ExceededMaxIterations + If a valid degree sequence cannot be created within + ``max_iters`` number of iterations. + + If a valid set of community sizes cannot be created within + ``max_iters`` number of iterations. + + If a valid community assignment cannot be created within ``10 * + n * max_iters`` number of iterations. + + Examples + -------- + Basic usage:: + + >>> from networkx.generators.community import LFR_benchmark_graph + >>> n = 250 + >>> tau1 = 3 + >>> tau2 = 1.5 + >>> mu = 0.1 + >>> G = LFR_benchmark_graph( + ... n, tau1, tau2, mu, average_degree=5, min_community=20, seed=10 + ... ) + + Continuing the example above, you can get the communities from the + node attributes of the graph:: + + >>> communities = {frozenset(G.nodes[v]["community"]) for v in G} + + Notes + ----- + This algorithm differs slightly from the original way it was + presented in [1]. + + 1) Rather than connecting the graph via a configuration model then + rewiring to match the intra-community and inter-community + degrees, we do this wiring explicitly at the end, which should be + equivalent. + 2) The code posted on the author's website [2] calculates the random + power law distributed variables and their average using + continuous approximations, whereas we use the discrete + distributions here as both degree and community size are + discrete. + + Though the authors describe the algorithm as quite robust, testing + during development indicates that a somewhat narrower parameter set + is likely to successfully produce a graph. Some suggestions have + been provided in the event of exceptions. + + References + ---------- + .. [1] "Benchmark graphs for testing community detection algorithms", + Andrea Lancichinetti, Santo Fortunato, and Filippo Radicchi, + Phys. Rev. E 78, 046110 2008 + .. [2] https://www.santofortunato.net/resources + + """ + # Perform some basic parameter validation. + if not tau1 > 1: + raise nx.NetworkXError("tau1 must be greater than one") + if not tau2 > 1: + raise nx.NetworkXError("tau2 must be greater than one") + if not 0 <= mu <= 1: + raise nx.NetworkXError("mu must be in the interval [0, 1]") + + # Validate parameters for generating the degree sequence. + if max_degree is None: + max_degree = n + elif not 0 < max_degree <= n: + raise nx.NetworkXError("max_degree must be in the interval (0, n]") + if not ((min_degree is None) ^ (average_degree is None)): + raise nx.NetworkXError( + "Must assign exactly one of min_degree and average_degree" + ) + if min_degree is None: + min_degree = _generate_min_degree( + tau1, average_degree, max_degree, tol, max_iters + ) + + # Generate a degree sequence with a power law distribution. + low, high = min_degree, max_degree + + def condition(seq): + return sum(seq) % 2 == 0 + + def length(seq): + return len(seq) >= n + + deg_seq = _powerlaw_sequence(tau1, low, high, condition, length, max_iters, seed) + + # Validate parameters for generating the community size sequence. + if min_community is None: + min_community = min(deg_seq) + if max_community is None: + max_community = max(deg_seq) + + # Generate a community size sequence with a power law distribution. + # + # TODO The original code incremented the number of iterations each + # time a new Zipf random value was drawn from the distribution. 
This + # differed from the way the number of iterations was incremented in + # `_powerlaw_degree_sequence`, so this code was changed to match + # that one. As a result, this code is allowed many more chances to + # generate a valid community size sequence. + low, high = min_community, max_community + + def condition(seq): + return sum(seq) == n + + def length(seq): + return sum(seq) >= n + + comms = _powerlaw_sequence(tau2, low, high, condition, length, max_iters, seed) + + # Generate the communities based on the given degree sequence and + # community sizes. + max_iters *= 10 * n + communities = _generate_communities(deg_seq, comms, mu, max_iters, seed) + + # Finally, generate the benchmark graph based on the given + # communities, joining nodes according to the intra- and + # inter-community degrees. + G = nx.Graph() + G.add_nodes_from(range(n)) + for c in communities: + for u in c: + while G.degree(u) < round(deg_seq[u] * (1 - mu)): + v = seed.choice(list(c)) + G.add_edge(u, v) + while G.degree(u) < deg_seq[u]: + v = seed.choice(range(n)) + if v not in c: + G.add_edge(u, v) + G.nodes[u]["community"] = c + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/degree_seq.py b/.venv/lib/python3.12/site-packages/networkx/generators/degree_seq.py new file mode 100644 index 0000000..d920f85 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/degree_seq.py @@ -0,0 +1,867 @@ +"""Generate graphs with a given degree sequence or expected degree sequence.""" + +import heapq +import math +from itertools import chain, combinations, zip_longest +from operator import itemgetter + +import networkx as nx +from networkx.utils import py_random_state, random_weighted_sample + +__all__ = [ + "configuration_model", + "directed_configuration_model", + "expected_degree_graph", + "havel_hakimi_graph", + "directed_havel_hakimi_graph", + "degree_sequence_tree", + "random_degree_sequence_graph", +] + +chaini = chain.from_iterable + + +def _to_stublist(degree_sequence): + """Returns a list of degree-repeated node numbers. + + ``degree_sequence`` is a list of nonnegative integers representing + the degrees of nodes in a graph. + + This function returns a list of node numbers with multiplicities + according to the given degree sequence. For example, if the first + element of ``degree_sequence`` is ``3``, then the first node number, + ``0``, will appear at the head of the returned list three times. The + node numbers are assumed to be the numbers zero through + ``len(degree_sequence) - 1``. + + Examples + -------- + + >>> degree_sequence = [1, 2, 3] + >>> _to_stublist(degree_sequence) + [0, 1, 1, 2, 2, 2] + + If a zero appears in the sequence, that means the node exists but + has degree zero, so that number will be skipped in the returned + list:: + + >>> degree_sequence = [2, 0, 1] + >>> _to_stublist(degree_sequence) + [0, 0, 2] + + """ + return list(chaini([n] * d for n, d in enumerate(degree_sequence))) + + +def _configuration_model( + deg_sequence, create_using, directed=False, in_deg_sequence=None, seed=None +): + """Helper function for generating either undirected or directed + configuration model graphs. + + ``deg_sequence`` is a list of nonnegative integers representing the + degree of the node whose label is the index of the list element. + + ``create_using`` see :func:`~networkx.empty_graph`. + + ``directed`` and ``in_deg_sequence`` are required if you want the + returned graph to be generated using the directed configuration + model algorithm. 
If ``directed`` is ``False``, then ``deg_sequence`` + is interpreted as the degree sequence of an undirected graph and + ``in_deg_sequence`` is ignored. Otherwise, if ``directed`` is + ``True``, then ``deg_sequence`` is interpreted as the out-degree + sequence and ``in_deg_sequence`` as the in-degree sequence of a + directed graph. + + .. note:: + + ``deg_sequence`` and ``in_deg_sequence`` need not be the same + length. + + ``seed`` is a random.Random or numpy.random.RandomState instance + + This function returns a graph, directed if and only if ``directed`` + is ``True``, generated according to the configuration model + algorithm. For more information on the algorithm, see the + :func:`configuration_model` or :func:`directed_configuration_model` + functions. + + """ + n = len(deg_sequence) + G = nx.empty_graph(n, create_using) + # If empty, return the null graph immediately. + if n == 0: + return G + # Build a list of available degree-repeated nodes. For example, + # for degree sequence [3, 2, 1, 1, 1], the "stub list" is + # initially [0, 0, 0, 1, 1, 2, 3, 4], that is, node 0 has degree + # 3 and thus is repeated 3 times, etc. + # + # Also, shuffle the stub list in order to get a random sequence of + # node pairs. + if directed: + pairs = zip_longest(deg_sequence, in_deg_sequence, fillvalue=0) + # Unzip the list of pairs into a pair of lists. + out_deg, in_deg = zip(*pairs) + + out_stublist = _to_stublist(out_deg) + in_stublist = _to_stublist(in_deg) + + seed.shuffle(out_stublist) + seed.shuffle(in_stublist) + else: + stublist = _to_stublist(deg_sequence) + # Choose a random balanced bipartition of the stublist, which + # gives a random pairing of nodes. In this implementation, we + # shuffle the list and then split it in half. + n = len(stublist) + half = n // 2 + seed.shuffle(stublist) + out_stublist, in_stublist = stublist[:half], stublist[half:] + G.add_edges_from(zip(out_stublist, in_stublist)) + return G + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def configuration_model(deg_sequence, create_using=None, seed=None): + """Returns a random graph with the given degree sequence. + + The configuration model generates a random pseudograph (graph with + parallel edges and self loops) by randomly assigning edges to + match the given degree sequence. + + Parameters + ---------- + deg_sequence : list of nonnegative integers + Each list entry corresponds to the degree of a node. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : MultiGraph + A graph with the specified degree sequence. + Nodes are labeled starting at 0 with an index + corresponding to the position in deg_sequence. + + Raises + ------ + NetworkXError + If the degree sequence does not have an even sum. + + See Also + -------- + is_graphical + + Notes + ----- + As described by Newman [1]_. + + A non-graphical degree sequence (not realizable by some simple + graph) is allowed since this function returns graphs with self + loops and parallel edges. An exception is raised if the degree + sequence does not have an even sum. + + This configuration model construction process can lead to + duplicate edges and loops. 
You can remove the self-loops and
+    parallel edges (see below), which will likely result in a graph
+    that doesn't have the exact degree sequence specified.
+
+    The density of self-loops and parallel edges tends to decrease as
+    the number of nodes increases. However, typically the number of
+    self-loops will approach a Poisson distribution with a nonzero mean,
+    and similarly for the number of parallel edges. Consider a node
+    with *k* stubs. The probability of being joined to another stub of
+    the same node is basically (*k* - 1) / *N*, where *k* is the
+    degree and *N* is the number of nodes. So the probability of a
+    self-loop scales like *c* / *N* for some constant *c*. As *N* grows,
+    this means we expect *c* self-loops. Similarly for parallel edges.
+
+    References
+    ----------
+    .. [1] M.E.J. Newman, "The structure and function of complex networks",
+       SIAM REVIEW 45-2, pp 167-256, 2003.
+
+    Examples
+    --------
+    You can create a degree sequence following a particular distribution
+    by using one of the distribution functions in
+    :mod:`~networkx.utils.random_sequence` (or one of your own). For
+    example, to create an undirected multigraph on one hundred nodes
+    with degree sequence chosen from the power law distribution:
+
+    >>> sequence = nx.random_powerlaw_tree_sequence(100, tries=5000)
+    >>> G = nx.configuration_model(sequence)
+    >>> len(G)
+    100
+    >>> actual_degrees = [d for v, d in G.degree()]
+    >>> actual_degrees == sequence
+    True
+
+    The returned graph is a multigraph, which may have parallel
+    edges. To remove any parallel edges from the returned graph:
+
+    >>> G = nx.Graph(G)
+
+    Similarly, to remove self-loops:
+
+    >>> G.remove_edges_from(nx.selfloop_edges(G))
+
+    """
+    if sum(deg_sequence) % 2 != 0:
+        msg = "Invalid degree sequence: sum of degrees must be even, not odd"
+        raise nx.NetworkXError(msg)
+
+    G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
+    if G.is_directed():
+        raise nx.NetworkXNotImplemented("not implemented for directed graphs")
+
+    G = _configuration_model(deg_sequence, G, seed=seed)
+
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def directed_configuration_model(
+    in_degree_sequence, out_degree_sequence, create_using=None, seed=None
+):
+    """Returns a directed random graph with the given degree sequences.
+
+    The configuration model generates a random directed pseudograph
+    (graph with parallel edges and self loops) by randomly assigning
+    edges to match the given degree sequences.
+
+    Parameters
+    ----------
+    in_degree_sequence : list of nonnegative integers
+        Each list entry corresponds to the in-degree of a node.
+    out_degree_sequence : list of nonnegative integers
+        Each list entry corresponds to the out-degree of a node.
+    create_using : NetworkX graph constructor, optional (default MultiDiGraph)
+        Graph type to create. If graph instance, then cleared before populated.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G : MultiDiGraph
+        A graph with the specified degree sequences.
+        Nodes are labeled starting at 0 with an index
+        corresponding to the position in deg_sequence.
+
+    Raises
+    ------
+    NetworkXError
+        If the degree sequences do not have the same sum.
+
+    See Also
+    --------
+    configuration_model
+
+    Notes
+    -----
+    Algorithm as described by Newman [1]_.
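+
+    Internally, the ``_configuration_model`` helper above builds one
+    stub list from the out-degree sequence and one from the in-degree
+    sequence, shuffles each independently, and zips them together to
+    form the arcs.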
+
+    A non-graphical degree sequence (not realizable by some simple
+    graph) is allowed since this function returns graphs with self
+    loops and parallel edges. An exception is raised if the degree
+    sequences do not have the same sum.
+
+    This configuration model construction process can lead to
+    duplicate edges and loops. You can remove the self-loops and
+    parallel edges (see below), which will likely result in a graph
+    that doesn't have the exact degree sequence specified. This
+    "finite-size effect" decreases as the size of the graph increases.
+
+    References
+    ----------
+    .. [1] Newman, M. E. J. and Strogatz, S. H. and Watts, D. J.
+       Random graphs with arbitrary degree distributions and their applications
+       Phys. Rev. E, 64, 026118 (2001)
+
+    Examples
+    --------
+    One can modify the in- and out-degree sequences from an existing
+    directed graph in order to create a new directed graph. For example,
+    here we modify the directed path graph:
+
+    >>> D = nx.DiGraph([(0, 1), (1, 2), (2, 3)])
+    >>> din = list(d for n, d in D.in_degree())
+    >>> dout = list(d for n, d in D.out_degree())
+    >>> din.append(1)
+    >>> dout[0] = 2
+    >>> # We now expect an edge from node 0 to a new node, node 3.
+    ... D = nx.directed_configuration_model(din, dout)
+
+    The returned graph is a directed multigraph, which may have parallel
+    edges. To remove any parallel edges from the returned graph:
+
+    >>> D = nx.DiGraph(D)
+
+    Similarly, to remove self-loops:
+
+    >>> D.remove_edges_from(nx.selfloop_edges(D))
+
+    """
+    if sum(in_degree_sequence) != sum(out_degree_sequence):
+        msg = "Invalid degree sequences: sequences must have equal sums"
+        raise nx.NetworkXError(msg)
+
+    if create_using is None:
+        create_using = nx.MultiDiGraph
+
+    G = _configuration_model(
+        out_degree_sequence,
+        create_using,
+        directed=True,
+        in_deg_sequence=in_degree_sequence,
+        seed=seed,
+    )
+
+    return G
+
+
+@py_random_state(1)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def expected_degree_graph(w, seed=None, selfloops=True):
+    r"""Returns a random graph with given expected degrees.
+
+    Given a sequence of expected degrees $W=(w_0,w_1,\ldots,w_{n-1})$
+    of length $n$ this algorithm assigns an edge between node $u$ and
+    node $v$ with probability
+
+    .. math::
+
+       p_{uv} = \frac{w_u w_v}{\sum_k w_k} .
+
+    Parameters
+    ----------
+    w : list
+        The list of expected degrees.
+    selfloops: bool (default=True)
+        Set to False to remove the possibility of self-loop edges.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    Graph
+
+    Examples
+    --------
+    >>> z = [10 for i in range(100)]
+    >>> G = nx.expected_degree_graph(z)
+
+    Notes
+    -----
+    The nodes have integer labels corresponding to their index in the
+    expected degrees input sequence.
+
+    The complexity of this algorithm is $\mathcal{O}(n+m)$ where $n$ is the
+    number of nodes and $m$ is the expected number of edges.
+
+    The model in [1]_ includes the possibility of self-loop edges.
+    Set selfloops=False to produce a graph without self loops.
+
+    For finite graphs this model doesn't produce exactly the given
+    expected degree sequence. Instead the expected degrees are as
+    follows.
+
+    For the case without self loops (selfloops=False),
+
+    .. math::
+
+       E[deg(u)] = \sum_{v \ne u} p_{uv}
+                 = w_u \left( 1 - \frac{w_u}{\sum_k w_k} \right) .
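+
+    For example, with ``w = [10] * 100`` and ``selfloops=False``, each
+    node has expected degree ``10 * (1 - 10/1000) = 9.9`` rather than
+    exactly 10.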
+
+    NetworkX uses the standard convention that a self-loop edge counts 2
+    in the degree of a node, so with self loops (selfloops=True),
+
+    .. math::
+
+       E[deg(u)] = \sum_{v \ne u} p_{uv} + 2 p_{uu}
+                 = w_u \left( 1 + \frac{w_u}{\sum_k w_k} \right) .
+
+    References
+    ----------
+    .. [1] Fan Chung and L. Lu, Connected components in random graphs with
+       given expected degree sequences, Ann. Combinatorics, 6,
+       pp. 125-145, 2002.
+    .. [2] Joel Miller and Aric Hagberg,
+       Efficient generation of networks with given expected degrees,
+       in Algorithms and Models for the Web-Graph (WAW 2011),
+       Alan Frieze, Paul Horn, and Paweł Prałat (Eds), LNCS 6732,
+       pp. 115-126, 2011.
+    """
+    n = len(w)
+    G = nx.empty_graph(n)
+
+    # If there are no nodes or no edges in the graph, return the empty graph.
+    if n == 0 or max(w) == 0:
+        return G
+
+    rho = 1 / sum(w)
+    # Sort the weights in decreasing order. The original order of the
+    # weights dictates the order of the (integer) node labels, so we
+    # need to remember the permutation applied in the sorting.
+    order = sorted(enumerate(w), key=itemgetter(1), reverse=True)
+    mapping = {c: u for c, (u, v) in enumerate(order)}
+    seq = [v for u, v in order]
+    last = n
+    if not selfloops:
+        last -= 1
+    for u in range(last):
+        v = u
+        if not selfloops:
+            v += 1
+        factor = seq[u] * rho
+        p = min(seq[v] * factor, 1)
+        while v < n and p > 0:
+            if p != 1:
+                r = seed.random()
+                v += math.floor(math.log(r, 1 - p))
+            if v < n:
+                q = min(seq[v] * factor, 1)
+                if seed.random() < q / p:
+                    G.add_edge(mapping[u], mapping[v])
+                v += 1
+                p = q
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def havel_hakimi_graph(deg_sequence, create_using=None):
+    """Returns a simple graph with given degree sequence constructed
+    using the Havel-Hakimi algorithm.
+
+    Parameters
+    ----------
+    deg_sequence: list of integers
+        Each integer corresponds to the degree of a node (need not be sorted).
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Directed graphs are not allowed.
+
+    Raises
+    ------
+    NetworkXException
+        For a non-graphical degree sequence (i.e. one
+        not realizable by some simple graph).
+
+    Notes
+    -----
+    The Havel-Hakimi algorithm constructs a simple graph by
+    successively connecting the node of highest degree to other nodes
+    of highest degree, resorting remaining nodes by degree, and
+    repeating the process. The resulting graph has a high
+    degree-assortativity. Nodes are labeled 0, ..., len(deg_sequence) - 1,
+    corresponding to their position in deg_sequence.
+
+    The basic algorithm is from Hakimi [1]_ and was generalized by
+    Kleitman and Wang [2]_.
+
+    References
+    ----------
+    .. [1] Hakimi S., On Realizability of a Set of Integers as
+       Degrees of the Vertices of a Linear Graph. I,
+       Journal of SIAM, 10(3), pp. 496-506 (1962)
+    .. [2] Kleitman D.J. and Wang D.L.
+       Algorithms for Constructing Graphs and Digraphs with Given Valences
+       and Factors, Discrete Mathematics, 6(1), pp.
79-88 (1973) + """ + if not nx.is_graphical(deg_sequence): + raise nx.NetworkXError("Invalid degree sequence") + + p = len(deg_sequence) + G = nx.empty_graph(p, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed graphs are not supported") + num_degs = [[] for i in range(p)] + dmax, dsum, n = 0, 0, 0 + for d in deg_sequence: + # Process only the non-zero integers + if d > 0: + num_degs[d].append(n) + dmax, dsum, n = max(dmax, d), dsum + d, n + 1 + # Return graph if no edges + if n == 0: + return G + + modstubs = [(0, 0)] * (dmax + 1) + # Successively reduce degree sequence by removing the maximum degree + while n > 0: + # Retrieve the maximum degree in the sequence + while len(num_degs[dmax]) == 0: + dmax -= 1 + # If there are not enough stubs to connect to, then the sequence is + # not graphical + if dmax > n - 1: + raise nx.NetworkXError("Non-graphical integer sequence") + + # Remove largest stub in list + source = num_degs[dmax].pop() + n -= 1 + # Reduce the next dmax largest stubs + mslen = 0 + k = dmax + for i in range(dmax): + while len(num_degs[k]) == 0: + k -= 1 + target = num_degs[k].pop() + G.add_edge(source, target) + n -= 1 + if k > 1: + modstubs[mslen] = (k - 1, target) + mslen += 1 + # Add back to the list any nonzero stubs that were removed + for i in range(mslen): + (stubval, stubtarget) = modstubs[i] + num_degs[stubval].append(stubtarget) + n += 1 + + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): + """Returns a directed graph with the given degree sequences. + + Parameters + ---------- + in_deg_sequence : list of integers + Each list entry corresponds to the in-degree of a node. + out_deg_sequence : list of integers + Each list entry corresponds to the out-degree of a node. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : DiGraph + A graph with the specified degree sequences. + Nodes are labeled starting at 0 with an index + corresponding to the position in deg_sequence + + Raises + ------ + NetworkXError + If the degree sequences are not digraphical. + + See Also + -------- + configuration_model + + Notes + ----- + Algorithm as described by Kleitman and Wang [1]_. + + References + ---------- + .. [1] D.J. Kleitman and D.L. Wang + Algorithms for Constructing Graphs and Digraphs with Given Valences + and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973) + """ + in_deg_sequence = nx.utils.make_list_of_ints(in_deg_sequence) + out_deg_sequence = nx.utils.make_list_of_ints(out_deg_sequence) + + # Process the sequences and form two heaps to store degree pairs with + # either zero or nonzero out degrees + sumin, sumout = 0, 0 + nin, nout = len(in_deg_sequence), len(out_deg_sequence) + maxn = max(nin, nout) + G = nx.empty_graph(maxn, create_using, default=nx.DiGraph) + if maxn == 0: + return G + maxin = 0 + stubheap, zeroheap = [], [] + for n in range(maxn): + in_deg, out_deg = 0, 0 + if n < nout: + out_deg = out_deg_sequence[n] + if n < nin: + in_deg = in_deg_sequence[n] + if in_deg < 0 or out_deg < 0: + raise nx.NetworkXError( + "Invalid degree sequences. Sequence values must be positive." 
+ ) + sumin, sumout, maxin = sumin + in_deg, sumout + out_deg, max(maxin, in_deg) + if in_deg > 0: + stubheap.append((-1 * out_deg, -1 * in_deg, n)) + elif out_deg > 0: + zeroheap.append((-1 * out_deg, n)) + if sumin != sumout: + raise nx.NetworkXError( + "Invalid degree sequences. Sequences must have equal sums." + ) + heapq.heapify(stubheap) + heapq.heapify(zeroheap) + + modstubs = [(0, 0, 0)] * (maxin + 1) + # Successively reduce degree sequence by removing the maximum + while stubheap: + # Remove first value in the sequence with a non-zero in degree + (freeout, freein, target) = heapq.heappop(stubheap) + freein *= -1 + if freein > len(stubheap) + len(zeroheap): + raise nx.NetworkXError("Non-digraphical integer sequence") + + # Attach arcs from the nodes with the most stubs + mslen = 0 + for i in range(freein): + if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0][0]): + (stubout, stubsource) = heapq.heappop(zeroheap) + stubin = 0 + else: + (stubout, stubin, stubsource) = heapq.heappop(stubheap) + if stubout == 0: + raise nx.NetworkXError("Non-digraphical integer sequence") + G.add_edge(stubsource, target) + # Check if source is now totally connected + if stubout + 1 < 0 or stubin < 0: + modstubs[mslen] = (stubout + 1, stubin, stubsource) + mslen += 1 + + # Add the nodes back to the heaps that still have available stubs + for i in range(mslen): + stub = modstubs[i] + if stub[1] < 0: + heapq.heappush(stubheap, stub) + else: + heapq.heappush(zeroheap, (stub[0], stub[2])) + if freeout < 0: + heapq.heappush(zeroheap, (freeout, target)) + + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def degree_sequence_tree(deg_sequence, create_using=None): + """Make a tree for the given degree sequence. + + A tree has #nodes-#edges=1 so + the degree sequence must have + len(deg_sequence)-sum(deg_sequence)/2=1 + """ + # The sum of the degree sequence must be even (for any undirected graph). + degree_sum = sum(deg_sequence) + if degree_sum % 2 != 0: + msg = "Invalid degree sequence: sum of degrees must be even, not odd" + raise nx.NetworkXError(msg) + if len(deg_sequence) - degree_sum // 2 != 1: + msg = ( + "Invalid degree sequence: tree must have number of nodes equal" + " to one less than the number of edges" + ) + raise nx.NetworkXError(msg) + G = nx.empty_graph(0, create_using) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + # Sort all degrees greater than 1 in decreasing order. + # + # TODO Does this need to be sorted in reverse order? + deg = sorted((s for s in deg_sequence if s > 1), reverse=True) + + # make path graph as backbone + n = len(deg) + 2 + nx.add_path(G, range(n)) + last = n + + # add the leaves + for source in range(1, n - 1): + nedges = deg.pop() - 2 + for target in range(last, last + nedges): + G.add_edge(source, target) + last += nedges + + # in case we added one too many + if len(G) > len(deg_sequence): + G.remove_node(0) + return G + + +@py_random_state(1) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_degree_sequence_graph(sequence, seed=None, tries=10): + r"""Returns a simple random graph with the given degree sequence. + + If the maximum degree $d_m$ in the sequence is $O(m^{1/4})$ then the + algorithm produces almost uniform random graphs in $O(m d_m)$ time + where $m$ is the number of edges. + + Parameters + ---------- + sequence : list of integers + Sequence of degrees + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
+ tries : int, optional + Maximum number of tries to create a graph + + Returns + ------- + G : Graph + A graph with the specified degree sequence. + Nodes are labeled starting at 0 with an index + corresponding to the position in the sequence. + + Raises + ------ + NetworkXUnfeasible + If the degree sequence is not graphical. + NetworkXError + If a graph is not produced in specified number of tries + + See Also + -------- + is_graphical, configuration_model + + Notes + ----- + The generator algorithm [1]_ is not guaranteed to produce a graph. + + References + ---------- + .. [1] Moshen Bayati, Jeong Han Kim, and Amin Saberi, + A sequential algorithm for generating random graphs. + Algorithmica, Volume 58, Number 4, 860-910, + DOI: 10.1007/s00453-009-9340-1 + + Examples + -------- + >>> sequence = [1, 2, 2, 3] + >>> G = nx.random_degree_sequence_graph(sequence, seed=42) + >>> sorted(d for n, d in G.degree()) + [1, 2, 2, 3] + """ + DSRG = DegreeSequenceRandomGraph(sequence, seed) + for try_n in range(tries): + try: + return DSRG.generate() + except nx.NetworkXUnfeasible: + pass + raise nx.NetworkXError(f"failed to generate graph in {tries} tries") + + +class DegreeSequenceRandomGraph: + # class to generate random graphs with a given degree sequence + # use random_degree_sequence_graph() + def __init__(self, degree, rng): + self.rng = rng + self.degree = list(degree) + if not nx.is_graphical(self.degree): + raise nx.NetworkXUnfeasible("degree sequence is not graphical") + # node labels are integers 0,...,n-1 + self.m = sum(self.degree) / 2.0 # number of edges + try: + self.dmax = max(self.degree) # maximum degree + except ValueError: + self.dmax = 0 + + def generate(self): + # remaining_degree is mapping from int->remaining degree + self.remaining_degree = dict(enumerate(self.degree)) + # add all nodes to make sure we get isolated nodes + self.graph = nx.Graph() + self.graph.add_nodes_from(self.remaining_degree) + # remove zero degree nodes + for n, d in list(self.remaining_degree.items()): + if d == 0: + del self.remaining_degree[n] + if len(self.remaining_degree) > 0: + # build graph in three phases according to how many unmatched edges + self.phase1() + self.phase2() + self.phase3() + return self.graph + + def update_remaining(self, u, v, aux_graph=None): + # decrement remaining nodes, modify auxiliary graph if in phase3 + if aux_graph is not None: + # remove edges from auxiliary graph + aux_graph.remove_edge(u, v) + if self.remaining_degree[u] == 1: + del self.remaining_degree[u] + if aux_graph is not None: + aux_graph.remove_node(u) + else: + self.remaining_degree[u] -= 1 + if self.remaining_degree[v] == 1: + del self.remaining_degree[v] + if aux_graph is not None: + aux_graph.remove_node(v) + else: + self.remaining_degree[v] -= 1 + + def p(self, u, v): + # degree probability + return 1 - self.degree[u] * self.degree[v] / (4.0 * self.m) + + def q(self, u, v): + # remaining degree probability + norm = max(self.remaining_degree.values()) ** 2 + return self.remaining_degree[u] * self.remaining_degree[v] / norm + + def suitable_edge(self): + """Returns True if and only if an arbitrary remaining node can + potentially be joined with some other remaining node. 
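+
+        Used by ``phase3`` to detect the case where every pair of
+        remaining nodes is already adjacent, so no suitable edge
+        remains and generation must restart.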
+ + """ + nodes = iter(self.remaining_degree) + u = next(nodes) + return any(v not in self.graph[u] for v in nodes) + + def phase1(self): + # choose node pairs from (degree) weighted distribution + rem_deg = self.remaining_degree + while sum(rem_deg.values()) >= 2 * self.dmax**2: + u, v = sorted(random_weighted_sample(rem_deg, 2, self.rng)) + if self.graph.has_edge(u, v): + continue + if self.rng.random() < self.p(u, v): # accept edge + self.graph.add_edge(u, v) + self.update_remaining(u, v) + + def phase2(self): + # choose remaining nodes uniformly at random and use rejection sampling + remaining_deg = self.remaining_degree + rng = self.rng + while len(remaining_deg) >= 2 * self.dmax: + while True: + u, v = sorted(rng.sample(list(remaining_deg.keys()), 2)) + if self.graph.has_edge(u, v): + continue + if rng.random() < self.q(u, v): + break + if rng.random() < self.p(u, v): # accept edge + self.graph.add_edge(u, v) + self.update_remaining(u, v) + + def phase3(self): + # build potential remaining edges and choose with rejection sampling + potential_edges = combinations(self.remaining_degree, 2) + # build auxiliary graph of potential edges not already in graph + H = nx.Graph( + [(u, v) for (u, v) in potential_edges if not self.graph.has_edge(u, v)] + ) + rng = self.rng + while self.remaining_degree: + if not self.suitable_edge(): + raise nx.NetworkXUnfeasible("no suitable edges left") + while True: + u, v = sorted(rng.choice(list(H.edges()))) + if rng.random() < self.q(u, v): + break + if rng.random() < self.p(u, v): # accept edge + self.graph.add_edge(u, v) + self.update_remaining(u, v, aux_graph=H) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/directed.py b/.venv/lib/python3.12/site-packages/networkx/generators/directed.py new file mode 100644 index 0000000..0b2b5c3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/directed.py @@ -0,0 +1,572 @@ +""" +Generators for some directed graphs, including growing network (GN) graphs and +scale-free graphs. + +""" + +import numbers +from collections import Counter + +import networkx as nx +from networkx.generators.classic import empty_graph +from networkx.utils import ( + discrete_sequence, + np_random_state, + py_random_state, + weighted_choice, +) + +__all__ = [ + "gn_graph", + "gnc_graph", + "gnr_graph", + "random_k_out_graph", + "scale_free_graph", +] + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def gn_graph(n, kernel=None, create_using=None, seed=None): + """Returns the growing network (GN) digraph with `n` nodes. + + The GN graph is built by adding nodes one at a time with a link to one + previously added node. The target node for the link is chosen with + probability based on degree. The default attachment kernel is a linear + function of the degree of a node. + + The graph is always a (directed) tree. + + Parameters + ---------- + n : int + The number of nodes for the generated graph. + kernel : function + The attachment kernel. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
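+
+    Returns
+    -------
+    G : DiGraph
+        The generated growing network digraph; as noted above, it is
+        always a directed tree.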
+ + Examples + -------- + To create the undirected GN graph, use the :meth:`~DiGraph.to_directed` + method:: + + >>> D = nx.gn_graph(10) # the GN graph + >>> G = D.to_undirected() # the undirected version + + To specify an attachment kernel, use the `kernel` keyword argument:: + + >>> D = nx.gn_graph(10, kernel=lambda x: x**1.5) # A_k = k^1.5 + + References + ---------- + .. [1] P. L. Krapivsky and S. Redner, + Organization of Growing Random Networks, + Phys. Rev. E, 63, 066123, 2001. + """ + G = empty_graph(1, create_using, default=nx.DiGraph) + if not G.is_directed(): + raise nx.NetworkXError("create_using must indicate a Directed Graph") + + if kernel is None: + + def kernel(x): + return x + + if n == 1: + return G + + G.add_edge(1, 0) # get started + ds = [1, 1] # degree sequence + + for source in range(2, n): + # compute distribution from kernel and degree + dist = [kernel(d) for d in ds] + # choose target from discrete distribution + target = discrete_sequence(1, distribution=dist, seed=seed)[0] + G.add_edge(source, target) + ds.append(1) # the source has only one link (degree one) + ds[target] += 1 # add one to the target link degree + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def gnr_graph(n, p, create_using=None, seed=None): + """Returns the growing network with redirection (GNR) digraph with `n` + nodes and redirection probability `p`. + + The GNR graph is built by adding nodes one at a time with a link to one + previously added node. The previous target node is chosen uniformly at + random. With probability `p` the link is instead "redirected" to the + successor node of the target. + + The graph is always a (directed) tree. + + Parameters + ---------- + n : int + The number of nodes for the generated graph. + p : float + The redirection probability. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Examples + -------- + To create the undirected GNR graph, use the :meth:`~DiGraph.to_directed` + method:: + + >>> D = nx.gnr_graph(10, 0.5) # the GNR graph + >>> G = D.to_undirected() # the undirected version + + References + ---------- + .. [1] P. L. Krapivsky and S. Redner, + Organization of Growing Random Networks, + Phys. Rev. E, 63, 066123, 2001. + """ + G = empty_graph(1, create_using, default=nx.DiGraph) + if not G.is_directed(): + raise nx.NetworkXError("create_using must indicate a Directed Graph") + + if n == 1: + return G + + for source in range(1, n): + target = seed.randrange(0, source) + if seed.random() < p and target != 0: + target = next(G.successors(target)) + G.add_edge(source, target) + return G + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def gnc_graph(n, create_using=None, seed=None): + """Returns the growing network with copying (GNC) digraph with `n` nodes. + + The GNC graph is built by adding nodes one at a time with a link to one + previously added node (chosen uniformly at random) and to all of that + node's successors. + + Parameters + ---------- + n : int + The number of nodes for the generated graph. + create_using : NetworkX graph constructor, optional (default DiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. 
+        See :ref:`Randomness`.
+
+    References
+    ----------
+    .. [1] P. L. Krapivsky and S. Redner,
+       Network Growth by Copying,
+       Phys. Rev. E, 71, 036118, 2005.
+    """
+    G = empty_graph(1, create_using, default=nx.DiGraph)
+    if not G.is_directed():
+        raise nx.NetworkXError("create_using must indicate a Directed Graph")
+
+    if n == 1:
+        return G
+
+    for source in range(1, n):
+        target = seed.randrange(0, source)
+        for succ in G.successors(target):
+            G.add_edge(source, succ)
+        G.add_edge(source, target)
+    return G
+
+
+@py_random_state(6)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def scale_free_graph(
+    n,
+    alpha=0.41,
+    beta=0.54,
+    gamma=0.05,
+    delta_in=0.2,
+    delta_out=0,
+    seed=None,
+    initial_graph=None,
+):
+    """Returns a scale-free directed graph.
+
+    Parameters
+    ----------
+    n : integer
+        Number of nodes in graph
+    alpha : float
+        Probability for adding a new node connected to an existing node
+        chosen randomly according to the in-degree distribution.
+    beta : float
+        Probability for adding an edge between two existing nodes.
+        One existing node is chosen randomly according to the in-degree
+        distribution and the other chosen randomly according to the out-degree
+        distribution.
+    gamma : float
+        Probability for adding a new node connected to an existing node
+        chosen randomly according to the out-degree distribution.
+    delta_in : float
+        Bias for choosing nodes from in-degree distribution.
+    delta_out : float
+        Bias for choosing nodes from out-degree distribution.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    initial_graph : MultiDiGraph instance, optional
+        Build the scale-free graph starting from this initial MultiDiGraph,
+        if provided.
+
+    Returns
+    -------
+    MultiDiGraph
+
+    Examples
+    --------
+    Create a scale-free graph on one hundred nodes::
+
+        >>> G = nx.scale_free_graph(100)
+
+    Notes
+    -----
+    The sum of `alpha`, `beta`, and `gamma` must be 1.
+
+    References
+    ----------
+    .. [1] B. Bollobás, C. Borgs, J. Chayes, and O. Riordan,
+       Directed scale-free graphs,
+       Proceedings of the fourteenth annual ACM-SIAM Symposium on
+       Discrete Algorithms, 132--139, 2003.
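+
+    As a minimal sanity check (the generated edge multiset depends on
+    the seed), with the default ``initial_graph`` and `n` at least 3,
+    the returned graph has exactly `n` nodes::
+
+        >>> G = nx.scale_free_graph(100, seed=1)
+        >>> G.number_of_nodes()
+        100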
+ """ + + def _choose_node(candidates, node_list, delta): + if delta > 0: + bias_sum = len(node_list) * delta + p_delta = bias_sum / (bias_sum + len(candidates)) + if seed.random() < p_delta: + return seed.choice(node_list) + return seed.choice(candidates) + + if initial_graph is not None and hasattr(initial_graph, "_adj"): + if not isinstance(initial_graph, nx.MultiDiGraph): + raise nx.NetworkXError("initial_graph must be a MultiDiGraph.") + G = initial_graph + else: + # Start with 3-cycle + G = nx.MultiDiGraph([(0, 1), (1, 2), (2, 0)]) + + if alpha <= 0: + raise ValueError("alpha must be > 0.") + if beta <= 0: + raise ValueError("beta must be > 0.") + if gamma <= 0: + raise ValueError("gamma must be > 0.") + + if abs(alpha + beta + gamma - 1.0) >= 1e-9: + raise ValueError("alpha+beta+gamma must equal 1.") + + if delta_in < 0: + raise ValueError("delta_in must be >= 0.") + + if delta_out < 0: + raise ValueError("delta_out must be >= 0.") + + # pre-populate degree states + vs = sum((count * [idx] for idx, count in G.out_degree()), []) + ws = sum((count * [idx] for idx, count in G.in_degree()), []) + + # pre-populate node state + node_list = list(G.nodes()) + + # see if there already are number-based nodes + numeric_nodes = [n for n in node_list if isinstance(n, numbers.Number)] + if len(numeric_nodes) > 0: + # set cursor for new nodes appropriately + cursor = max(int(n.real) for n in numeric_nodes) + 1 + else: + # or start at zero + cursor = 0 + + while len(G) < n: + r = seed.random() + + # random choice in alpha,beta,gamma ranges + if r < alpha: + # alpha + # add new node v + v = cursor + cursor += 1 + # also add to node state + node_list.append(v) + # choose w according to in-degree and delta_in + w = _choose_node(ws, node_list, delta_in) + + elif r < alpha + beta: + # beta + # choose v according to out-degree and delta_out + v = _choose_node(vs, node_list, delta_out) + # choose w according to in-degree and delta_in + w = _choose_node(ws, node_list, delta_in) + + else: + # gamma + # choose v according to out-degree and delta_out + v = _choose_node(vs, node_list, delta_out) + # add new node w + w = cursor + cursor += 1 + # also add to node state + node_list.append(w) + + # add edge to graph + G.add_edge(v, w) + + # update degree states + vs.append(v) + ws.append(w) + + return G + + +@py_random_state(4) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_uniform_k_out_graph(n, k, self_loops=True, with_replacement=True, seed=None): + """Returns a random `k`-out graph with uniform attachment. + + A random `k`-out graph with uniform attachment is a multidigraph + generated by the following algorithm. For each node *u*, choose + `k` nodes *v* uniformly at random (with replacement). Add a + directed edge joining *u* to *v*. + + Parameters + ---------- + n : int + The number of nodes in the returned graph. + + k : int + The out-degree of each node in the returned graph. + + self_loops : bool + If True, self-loops are allowed when generating the graph. + + with_replacement : bool + If True, neighbors are chosen with replacement and the + returned graph will be a directed multigraph. Otherwise, + neighbors are chosen without replacement and the returned graph + will be a directed graph. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + NetworkX graph + A `k`-out-regular directed graph generated according to the + above algorithm. 
It will be a multigraph if and only if + `with_replacement` is True. + + Raises + ------ + ValueError + If `with_replacement` is False and `k` is greater than + `n`. + + See also + -------- + random_k_out_graph + + Notes + ----- + The return digraph or multidigraph may not be strongly connected, or + even weakly connected. + + If `with_replacement` is True, this function is similar to + :func:`random_k_out_graph`, if that function had parameter `alpha` + set to positive infinity. + + """ + if with_replacement: + create_using = nx.MultiDiGraph() + + def sample(v, nodes): + if not self_loops: + nodes = nodes - {v} + return (seed.choice(list(nodes)) for i in range(k)) + + else: + create_using = nx.DiGraph() + + def sample(v, nodes): + if not self_loops: + nodes = nodes - {v} + return seed.sample(list(nodes), k) + + G = nx.empty_graph(n, create_using) + nodes = set(G) + for u in G: + G.add_edges_from((u, v) for v in sample(u, nodes)) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def random_k_out_graph(n, k, alpha, self_loops=True, seed=None): + """Returns a random `k`-out graph with preferential attachment. + + .. versionchanged:: 3.5 + Different implementations will be used based on whether NumPy is + available. See Notes for details. + + A random `k`-out graph with preferential attachment is a + multidigraph generated by the following algorithm. + + 1. Begin with an empty digraph, and initially set each node to have + weight `alpha`. + 2. Choose a node `u` with out-degree less than `k` uniformly at + random. + 3. Choose a node `v` from with probability proportional to its + weight. + 4. Add a directed edge from `u` to `v`, and increase the weight + of `v` by one. + 5. If each node has out-degree `k`, halt, otherwise repeat from + step 2. + + For more information on this model of random graph, see [1]_. + + Parameters + ---------- + n : int + The number of nodes in the returned graph. + + k : int + The out-degree of each node in the returned graph. + + alpha : float + A positive :class:`float` representing the initial weight of + each vertex. A higher number means that in step 3 above, nodes + will be chosen more like a true uniformly random sample, and a + lower number means that nodes are more likely to be chosen as + their in-degree increases. If this parameter is not positive, a + :exc:`ValueError` is raised. + + self_loops : bool + If True, self-loops are allowed when generating the graph. + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + :class:`~networkx.classes.MultiDiGraph` + A `k`-out-regular multidigraph generated according to the above + algorithm. + + Raises + ------ + ValueError + If `alpha` is not positive. + + Notes + ----- + The returned multidigraph may not be strongly connected, or even + weakly connected. + + `random_k_out_graph` has two implementations: an array-based formulation that + uses `numpy` (``_random_k_out_graph_numpy``), and a pure-Python + implementation (``_random_k_out_graph_python``). + The NumPy implementation is more performant, especially for large `n`, and is + therefore used by default. If NumPy is not installed in the environment, + then the pure Python implementation is executed. 
+ However, you can explicitly control which implementation is executed by directly + calling the corresponding function:: + + # Use numpy if available, else Python + nx.random_k_out_graph(1000, 5, alpha=1) + + # Use the numpy-based implementation (raises ImportError if numpy not installed) + nx.generators.directed._random_k_out_graph_numpy(1000, 5, alpha=1) + + # Use the Python-based implementation + nx.generators.directed._random_k_out_graph_python(1000, 5, alpha=1) + + References + ---------- + .. [1] Peterson, Nicholas R., and Boris Pittel. + "Distance between two random `k`-out digraphs, with and without preferential attachment." + arXiv preprint arXiv:1311.5961 (2013) . + + """ + if alpha < 0: + raise ValueError("alpha must be positive") + try: # Use numpy if available, otherwise fall back to pure Python implementation + return _random_k_out_graph_numpy(n, k, alpha, self_loops, seed) + except ImportError: + return _random_k_out_graph_python(n, k, alpha, self_loops, seed) + + +@np_random_state(4) +def _random_k_out_graph_numpy(n, k, alpha, self_loops=True, seed=None): + import numpy as np + + G = nx.empty_graph(n, create_using=nx.MultiDiGraph) + nodes = np.arange(n) + remaining_mask = np.full(n, True) + weights = np.full(n, alpha) + total_weight = n * alpha + out_strengths = np.zeros(n) + + for i in range(k * n): + u = seed.choice(nodes[remaining_mask]) + + if self_loops: + v = seed.choice(nodes, p=weights / total_weight) + else: # Ignore weight of u when selecting v + u_weight = weights[u] + weights[u] = 0 + v = seed.choice(nodes, p=weights / (total_weight - u_weight)) + weights[u] = u_weight + + G.add_edge(u, v) + weights[v] += 1 + total_weight += 1 + out_strengths[u] += 1 + if out_strengths[u] == k: + remaining_mask[u] = False + return G + + +@py_random_state(4) +def _random_k_out_graph_python(n, k, alpha, self_loops=True, seed=None): + G = nx.empty_graph(n, create_using=nx.MultiDiGraph) + weights = Counter(dict.fromkeys(G, alpha)) + out_strengths = Counter(dict.fromkeys(G, 0)) + + for i in range(k * n): + u = seed.choice(list(out_strengths.keys())) + # If self-loops are not allowed, make the source node `u` have + # weight zero. + if not self_loops: + uweight = weights.pop(u) + + v = weighted_choice(weights, seed=seed) + + if not self_loops: + weights[u] = uweight + + G.add_edge(u, v) + weights[v] += 1 + out_strengths[u] += 1 + if out_strengths[u] == k: + out_strengths.pop(u) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/duplication.py b/.venv/lib/python3.12/site-packages/networkx/generators/duplication.py new file mode 100644 index 0000000..3c3ade6 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/duplication.py @@ -0,0 +1,174 @@ +"""Functions for generating graphs based on the "duplication" method. + +These graph generators start with a small initial graph then duplicate +nodes and (partially) duplicate their edges. These functions are +generally inspired by biological networks. + +""" + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import py_random_state +from networkx.utils.misc import check_create_using + +__all__ = ["partial_duplication_graph", "duplication_divergence_graph"] + + +@py_random_state(4) +@nx._dispatchable(graphs=None, returns_graph=True) +def partial_duplication_graph(N, n, p, q, seed=None, *, create_using=None): + """Returns a random graph using the partial duplication model. + + Parameters + ---------- + N : int + The total number of nodes in the final graph. 
+
+    n : int
+        The number of nodes in the initial clique.
+
+    p : float
+        The probability of joining each neighbor of a node to the
+        duplicate node. Must be a number between zero and one,
+        inclusive.
+
+    q : float
+        The probability of joining the source node to the duplicate
+        node. Must be a number between zero and one, inclusive.
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Notes
+    -----
+    A graph is grown by creating a fully connected graph
+    of size `n`. The following procedure is then repeated until
+    a total of `N` nodes have been reached.
+
+    1. A random node, *u*, is picked and a new node, *v*, is created.
+    2. For each neighbor of *u* an edge from the neighbor to *v* is created
+       with probability `p`.
+    3. An edge from *u* to *v* is created with probability `q`.
+
+    This algorithm appears in [1]_.
+
+    This implementation allows the possibility of generating
+    disconnected graphs.
+
+    References
+    ----------
+    .. [1] Knudsen, Michael, and Carsten Wiuf. "A Markov chain approach to
+       randomly grown graphs." Journal of Applied Mathematics 2008.
+
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    if p < 0 or p > 1 or q < 0 or q > 1:
+        msg = "partial duplication graph must have 0 <= p, q <= 1."
+        raise NetworkXError(msg)
+    if n > N:
+        raise NetworkXError("partial duplication graph must have n <= N.")
+
+    G = nx.complete_graph(n, create_using)
+    for new_node in range(n, N):
+        # Pick a random vertex, u, already in the graph.
+        src_node = seed.randint(0, new_node - 1)
+
+        # Add a new vertex, v, to the graph.
+        G.add_node(new_node)
+
+        # For each neighbor of u...
+        for nbr_node in list(nx.all_neighbors(G, src_node)):
+            # Add the neighbor to v with probability p.
+            if seed.random() < p:
+                G.add_edge(new_node, nbr_node)
+
+        # Join v and u with probability q.
+        if seed.random() < q:
+            G.add_edge(new_node, src_node)
+    return G
+
+
+@py_random_state(2)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def duplication_divergence_graph(n, p, seed=None, *, create_using=None):
+    """Returns an undirected graph using the duplication-divergence model.
+
+    A graph of `n` nodes is created by duplicating the initial nodes
+    and retaining edges incident to the original nodes with a retention
+    probability `p`.
+
+    Parameters
+    ----------
+    n : int
+        The desired number of nodes in the graph.
+    p : float
+        The probability for retaining the edge of the replicated node.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Returns
+    -------
+    G : Graph
+
+    Raises
+    ------
+    NetworkXError
+        If `p` is not a valid probability.
+        If `n` is less than 2.
+
+    Notes
+    -----
+    This algorithm appears in [1]_.
+
+    This implementation disallows the possibility of generating
+    disconnected graphs.
+
+    References
+    ----------
+    .. [1] I. Ispolatov, P. L. Krapivsky, A. Yuryev,
+       "Duplication-divergence model of protein interaction network",
+       Phys. Rev. E, 71, 061911, 2005.
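+
+    Examples
+    --------
+    A minimal sketch; which edges are retained depends on ``seed``::
+
+        >>> G = nx.duplication_divergence_graph(10, 0.5, seed=1)
+        >>> G.number_of_nodes()
+        10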
+ + """ + if p > 1 or p < 0: + msg = f"NetworkXError p={p} is not in [0,1]." + raise nx.NetworkXError(msg) + if n < 2: + msg = "n must be greater than or equal to 2" + raise nx.NetworkXError(msg) + + create_using = check_create_using(create_using, directed=False, multigraph=False) + G = nx.empty_graph(create_using=create_using) + + # Initialize the graph with two connected nodes. + G.add_edge(0, 1) + i = 2 + while i < n: + # Choose a random node from current graph to duplicate. + random_node = seed.choice(list(G)) + # Make the replica. + G.add_node(i) + # flag indicates whether at least one edge is connected on the replica. + flag = False + for nbr in G.neighbors(random_node): + if seed.random() < p: + # Link retention step. + G.add_edge(i, nbr) + flag = True + if not flag: + # Delete replica if no edges retained. + G.remove_node(i) + else: + # Successful duplication. + i += 1 + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/ego.py b/.venv/lib/python3.12/site-packages/networkx/generators/ego.py new file mode 100644 index 0000000..91ff3e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/ego.py @@ -0,0 +1,66 @@ +""" +Ego graph. +""" + +__all__ = ["ego_graph"] + +import networkx as nx + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def ego_graph(G, n, radius=1, center=True, undirected=False, distance=None): + """Returns induced subgraph of neighbors centered at node n within + a given radius. + + Parameters + ---------- + G : graph + A NetworkX Graph or DiGraph + + n : node + A single node + + radius : number, optional + Include all neighbors of distance<=radius from n. + + center : bool, optional + If False, do not include center node in graph + + undirected : bool, optional + If True use both in- and out-neighbors of directed graphs. + + distance : key, optional + Use specified edge data key as distance. For example, setting + distance='weight' will use the edge weight to measure the + distance from the node n. + + Notes + ----- + For directed graphs D this produces the "out" neighborhood + or successors. If you want the neighborhood of predecessors + first reverse the graph with D.reverse(). If you want both + directions use the keyword argument undirected=True. + + Node, edge, and graph attributes are copied to the returned subgraph. + """ + if undirected: + if distance is not None: + sp, _ = nx.single_source_dijkstra( + G.to_undirected(), n, cutoff=radius, weight=distance + ) + else: + sp = dict( + nx.single_source_shortest_path_length( + G.to_undirected(), n, cutoff=radius + ) + ) + else: + if distance is not None: + sp, _ = nx.single_source_dijkstra(G, n, cutoff=radius, weight=distance) + else: + sp = nx.single_source_shortest_path_length(G, n, cutoff=radius) + + H = G.subgraph(sp).copy() + if not center: + H.remove_node(n) + return H diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/expanders.py b/.venv/lib/python3.12/site-packages/networkx/generators/expanders.py new file mode 100644 index 0000000..65db764 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/expanders.py @@ -0,0 +1,476 @@ +"""Provides explicit constructions of expander graphs.""" + +import itertools + +import networkx as nx + +__all__ = [ + "margulis_gabber_galil_graph", + "chordal_cycle_graph", + "paley_graph", + "maybe_regular_expander", + "is_regular_expander", + "random_regular_expander_graph", +] + + +# Other discrete torus expanders can be constructed by using the following edge +# sets. 
For more information, see Chapter 4, "Expander Graphs", in +# "Pseudorandomness", by Salil Vadhan. +# +# For a directed expander, add edges from (x, y) to: +# +# (x, y), +# ((x + 1) % n, y), +# (x, (y + 1) % n), +# (x, (x + y) % n), +# (-y % n, x) +# +# For an undirected expander, add the reverse edges. +# +# Also appearing in the paper of Gabber and Galil: +# +# (x, y), +# (x, (x + y) % n), +# (x, (x + y + 1) % n), +# ((x + y) % n, y), +# ((x + y + 1) % n, y) +# +# and: +# +# (x, y), +# ((x + 2*y) % n, y), +# ((x + (2*y + 1)) % n, y), +# ((x + (2*y + 2)) % n, y), +# (x, (y + 2*x) % n), +# (x, (y + (2*x + 1)) % n), +# (x, (y + (2*x + 2)) % n), +# +@nx._dispatchable(graphs=None, returns_graph=True) +def margulis_gabber_galil_graph(n, create_using=None): + r"""Returns the Margulis-Gabber-Galil undirected MultiGraph on `n^2` nodes. + + The undirected MultiGraph is regular with degree `8`. Nodes are integer + pairs. The second-largest eigenvalue of the adjacency matrix of the graph + is at most `5 \sqrt{2}`, regardless of `n`. + + Parameters + ---------- + n : int + Determines the number of nodes in the graph: `n^2`. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : graph + The constructed undirected multigraph. + + Raises + ------ + NetworkXError + If the graph is directed or not a multigraph. + + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed() or not G.is_multigraph(): + msg = "`create_using` must be an undirected multigraph." + raise nx.NetworkXError(msg) + + for x, y in itertools.product(range(n), repeat=2): + for u, v in ( + ((x + 2 * y) % n, y), + ((x + (2 * y + 1)) % n, y), + (x, (y + 2 * x) % n), + (x, (y + (2 * x + 1)) % n), + ): + G.add_edge((x, y), (u, v)) + G.graph["name"] = f"margulis_gabber_galil_graph({n})" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def chordal_cycle_graph(p, create_using=None): + """Returns the chordal cycle graph on `p` nodes. + + The returned graph is a cycle graph on `p` nodes with chords joining each + vertex `x` to its inverse modulo `p`. This graph is a (mildly explicit) + 3-regular expander [1]_. + + `p` *must* be a prime number. + + Parameters + ---------- + p : a prime number + + The number of vertices in the graph. This also indicates where the + chordal edges in the cycle will be created. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : graph + The constructed undirected multigraph. + + Raises + ------ + NetworkXError + + If `create_using` indicates directed or not a multigraph. + + References + ---------- + + .. [1] Theorem 4.4.2 in A. Lubotzky. "Discrete groups, expanding graphs and + invariant measures", volume 125 of Progress in Mathematics. + Birkhäuser Verlag, Basel, 1994. + + """ + G = nx.empty_graph(0, create_using, default=nx.MultiGraph) + if G.is_directed() or not G.is_multigraph(): + msg = "`create_using` must be an undirected multigraph." + raise nx.NetworkXError(msg) + + for x in range(p): + left = (x - 1) % p + right = (x + 1) % p + # Here we apply Fermat's Little Theorem to compute the multiplicative + # inverse of x in Z/pZ. By Fermat's Little Theorem, + # + # x^p = x (mod p) + # + # Therefore, + # + # x * x^(p - 2) = 1 (mod p) + # + # The number 0 is a special case: we just let its inverse be itself. 
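+        # For example, with p = 7 and x = 3: pow(3, 5, 7) == 5, and
+        # indeed (3 * 5) % 7 == 1.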
+ chord = pow(x, p - 2, p) if x > 0 else 0 + for y in (left, right, chord): + G.add_edge(x, y) + G.graph["name"] = f"chordal_cycle_graph({p})" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def paley_graph(p, create_using=None): + r"""Returns the Paley $\frac{(p-1)}{2}$ -regular graph on $p$ nodes. + + The returned graph is a graph on $\mathbb{Z}/p\mathbb{Z}$ with edges between $x$ and $y$ + if and only if $x-y$ is a nonzero square in $\mathbb{Z}/p\mathbb{Z}$. + + If $p \equiv 1 \pmod 4$, $-1$ is a square in + $\mathbb{Z}/p\mathbb{Z}$ and therefore $x-y$ is a square if and + only if $y-x$ is also a square, i.e the edges in the Paley graph are symmetric. + + If $p \equiv 3 \pmod 4$, $-1$ is not a square in $\mathbb{Z}/p\mathbb{Z}$ + and therefore either $x-y$ or $y-x$ is a square in $\mathbb{Z}/p\mathbb{Z}$ but not both. + + Note that a more general definition of Paley graphs extends this construction + to graphs over $q=p^n$ vertices, by using the finite field $F_q$ instead of + $\mathbb{Z}/p\mathbb{Z}$. + This construction requires to compute squares in general finite fields and is + not what is implemented here (i.e `paley_graph(25)` does not return the true + Paley graph associated with $5^2$). + + Parameters + ---------- + p : int, an odd prime number. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : graph + The constructed directed graph. + + Raises + ------ + NetworkXError + If the graph is a multigraph. + + References + ---------- + Chapter 13 in B. Bollobas, Random Graphs. Second edition. + Cambridge Studies in Advanced Mathematics, 73. + Cambridge University Press, Cambridge (2001). + """ + G = nx.empty_graph(0, create_using, default=nx.DiGraph) + if G.is_multigraph(): + msg = "`create_using` cannot be a multigraph." + raise nx.NetworkXError(msg) + + # Compute the squares in Z/pZ. + # Make it a set to uniquify (there are exactly (p-1)/2 squares in Z/pZ + # when is prime). + square_set = {(x**2) % p for x in range(1, p) if (x**2) % p != 0} + + for x in range(p): + for x2 in square_set: + G.add_edge(x, (x + x2) % p) + G.graph["name"] = f"paley({p})" + return G + + +@nx.utils.decorators.np_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def maybe_regular_expander(n, d, *, create_using=None, max_tries=100, seed=None): + r"""Utility for creating a random regular expander. + + Returns a random $d$-regular graph on $n$ nodes which is an expander + graph with very good probability. + + Parameters + ---------- + n : int + The number of nodes. + d : int + The degree of each node. + create_using : Graph Instance or Constructor + Indicator of type of graph to return. + If a Graph-type instance, then clear and use it. + If a constructor, call it to create an empty graph. + Use the Graph constructor by default. + max_tries : int. (default: 100) + The number of allowed loops when generating each independent cycle + seed : (default: None) + Seed used to set random number generation state. See :ref`Randomness`. + + Notes + ----- + The nodes are numbered from $0$ to $n - 1$. + + The graph is generated by taking $d / 2$ random independent cycles. + + Joel Friedman proved that in this model the resulting + graph is an expander with probability + $1 - O(n^{-\tau})$ where $\tau = \lceil (\sqrt{d - 1}) / 2 \rceil - 1$. 
[1]_ + + Examples + -------- + >>> G = nx.maybe_regular_expander(n=200, d=6, seed=8020) + + Returns + ------- + G : graph + The constructed undirected graph. + + Raises + ------ + NetworkXError + If $d % 2 != 0$ as the degree must be even. + If $n - 1$ is less than $ 2d $ as the graph is complete at most. + If max_tries is reached + + See Also + -------- + is_regular_expander + random_regular_expander_graph + + References + ---------- + .. [1] Joel Friedman, + A Proof of Alon's Second Eigenvalue Conjecture and Related Problems, 2004 + https://arxiv.org/abs/cs/0405020 + + """ + + import numpy as np + + if n < 1: + raise nx.NetworkXError("n must be a positive integer") + + if not (d >= 2): + raise nx.NetworkXError("d must be greater than or equal to 2") + + if not (d % 2 == 0): + raise nx.NetworkXError("d must be even") + + if not (n - 1 >= d): + raise nx.NetworkXError( + f"Need n-1>= d to have room for {d // 2} independent cycles with {n} nodes" + ) + + G = nx.empty_graph(n, create_using) + + if n < 2: + return G + + cycles = [] + edges = set() + + # Create d / 2 cycles + for i in range(d // 2): + iterations = max_tries + # Make sure the cycles are independent to have a regular graph + while len(edges) != (i + 1) * n: + iterations -= 1 + # Faster than random.permutation(n) since there are only + # (n-1)! distinct cycles against n! permutations of size n + cycle = seed.permutation(n - 1).tolist() + cycle.append(n - 1) + + new_edges = { + (u, v) + for u, v in nx.utils.pairwise(cycle, cyclic=True) + if (u, v) not in edges and (v, u) not in edges + } + # If the new cycle has no edges in common with previous cycles + # then add it to the list otherwise try again + if len(new_edges) == n: + cycles.append(cycle) + edges.update(new_edges) + + if iterations == 0: + raise nx.NetworkXError("Too many iterations in maybe_regular_expander") + + G.add_edges_from(edges) + + return G + + +@nx.utils.not_implemented_for("directed") +@nx.utils.not_implemented_for("multigraph") +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}) +def is_regular_expander(G, *, epsilon=0): + r"""Determines whether the graph G is a regular expander. [1]_ + + An expander graph is a sparse graph with strong connectivity properties. + + More precisely, this helper checks whether the graph is a + regular $(n, d, \lambda)$-expander with $\lambda$ close to + the Alon-Boppana bound and given by + $\lambda = 2 \sqrt{d - 1} + \epsilon$. [2]_ + + In the case where $\epsilon = 0$ then if the graph successfully passes the test + it is a Ramanujan graph. [3]_ + + A Ramanujan graph has spectral gap almost as large as possible, which makes them + excellent expanders. + + Parameters + ---------- + G : NetworkX graph + epsilon : int, float, default=0 + + Returns + ------- + bool + Whether the given graph is a regular $(n, d, \lambda)$-expander + where $\lambda = 2 \sqrt{d - 1} + \epsilon$. + + Examples + -------- + >>> G = nx.random_regular_expander_graph(20, 4) + >>> nx.is_regular_expander(G) + True + + See Also + -------- + maybe_regular_expander + random_regular_expander_graph + + References + ---------- + .. [1] Expander graph, https://en.wikipedia.org/wiki/Expander_graph + .. [2] Alon-Boppana bound, https://en.wikipedia.org/wiki/Alon%E2%80%93Boppana_bound + .. 
[3] Ramanujan graphs, https://en.wikipedia.org/wiki/Ramanujan_graph + + """ + + import numpy as np + import scipy as sp + + if epsilon < 0: + raise nx.NetworkXError("epsilon must be non negative") + + if not nx.is_regular(G): + return False + + _, d = nx.utils.arbitrary_element(G.degree) + + A = nx.adjacency_matrix(G, dtype=float) + lams = sp.sparse.linalg.eigsh(A, which="LM", k=2, return_eigenvectors=False) + + # lambda2 is the second biggest eigenvalue + lambda2 = min(lams) + + # Use bool() to convert numpy scalar to Python Boolean + return bool(abs(lambda2) < 2 * np.sqrt(d - 1) + epsilon) + + +@nx.utils.decorators.np_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_regular_expander_graph( + n, d, *, epsilon=0, create_using=None, max_tries=100, seed=None +): + r"""Returns a random regular expander graph on $n$ nodes with degree $d$. + + An expander graph is a sparse graph with strong connectivity properties. [1]_ + + More precisely the returned graph is a $(n, d, \lambda)$-expander with + $\lambda = 2 \sqrt{d - 1} + \epsilon$, close to the Alon-Boppana bound. [2]_ + + In the case where $\epsilon = 0$ it returns a Ramanujan graph. + A Ramanujan graph has spectral gap almost as large as possible, + which makes them excellent expanders. [3]_ + + Parameters + ---------- + n : int + The number of nodes. + d : int + The degree of each node. + epsilon : int, float, default=0 + max_tries : int, (default: 100) + The number of allowed loops, also used in the maybe_regular_expander utility + seed : (default: None) + Seed used to set random number generation state. See :ref`Randomness`. + + Raises + ------ + NetworkXError + If max_tries is reached + + Examples + -------- + >>> G = nx.random_regular_expander_graph(20, 4) + >>> nx.is_regular_expander(G) + True + + Notes + ----- + This loops over `maybe_regular_expander` and can be slow when + $n$ is too big or $\epsilon$ too small. + + See Also + -------- + maybe_regular_expander + is_regular_expander + + References + ---------- + .. [1] Expander graph, https://en.wikipedia.org/wiki/Expander_graph + .. [2] Alon-Boppana bound, https://en.wikipedia.org/wiki/Alon%E2%80%93Boppana_bound + .. 
[3] Ramanujan graphs, https://en.wikipedia.org/wiki/Ramanujan_graph + + """ + G = maybe_regular_expander( + n, d, create_using=create_using, max_tries=max_tries, seed=seed + ) + iterations = max_tries + + while not is_regular_expander(G, epsilon=epsilon): + iterations -= 1 + G = maybe_regular_expander( + n=n, d=d, create_using=create_using, max_tries=max_tries, seed=seed + ) + + if iterations == 0: + raise nx.NetworkXError( + "Too many iterations in random_regular_expander_graph" + ) + + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/geometric.py b/.venv/lib/python3.12/site-packages/networkx/generators/geometric.py new file mode 100644 index 0000000..fdd4f62 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/geometric.py @@ -0,0 +1,1037 @@ +"""Generators for geometric graphs.""" + +import math +from bisect import bisect_left +from itertools import accumulate, combinations, product + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = [ + "geometric_edges", + "geographical_threshold_graph", + "navigable_small_world_graph", + "random_geometric_graph", + "soft_random_geometric_graph", + "thresholded_random_geometric_graph", + "waxman_graph", + "geometric_soft_configuration_graph", +] + + +@nx._dispatchable(node_attrs="pos_name") +def geometric_edges(G, radius, p=2, *, pos_name="pos"): + """Returns edge list of node pairs within `radius` of each other. + + Parameters + ---------- + G : networkx graph + The graph from which to generate the edge list. The nodes in `G` should + have an attribute ``pos`` corresponding to the node position, which is + used to compute the distance to other nodes. + radius : scalar + The distance threshold. Edges are included in the edge list if the + distance between the two nodes is less than `radius`. + pos_name : string, default="pos" + The name of the node attribute which represents the position of each + node in 2D coordinates. Every node in the Graph must have this attribute. + p : scalar, default=2 + The `Minkowski distance metric + `_ used to compute + distances. The default value is 2, i.e. Euclidean distance. + + Returns + ------- + edges : list + List of edges whose distances are less than `radius` + + Notes + ----- + Radius uses Minkowski distance metric `p`. + If scipy is available, `scipy.spatial.cKDTree` is used to speed computation. + + Examples + -------- + Create a graph with nodes that have a "pos" attribute representing 2D + coordinates. + + >>> G = nx.Graph() + >>> G.add_nodes_from( + ... [ + ... (0, {"pos": (0, 0)}), + ... (1, {"pos": (3, 0)}), + ... (2, {"pos": (8, 0)}), + ... ] + ... ) + >>> nx.geometric_edges(G, radius=1) + [] + >>> nx.geometric_edges(G, radius=4) + [(0, 1)] + >>> nx.geometric_edges(G, radius=6) + [(0, 1), (1, 2)] + >>> nx.geometric_edges(G, radius=9) + [(0, 1), (0, 2), (1, 2)] + """ + # Input validation - every node must have a "pos" attribute + for n, pos in G.nodes(data=pos_name): + if pos is None: + raise nx.NetworkXError( + f"Node {n} (and all nodes) must have a '{pos_name}' attribute." + ) + + # NOTE: See _geometric_edges for the actual implementation. The reason this + # is split into two functions is to avoid the overhead of input validation + # every time the function is called internally in one of the other + # geometric generators + return _geometric_edges(G, radius, p, pos_name) + + +def _geometric_edges(G, radius, p, pos_name): + """ + Implements `geometric_edges` without input validation. 
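+    It is called internally by the other geometric generators in this
+    module, which validate node positions once up front.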
See `geometric_edges` + for complete docstring. + """ + nodes_pos = G.nodes(data=pos_name) + try: + import scipy as sp + except ImportError: + # no scipy KDTree so compute by for-loop + radius_p = radius**p + edges = [ + (u, v) + for (u, pu), (v, pv) in combinations(nodes_pos, 2) + if sum(abs(a - b) ** p for a, b in zip(pu, pv)) <= radius_p + ] + return edges + # scipy KDTree is available + nodes, coords = list(zip(*nodes_pos)) + kdtree = sp.spatial.cKDTree(coords) # Cannot provide generator. + edge_indexes = kdtree.query_pairs(radius, p) + edges = [(nodes[u], nodes[v]) for u, v in sorted(edge_indexes)] + return edges + + +@py_random_state(5) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_geometric_graph( + n, radius, dim=2, pos=None, p=2, seed=None, *, pos_name="pos" +): + """Returns a random geometric graph in the unit cube of dimensions `dim`. + + The random geometric graph model places `n` nodes uniformly at + random in the unit cube. Two nodes are joined by an edge if the + distance between the nodes is at most `radius`. + + Edges are determined using a KDTree when SciPy is available. + This reduces the time complexity from $O(n^2)$ to $O(n)$. + + Parameters + ---------- + n : int or iterable + Number of nodes or iterable of nodes + radius: float + Distance threshold value + dim : int, optional + Dimension of graph + pos : dict, optional + A dictionary keyed by node with node positions as values. + p : float, optional + Which Minkowski distance metric to use. `p` has to meet the condition + ``1 <= p <= infinity``. + + If this argument is not specified, the :math:`L^2` metric + (the Euclidean distance metric), p = 2 is used. + This should not be confused with the `p` of an Erdős-Rényi random + graph, which represents probability. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + pos_name : string, default="pos" + The name of the node attribute which represents the position + in 2D coordinates of the node in the returned graph. + + Returns + ------- + Graph + A random geometric graph, undirected and without self-loops. + Each node has a node attribute ``'pos'`` that stores the + position of that node in Euclidean space as provided by the + ``pos`` keyword argument or, if ``pos`` was not provided, as + generated by this function. + + Examples + -------- + Create a random geometric graph on twenty nodes where nodes are joined by + an edge if their distance is at most 0.1:: + + >>> G = nx.random_geometric_graph(20, 0.1) + + Notes + ----- + This uses a *k*-d tree to build the graph. + + The `pos` keyword argument can be used to specify node positions so you + can create an arbitrary distribution and domain for positions. + + For example, to use a 2D Gaussian distribution of node positions with mean + (0, 0) and standard deviation 2:: + + >>> import random + >>> n = 20 + >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)} + >>> G = nx.random_geometric_graph(n, 0.2, pos=pos) + + References + ---------- + .. [1] Penrose, Mathew, *Random Geometric Graphs*, + Oxford Studies in Probability, 5, 2003. + + """ + # TODO Is this function just a special case of the geographical + # threshold graph? + # + # half_radius = {v: radius / 2 for v in n} + # return geographical_threshold_graph(nodes, theta=1, alpha=1, + # weight=half_radius) + # + G = nx.empty_graph(n) + # If no positions are provided, choose uniformly random vectors in + # Euclidean space of the specified dimension. 
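+    # (When `pos` is supplied it must map every node to a sequence of
+    # `dim` coordinates; the comprehension below produces exactly that
+    # shape for the default case.)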
+ if pos is None: + pos = {v: [seed.random() for i in range(dim)] for v in G} + nx.set_node_attributes(G, pos, pos_name) + + G.add_edges_from(_geometric_edges(G, radius, p, pos_name)) + return G + + +@py_random_state(6) +@nx._dispatchable(graphs=None, returns_graph=True) +def soft_random_geometric_graph( + n, radius, dim=2, pos=None, p=2, p_dist=None, seed=None, *, pos_name="pos" +): + r"""Returns a soft random geometric graph in the unit cube. + + The soft random geometric graph [1] model places `n` nodes uniformly at + random in the unit cube in dimension `dim`. Two nodes of distance, `dist`, + computed by the `p`-Minkowski distance metric are joined by an edge with + probability `p_dist` if the computed distance metric value of the nodes + is at most `radius`, otherwise they are not joined. + + Edges within `radius` of each other are determined using a KDTree when + SciPy is available. This reduces the time complexity from :math:`O(n^2)` + to :math:`O(n)`. + + Parameters + ---------- + n : int or iterable + Number of nodes or iterable of nodes + radius: float + Distance threshold value + dim : int, optional + Dimension of graph + pos : dict, optional + A dictionary keyed by node with node positions as values. + p : float, optional + Which Minkowski distance metric to use. + `p` has to meet the condition ``1 <= p <= infinity``. + + If this argument is not specified, the :math:`L^2` metric + (the Euclidean distance metric), p = 2 is used. + + This should not be confused with the `p` of an Erdős-Rényi random + graph, which represents probability. + p_dist : function, optional + A probability density function computing the probability of + connecting two nodes that are of distance, dist, computed by the + Minkowski distance metric. The probability density function, `p_dist`, + must be any function that takes the metric value as input + and outputs a single probability value between 0-1. The `scipy.stats` + package has many probability distribution functions implemented and + tools for custom probability distribution definitions [2], and passing + the .pdf method of `scipy.stats` distributions can be used here. If the + probability function, `p_dist`, is not supplied, the default function + is an exponential distribution with rate parameter :math:`\lambda=1`. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + pos_name : string, default="pos" + The name of the node attribute which represents the position + in 2D coordinates of the node in the returned graph. + + Returns + ------- + Graph + A soft random geometric graph, undirected and without self-loops. + Each node has a node attribute ``'pos'`` that stores the + position of that node in Euclidean space as provided by the + ``pos`` keyword argument or, if ``pos`` was not provided, as + generated by this function. + + Notes + ----- + This uses a *k*-d tree to build the graph. + + References + ---------- + .. [1] Penrose, Mathew D. "Connectivity of soft random geometric graphs." + The Annals of Applied Probability 26.2 (2016): 986-1028. + + Examples + -------- + Default Graph: + + >>> G = nx.soft_random_geometric_graph(50, 0.2) + + Custom Graph: + + The `pos` keyword argument can be used to specify node positions so you + can create an arbitrary distribution and domain for positions. + + The `scipy.stats` package can be used to define the probability distribution + with the ``.pdf`` method used as `p_dist`. 
+ + For example, create a soft random geometric graph on 100 nodes using a 2D + Gaussian distribution of node positions with mean (0, 0) and standard deviation 2, + where nodes are joined by an edge with probability computed from an + exponential distribution with rate parameter :math:`\lambda=1` if their + Euclidean distance is at most 0.2. + + >>> import random + >>> from scipy.stats import expon + >>> n = 100 + >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)} + >>> p_dist = lambda x: expon.pdf(x, scale=1) + >>> G = nx.soft_random_geometric_graph(n, 0.2, pos=pos, p_dist=p_dist) + + """ + G = nx.empty_graph(n) + G.name = f"soft_random_geometric_graph({n}, {radius}, {dim})" + # If no positions are provided, choose uniformly random vectors in + # Euclidean space of the specified dimension. + if pos is None: + pos = {v: [seed.random() for i in range(dim)] for v in G} + nx.set_node_attributes(G, pos, pos_name) + + # if p_dist function not supplied the default function is an exponential + # distribution with rate parameter :math:`\lambda=1`. + if p_dist is None: + + def p_dist(dist): + return math.exp(-dist) + + def should_join(edge): + u, v = edge + dist = (sum(abs(a - b) ** p for a, b in zip(pos[u], pos[v]))) ** (1 / p) + return seed.random() < p_dist(dist) + + G.add_edges_from(filter(should_join, _geometric_edges(G, radius, p, pos_name))) + return G + + +@py_random_state(7) +@nx._dispatchable(graphs=None, returns_graph=True) +def geographical_threshold_graph( + n, + theta, + dim=2, + pos=None, + weight=None, + metric=None, + p_dist=None, + seed=None, + *, + pos_name="pos", + weight_name="weight", +): + r"""Returns a geographical threshold graph. + + The geographical threshold graph model places $n$ nodes uniformly at + random in a rectangular domain. Each node $u$ is assigned a weight + $w_u$. Two nodes $u$ and $v$ are joined by an edge if + + .. math:: + + (w_u + w_v)p_{dist}(r) \ge \theta + + where `r` is the distance between `u` and `v`, `p_dist` is any function of + `r`, and :math:`\theta` as the threshold parameter. `p_dist` is used to + give weight to the distance between nodes when deciding whether or not + they should be connected. The larger `p_dist` is, the more prone nodes + separated by `r` are to be connected, and vice versa. + + Parameters + ---------- + n : int or iterable + Number of nodes or iterable of nodes + theta: float + Threshold value + dim : int, optional + Dimension of graph + pos : dict + Node positions as a dictionary of tuples keyed by node. + weight : dict + Node weights as a dictionary of numbers keyed by node. + metric : function + A metric on vectors of numbers (represented as lists or + tuples). This must be a function that accepts two lists (or + tuples) as input and yields a number as output. The function + must also satisfy the four requirements of a `metric`_. + Specifically, if $d$ is the function and $x$, $y$, + and $z$ are vectors in the graph, then $d$ must satisfy + + 1. $d(x, y) \ge 0$, + 2. $d(x, y) = 0$ if and only if $x = y$, + 3. $d(x, y) = d(y, x)$, + 4. $d(x, z) \le d(x, y) + d(y, z)$. + + If this argument is not specified, the Euclidean distance metric is + used. + + .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29 + p_dist : function, optional + Any function used to give weight to the distance between nodes when + deciding whether or not they should be connected. 
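+        For example, passing ``p_dist=lambda r: r**-2`` reproduces the
+        default behaviour described below.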
`p_dist` was + originally conceived as a probability density function giving the + probability of connecting two nodes that are of metric distance `r` + apart. The implementation here allows for more arbitrary definitions + of `p_dist` that do not need to correspond to valid probability + density functions. The :mod:`scipy.stats` package has many + probability density functions implemented and tools for custom + probability density definitions, and passing the ``.pdf`` method of + `scipy.stats` distributions can be used here. If ``p_dist=None`` + (the default), the exponential function :math:`r^{-2}` is used. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + pos_name : string, default="pos" + The name of the node attribute which represents the position + in 2D coordinates of the node in the returned graph. + weight_name : string, default="weight" + The name of the node attribute which represents the weight + of the node in the returned graph. + + Returns + ------- + Graph + A random geographic threshold graph, undirected and without + self-loops. + + Each node has a node attribute ``pos`` that stores the + position of that node in Euclidean space as provided by the + ``pos`` keyword argument or, if ``pos`` was not provided, as + generated by this function. Similarly, each node has a node + attribute ``weight`` that stores the weight of that node as + provided or as generated. + + Examples + -------- + Specify an alternate distance metric using the ``metric`` keyword + argument. For example, to use the `taxicab metric`_ instead of the + default `Euclidean metric`_:: + + >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y)) + >>> G = nx.geographical_threshold_graph(10, 0.1, metric=dist) + + .. _taxicab metric: https://en.wikipedia.org/wiki/Taxicab_geometry + .. _Euclidean metric: https://en.wikipedia.org/wiki/Euclidean_distance + + Notes + ----- + If weights are not specified they are assigned to nodes by drawing randomly + from the exponential distribution with rate parameter $\lambda=1$. + To specify weights from a different distribution, use the `weight` keyword + argument:: + + >>> import random + >>> n = 20 + >>> w = {i: random.expovariate(5.0) for i in range(n)} + >>> G = nx.geographical_threshold_graph(20, 50, weight=w) + + If node positions are not specified they are randomly assigned from the + uniform distribution. + + References + ---------- + .. [1] Masuda, N., Miwa, H., Konno, N.: + Geographical threshold graphs with small-world and scale-free + properties. + Physical Review E 71, 036108 (2005) + .. [2] Milan Bradonjić, Aric Hagberg and Allon G. Percus, + Giant component and connectivity in geographical threshold graphs, + in Algorithms and Models for the Web-Graph (WAW 2007), + Antony Bonato and Fan Chung (Eds), pp. 209--216, 2007 + """ + G = nx.empty_graph(n) + # If no weights are provided, choose them from an exponential + # distribution. + if weight is None: + weight = {v: seed.expovariate(1) for v in G} + # If no positions are provided, choose uniformly random vectors in + # Euclidean space of the specified dimension. + if pos is None: + pos = {v: [seed.random() for i in range(dim)] for v in G} + # If no distance metric is provided, use Euclidean distance. 
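+    # (`math.dist`, available since Python 3.8, returns the Euclidean
+    # distance between two equal-length coordinate sequences.)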
+ if metric is None: + metric = math.dist + nx.set_node_attributes(G, weight, weight_name) + nx.set_node_attributes(G, pos, pos_name) + + # if p_dist is not supplied, use default r^-2 + if p_dist is None: + + def p_dist(r): + return r**-2 + + # Returns ``True`` if and only if the nodes whose attributes are + # ``du`` and ``dv`` should be joined, according to the threshold + # condition. + def should_join(pair): + u, v = pair + u_pos, v_pos = pos[u], pos[v] + u_weight, v_weight = weight[u], weight[v] + return (u_weight + v_weight) * p_dist(metric(u_pos, v_pos)) >= theta + + G.add_edges_from(filter(should_join, combinations(G, 2))) + return G + + +@py_random_state(6) +@nx._dispatchable(graphs=None, returns_graph=True) +def waxman_graph( + n, + beta=0.4, + alpha=0.1, + L=None, + domain=(0, 0, 1, 1), + metric=None, + seed=None, + *, + pos_name="pos", +): + r"""Returns a Waxman random graph. + + The Waxman random graph model places `n` nodes uniformly at random + in a rectangular domain. Each pair of nodes at distance `d` is + joined by an edge with probability + + .. math:: + p = \beta \exp(-d / \alpha L). + + This function implements both Waxman models, using the `L` keyword + argument. + + * Waxman-1: if `L` is not specified, it is set to be the maximum distance + between any pair of nodes. + * Waxman-2: if `L` is specified, the distance between a pair of nodes is + chosen uniformly at random from the interval `[0, L]`. + + Parameters + ---------- + n : int or iterable + Number of nodes or iterable of nodes + beta: float + Model parameter + alpha: float + Model parameter + L : float, optional + Maximum distance between nodes. If not specified, the actual distance + is calculated. + domain : four-tuple of numbers, optional + Domain size, given as a tuple of the form `(x_min, y_min, x_max, + y_max)`. + metric : function + A metric on vectors of numbers (represented as lists or + tuples). This must be a function that accepts two lists (or + tuples) as input and yields a number as output. The function + must also satisfy the four requirements of a `metric`_. + Specifically, if $d$ is the function and $x$, $y$, + and $z$ are vectors in the graph, then $d$ must satisfy + + 1. $d(x, y) \ge 0$, + 2. $d(x, y) = 0$ if and only if $x = y$, + 3. $d(x, y) = d(y, x)$, + 4. $d(x, z) \le d(x, y) + d(y, z)$. + + If this argument is not specified, the Euclidean distance metric is + used. + + .. _metric: https://en.wikipedia.org/wiki/Metric_%28mathematics%29 + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + pos_name : string, default="pos" + The name of the node attribute which represents the position + in 2D coordinates of the node in the returned graph. + + Returns + ------- + Graph + A random Waxman graph, undirected and without self-loops. Each + node has a node attribute ``'pos'`` that stores the position of + that node in Euclidean space as generated by this function. + + Examples + -------- + Specify an alternate distance metric using the ``metric`` keyword + argument. For example, to use the "`taxicab metric`_" instead of the + default `Euclidean metric`_:: + + >>> dist = lambda x, y: sum(abs(a - b) for a, b in zip(x, y)) + >>> G = nx.waxman_graph(10, 0.5, 0.1, metric=dist) + + .. _taxicab metric: https://en.wikipedia.org/wiki/Taxicab_geometry + .. 
_Euclidean metric: https://en.wikipedia.org/wiki/Euclidean_distance + + Notes + ----- + Starting in NetworkX 2.0 the parameters alpha and beta align with their + usual roles in the probability distribution. In earlier versions their + positions in the expression were reversed. Their position in the calling + sequence reversed as well to minimize backward incompatibility. + + References + ---------- + .. [1] B. M. Waxman, *Routing of multipoint connections*. + IEEE J. Select. Areas Commun. 6(9),(1988) 1617--1622. + """ + G = nx.empty_graph(n) + (xmin, ymin, xmax, ymax) = domain + # Each node gets a uniformly random position in the given rectangle. + pos = {v: (seed.uniform(xmin, xmax), seed.uniform(ymin, ymax)) for v in G} + nx.set_node_attributes(G, pos, pos_name) + # If no distance metric is provided, use Euclidean distance. + if metric is None: + metric = math.dist + # If the maximum distance L is not specified (that is, we are in the + # Waxman-1 model), then find the maximum distance between any pair + # of nodes. + # + # In the Waxman-1 model, join nodes randomly based on distance. In + # the Waxman-2 model, join randomly based on random l. + if L is None: + L = max(metric(x, y) for x, y in combinations(pos.values(), 2)) + + def dist(u, v): + return metric(pos[u], pos[v]) + + else: + + def dist(u, v): + return seed.random() * L + + # `pair` is the pair of nodes to decide whether to join. + def should_join(pair): + return seed.random() < beta * math.exp(-dist(*pair) / (alpha * L)) + + G.add_edges_from(filter(should_join, combinations(G, 2))) + return G + + +@py_random_state(5) +@nx._dispatchable(graphs=None, returns_graph=True) +def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): + r"""Returns a navigable small-world graph. + + A navigable small-world graph is a directed grid with additional long-range + connections that are chosen randomly. + + [...] we begin with a set of nodes [...] that are identified with the set + of lattice points in an $n \times n$ square, + $\{(i, j): i \in \{1, 2, \ldots, n\}, j \in \{1, 2, \ldots, n\}\}$, + and we define the *lattice distance* between two nodes $(i, j)$ and + $(k, l)$ to be the number of "lattice steps" separating them: + $d((i, j), (k, l)) = |k - i| + |l - j|$. + + For a universal constant $p >= 1$, the node $u$ has a directed edge to + every other node within lattice distance $p$---these are its *local + contacts*. For universal constants $q >= 0$ and $r >= 0$ we also + construct directed edges from $u$ to $q$ other nodes (the *long-range + contacts*) using independent random trials; the $i$th directed edge from + $u$ has endpoint $v$ with probability proportional to $[d(u,v)]^{-r}$. + + -- [1]_ + + Parameters + ---------- + n : int + The length of one side of the lattice; the number of nodes in + the graph is therefore $n^2$. + p : int + The diameter of short range connections. Each node is joined with every + other node within this lattice distance. + q : int + The number of long-range connections for each node. + r : float + Exponent for decaying probability of connections. The probability of + connecting to a node at lattice distance $d$ is $1/d^r$. + dim : int + Dimension of grid + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + References + ---------- + .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic + perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000. 
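+
+    Examples
+    --------
+    A minimal illustrative instance; the node count is always ``n**dim``,
+    independent of the random seed:
+
+    >>> G = nx.navigable_small_world_graph(3, seed=42)
+    >>> len(G)
+    9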
+ """ + if p < 1: + raise nx.NetworkXException("p must be >= 1") + if q < 0: + raise nx.NetworkXException("q must be >= 0") + if r < 0: + raise nx.NetworkXException("r must be >= 0") + + G = nx.DiGraph() + nodes = list(product(range(n), repeat=dim)) + for p1 in nodes: + probs = [0] + for p2 in nodes: + if p1 == p2: + continue + d = sum((abs(b - a) for a, b in zip(p1, p2))) + if d <= p: + G.add_edge(p1, p2) + probs.append(d**-r) + cdf = list(accumulate(probs)) + for _ in range(q): + target = nodes[bisect_left(cdf, seed.uniform(0, cdf[-1]))] + G.add_edge(p1, target) + return G + + +@py_random_state(7) +@nx._dispatchable(graphs=None, returns_graph=True) +def thresholded_random_geometric_graph( + n, + radius, + theta, + dim=2, + pos=None, + weight=None, + p=2, + seed=None, + *, + pos_name="pos", + weight_name="weight", +): + r"""Returns a thresholded random geometric graph in the unit cube. + + The thresholded random geometric graph [1] model places `n` nodes + uniformly at random in the unit cube of dimensions `dim`. Each node + `u` is assigned a weight :math:`w_u`. Two nodes `u` and `v` are + joined by an edge if they are within the maximum connection distance, + `radius` computed by the `p`-Minkowski distance and the summation of + weights :math:`w_u` + :math:`w_v` is greater than or equal + to the threshold parameter `theta`. + + Edges within `radius` of each other are determined using a KDTree when + SciPy is available. This reduces the time complexity from :math:`O(n^2)` + to :math:`O(n)`. + + Parameters + ---------- + n : int or iterable + Number of nodes or iterable of nodes + radius: float + Distance threshold value + theta: float + Threshold value + dim : int, optional + Dimension of graph + pos : dict, optional + A dictionary keyed by node with node positions as values. + weight : dict, optional + Node weights as a dictionary of numbers keyed by node. + p : float, optional (default 2) + Which Minkowski distance metric to use. `p` has to meet the condition + ``1 <= p <= infinity``. + + If this argument is not specified, the :math:`L^2` metric + (the Euclidean distance metric), p = 2 is used. + + This should not be confused with the `p` of an Erdős-Rényi random + graph, which represents probability. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + pos_name : string, default="pos" + The name of the node attribute which represents the position + in 2D coordinates of the node in the returned graph. + weight_name : string, default="weight" + The name of the node attribute which represents the weight + of the node in the returned graph. + + Returns + ------- + Graph + A thresholded random geographic graph, undirected and without + self-loops. + + Each node has a node attribute ``'pos'`` that stores the + position of that node in Euclidean space as provided by the + ``pos`` keyword argument or, if ``pos`` was not provided, as + generated by this function. Similarly, each node has a nodethre + attribute ``'weight'`` that stores the weight of that node as + provided or as generated. + + Notes + ----- + This uses a *k*-d tree to build the graph. + + References + ---------- + .. [1] http://cole-maclean.github.io/blog/files/thesis.pdf + + Examples + -------- + Default Graph: + + >>> G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1) + + Custom Graph: + + The `pos` keyword argument can be used to specify node positions so you + can create an arbitrary distribution and domain for positions. 
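+
+    A quick illustrative sketch with explicit positions only (weights then
+    fall back to the exponential default):
+
+    >>> import random
+    >>> pos = {i: (random.random(), random.random()) for i in range(30)}
+    >>> G = nx.thresholded_random_geometric_graph(30, 0.2, 0.1, pos=pos)
+    >>> len(G)
+    30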
+ + If weights are not specified they are assigned to nodes by drawing randomly + from the exponential distribution with rate parameter :math:`\lambda=1`. + To specify weights from a different distribution, use the `weight` keyword + argument. + + For example, create a thresholded random geometric graph on 50 nodes using a 2D + Gaussian distribution of node positions with mean (0, 0) and standard deviation 2, + where nodes are joined by an edge if their sum weights drawn from + a exponential distribution with rate = 5 are >= theta = 0.1 and their + Euclidean distance is at most 0.2. + + >>> import random + >>> n = 50 + >>> pos = {i: (random.gauss(0, 2), random.gauss(0, 2)) for i in range(n)} + >>> w = {i: random.expovariate(5.0) for i in range(n)} + >>> G = nx.thresholded_random_geometric_graph(n, 0.2, 0.1, 2, pos, w) + + """ + G = nx.empty_graph(n) + G.name = f"thresholded_random_geometric_graph({n}, {radius}, {theta}, {dim})" + # If no weights are provided, choose them from an exponential + # distribution. + if weight is None: + weight = {v: seed.expovariate(1) for v in G} + # If no positions are provided, choose uniformly random vectors in + # Euclidean space of the specified dimension. + if pos is None: + pos = {v: [seed.random() for i in range(dim)] for v in G} + # If no distance metric is provided, use Euclidean distance. + nx.set_node_attributes(G, weight, weight_name) + nx.set_node_attributes(G, pos, pos_name) + + edges = ( + (u, v) + for u, v in _geometric_edges(G, radius, p, pos_name) + if weight[u] + weight[v] >= theta + ) + G.add_edges_from(edges) + return G + + +@py_random_state(5) +@nx._dispatchable(graphs=None, returns_graph=True) +def geometric_soft_configuration_graph( + *, beta, n=None, gamma=None, mean_degree=None, kappas=None, seed=None +): + r"""Returns a random graph from the geometric soft configuration model. + + The $\mathbb{S}^1$ model [1]_ is the geometric soft configuration model + which is able to explain many fundamental features of real networks such as + small-world property, heteregenous degree distributions, high level of + clustering, and self-similarity. + + In the geometric soft configuration model, a node $i$ is assigned two hidden + variables: a hidden degree $\kappa_i$, quantifying its popularity, influence, + or importance, and an angular position $\theta_i$ in a circle abstracting the + similarity space, where angular distances between nodes are a proxy for their + similarity. Focusing on the angular position, this model is often called + the $\mathbb{S}^1$ model (a one-dimensional sphere). The circle's radius is + adjusted to $R = N/2\pi$, where $N$ is the number of nodes, so that the density + is set to 1 without loss of generality. + + The connection probability between any pair of nodes increases with + the product of their hidden degrees (i.e., their combined popularities), + and decreases with the angular distance between the two nodes. + Specifically, nodes $i$ and $j$ are connected with the probability + + $p_{ij} = \frac{1}{1 + \frac{d_{ij}^\beta}{\left(\mu \kappa_i \kappa_j\right)^{\max(1, \beta)}}}$ + + where $d_{ij} = R\Delta\theta_{ij}$ is the arc length of the circle between + nodes $i$ and $j$ separated by an angular distance $\Delta\theta_{ij}$. + Parameters $\mu$ and $\beta$ (also called inverse temperature) control the + average degree and the clustering coefficient, respectively. 
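+
+    (For instance, when $d_{ij}^\beta = \left(\mu \kappa_i \kappa_j\right)^{\max(1, \beta)}$,
+    the connection probability is exactly $1/2$; smaller angular distances or
+    larger hidden degrees push $p_{ij}$ toward $1$.)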
+ + It can be shown [2]_ that the model undergoes a structural phase transition + at $\beta=1$ so that for $\beta<1$ networks are unclustered in the thermodynamic + limit (when $N\to \infty$) whereas for $\beta>1$ the ensemble generates + networks with finite clustering coefficient. + + The $\mathbb{S}^1$ model can be expressed as a purely geometric model + $\mathbb{H}^2$ in the hyperbolic plane [3]_ by mapping the hidden degree of + each node into a radial coordinate as + + $r_i = \hat{R} - \frac{2 \max(1, \beta)}{\beta \zeta} \ln \left(\frac{\kappa_i}{\kappa_0}\right)$ + + where $\hat{R}$ is the radius of the hyperbolic disk and $\zeta$ is the curvature, + + $\hat{R} = \frac{2}{\zeta} \ln \left(\frac{N}{\pi}\right) + - \frac{2\max(1, \beta)}{\beta \zeta} \ln (\mu \kappa_0^2)$ + + The connection probability then reads + + $p_{ij} = \frac{1}{1 + \exp\left({\frac{\beta\zeta}{2} (x_{ij} - \hat{R})}\right)}$ + + where + + $x_{ij} = r_i + r_j + \frac{2}{\zeta} \ln \frac{\Delta\theta_{ij}}{2}$ + + is a good approximation of the hyperbolic distance between two nodes separated + by an angular distance $\Delta\theta_{ij}$ with radial coordinates $r_i$ and $r_j$. + For $\beta > 1$, the curvature $\zeta = 1$, for $\beta < 1$, $\zeta = \beta^{-1}$. + + + Parameters + ---------- + Either `n`, `gamma`, `mean_degree` are provided or `kappas`. The values of + `n`, `gamma`, `mean_degree` (if provided) are used to construct a random + kappa-dict keyed by node with values sampled from a power-law distribution. + + beta : positive number + Inverse temperature, controlling the clustering coefficient. + n : int (default: None) + Size of the network (number of nodes). + If not provided, `kappas` must be provided and holds the nodes. + gamma : float (default: None) + Exponent of the power-law distribution for hidden degrees `kappas`. + If not provided, `kappas` must be provided directly. + mean_degree : float (default: None) + The mean degree in the network. + If not provided, `kappas` must be provided directly. + kappas : dict (default: None) + A dict keyed by node to its hidden degree value. + If not provided, random values are computed based on a power-law + distribution using `n`, `gamma` and `mean_degree`. + seed : int, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + Graph + A random geometric soft configuration graph (undirected with no self-loops). + Each node has three node-attributes: + + - ``kappa`` that represents the hidden degree. + + - ``theta`` the position in the similarity space ($\mathbb{S}^1$) which is + also the angular position in the hyperbolic plane. + + - ``radius`` the radial position in the hyperbolic plane + (based on the hidden degree). + + + Examples + -------- + Generate a network with specified parameters: + + >>> G = nx.geometric_soft_configuration_graph( + ... beta=1.5, n=100, gamma=2.7, mean_degree=5 + ... ) + + Create a geometric soft configuration graph with 100 nodes. The $\beta$ parameter + is set to 1.5 and the exponent of the powerlaw distribution of the hidden + degrees is 2.7 with mean value of 5. + + Generate a network with predefined hidden degrees: + + >>> kappas = {i: 10 for i in range(100)} + >>> G = nx.geometric_soft_configuration_graph(beta=2.5, kappas=kappas) + + Create a geometric soft configuration graph with 100 nodes. The $\beta$ parameter + is set to 2.5 and all nodes with hidden degree $\kappa=10$. + + + References + ---------- + .. [1] Serrano, M. Á., Krioukov, D., & Boguñá, M. 
(2008). Self-similarity + of complex networks and hidden metric spaces. Physical review letters, 100(7), 078701. + + .. [2] van der Kolk, J., Serrano, M. Á., & Boguñá, M. (2022). An anomalous + topological phase transition in spatial random graphs. Communications Physics, 5(1), 245. + + .. [3] Krioukov, D., Papadopoulos, F., Kitsak, M., Vahdat, A., & Boguná, M. (2010). + Hyperbolic geometry of complex networks. Physical Review E, 82(3), 036106. + + """ + if beta <= 0: + raise nx.NetworkXError("The parameter beta cannot be smaller or equal to 0.") + + if kappas is not None: + if not all((n is None, gamma is None, mean_degree is None)): + raise nx.NetworkXError( + "When kappas is input, n, gamma and mean_degree must not be." + ) + + n = len(kappas) + mean_degree = sum(kappas) / len(kappas) + else: + if any((n is None, gamma is None, mean_degree is None)): + raise nx.NetworkXError( + "Please provide either kappas, or all 3 of: n, gamma and mean_degree." + ) + + # Generate `n` hidden degrees from a powerlaw distribution + # with given exponent `gamma` and mean value `mean_degree` + gam_ratio = (gamma - 2) / (gamma - 1) + kappa_0 = mean_degree * gam_ratio * (1 - 1 / n) / (1 - 1 / n**gam_ratio) + base = 1 - 1 / n + power = 1 / (1 - gamma) + kappas = {i: kappa_0 * (1 - seed.random() * base) ** power for i in range(n)} + + G = nx.Graph() + R = n / (2 * math.pi) + + # Approximate values for mu in the thermodynamic limit (when n -> infinity) + if beta > 1: + mu = beta * math.sin(math.pi / beta) / (2 * math.pi * mean_degree) + elif beta == 1: + mu = 1 / (2 * mean_degree * math.log(n)) + else: + mu = (1 - beta) / (2**beta * mean_degree * n ** (1 - beta)) + + # Generate random positions on a circle + thetas = {k: seed.uniform(0, 2 * math.pi) for k in kappas} + + for u in kappas: + for v in list(G): + angle = math.pi - math.fabs(math.pi - math.fabs(thetas[u] - thetas[v])) + dij = math.pow(R * angle, beta) + mu_kappas = math.pow(mu * kappas[u] * kappas[v], max(1, beta)) + p_ij = 1 / (1 + dij / mu_kappas) + + # Create an edge with a certain connection probability + if seed.random() < p_ij: + G.add_edge(u, v) + G.add_node(u) + + nx.set_node_attributes(G, thetas, "theta") + nx.set_node_attributes(G, kappas, "kappa") + + # Map hidden degrees into the radial coordinates + zeta = 1 if beta > 1 else 1 / beta + kappa_min = min(kappas.values()) + R_c = 2 * max(1, beta) / (beta * zeta) + R_hat = (2 / zeta) * math.log(n / math.pi) - R_c * math.log(mu * kappa_min) + radii = {node: R_hat - R_c * math.log(kappa) for node, kappa in kappas.items()} + nx.set_node_attributes(G, radii, "radius") + + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/harary_graph.py b/.venv/lib/python3.12/site-packages/networkx/generators/harary_graph.py new file mode 100644 index 0000000..591587d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/harary_graph.py @@ -0,0 +1,199 @@ +"""Generators for Harary graphs + +This module gives two generators for the Harary graph, which was +introduced by the famous mathematician Frank Harary in his 1962 work [H]_. +The first generator gives the Harary graph that maximizes the node +connectivity with given number of nodes and given number of edges. +The second generator gives the Harary graph that minimizes +the number of edges in the graph with given node connectivity and +number of nodes. + +References +---------- +.. [H] Harary, F. "The Maximum Connectivity of a Graph." + Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962. 
+ +""" + +import networkx as nx +from networkx.exception import NetworkXError + +__all__ = ["hnm_harary_graph", "hkn_harary_graph"] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def hnm_harary_graph(n, m, create_using=None): + """Returns the Harary graph with given numbers of nodes and edges. + + The Harary graph $H_{n,m}$ is the graph that maximizes node connectivity + with $n$ nodes and $m$ edges. + + This maximum node connectivity is known to be floor($2m/n$). [1]_ + + Parameters + ---------- + n: integer + The number of nodes the generated graph is to contain + + m: integer + The number of edges the generated graph is to contain + + create_using : NetworkX graph constructor, optional Graph type + to create (default=nx.Graph). If graph instance, then cleared + before populated. + + Returns + ------- + NetworkX graph + The Harary graph $H_{n,m}$. + + See Also + -------- + hkn_harary_graph + + Notes + ----- + This algorithm runs in $O(m)$ time. + It is implemented by following the Reference [2]_. + + References + ---------- + .. [1] F. T. Boesch, A. Satyanarayana, and C. L. Suffel, + "A Survey of Some Network Reliability Analysis and Synthesis Results," + Networks, pp. 99-107, 2009. + + .. [2] Harary, F. "The Maximum Connectivity of a Graph." + Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962. + """ + + if n < 1: + raise NetworkXError("The number of nodes must be >= 1!") + if m < n - 1: + raise NetworkXError("The number of edges must be >= n - 1 !") + if m > n * (n - 1) // 2: + raise NetworkXError("The number of edges must be <= n(n-1)/2") + + # Construct an empty graph with n nodes first + H = nx.empty_graph(n, create_using) + # Get the floor of average node degree + d = 2 * m // n + + # Test the parity of n and d + if (n % 2 == 0) or (d % 2 == 0): + # Start with a regular graph of d degrees + offset = d // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + if d & 1: + # in case d is odd; n must be even in this case + half = n // 2 + for i in range(half): + # add edges diagonally + H.add_edge(i, i + half) + # Get the remainder of 2*m modulo n + r = 2 * m % n + if r > 0: + # add remaining edges at offset+1 + for i in range(r // 2): + H.add_edge(i, i + offset + 1) + else: + # Start with a regular graph of (d - 1) degrees + offset = (d - 1) // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + half = n // 2 + for i in range(m - n * offset): + # add the remaining m - n*offset edges between i and i+half + H.add_edge(i, (i + half) % n) + + return H + + +@nx._dispatchable(graphs=None, returns_graph=True) +def hkn_harary_graph(k, n, create_using=None): + """Returns the Harary graph with given node connectivity and node number. + + The Harary graph $H_{k,n}$ is the graph that minimizes the number of + edges needed with given node connectivity $k$ and node number $n$. + + This smallest number of edges is known to be ceil($kn/2$) [1]_. + + Parameters + ---------- + k: integer + The node connectivity of the generated graph + + n: integer + The number of nodes the generated graph is to contain + + create_using : NetworkX graph constructor, optional Graph type + to create (default=nx.Graph). If graph instance, then cleared + before populated. + + Returns + ------- + NetworkX graph + The Harary graph $H_{k,n}$. + + See Also + -------- + hnm_harary_graph + + Notes + ----- + This algorithm runs in $O(kn)$ time. + It is implemented by following the Reference [2]_. 
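+    Concretely, the construction starts from a circulant-style regular graph
+    in which each node is joined to its nearest neighbors around the cycle,
+    and then adds the remaining "diagonal" edges across the cycle, mirroring
+    the loop structure of the implementation below.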
+ + References + ---------- + .. [1] Weisstein, Eric W. "Harary Graph." From MathWorld--A Wolfram Web + Resource. http://mathworld.wolfram.com/HararyGraph.html. + + .. [2] Harary, F. "The Maximum Connectivity of a Graph." + Proc. Nat. Acad. Sci. USA 48, 1142-1146, 1962. + """ + + if k < 1: + raise NetworkXError("The node connectivity must be >= 1!") + if n < k + 1: + raise NetworkXError("The number of nodes must be >= k+1 !") + + # in case of connectivity 1, simply return the path graph + if k == 1: + H = nx.path_graph(n, create_using) + return H + + # Construct an empty graph with n nodes first + H = nx.empty_graph(n, create_using) + + # Test the parity of k and n + if (k % 2 == 0) or (n % 2 == 0): + # Construct a regular graph with k degrees + offset = k // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + if k & 1: + # odd degree; n must be even in this case + half = n // 2 + for i in range(half): + # add edges diagonally + H.add_edge(i, i + half) + else: + # Construct a regular graph with (k - 1) degrees + offset = (k - 1) // 2 + for i in range(n): + for j in range(1, offset + 1): + H.add_edge(i, (i - j) % n) + H.add_edge(i, (i + j) % n) + half = n // 2 + for i in range(half + 1): + # add half+1 edges between i and i+half + H.add_edge(i, (i + half) % n) + + return H diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/internet_as_graphs.py b/.venv/lib/python3.12/site-packages/networkx/generators/internet_as_graphs.py new file mode 100644 index 0000000..449d543 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/internet_as_graphs.py @@ -0,0 +1,441 @@ +"""Generates graphs resembling the Internet Autonomous System network""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["random_internet_as_graph"] + + +def uniform_int_from_avg(a, m, seed): + """Pick a random integer with uniform probability. + + Returns a random integer uniformly taken from a distribution with + minimum value 'a' and average value 'm', X~U(a,b), E[X]=m, X in N where + b = 2*m - a. + + Notes + ----- + p = (b-floor(b))/2 + X = X1 + X2; X1~U(a,floor(b)), X2~B(p) + E[X] = E[X1] + E[X2] = (floor(b)+a)/2 + (b-floor(b))/2 = (b+a)/2 = m + """ + + from math import floor + + assert m >= a + b = 2 * m - a + p = (b - floor(b)) / 2 + X1 = round(seed.random() * (floor(b) - a) + a) + if seed.random() < p: + X2 = 1 + else: + X2 = 0 + return X1 + X2 + + +def choose_pref_attach(degs, seed): + """Pick a random value, with a probability given by its weight. + + Returns a random choice among degs keys, each of which has a + probability proportional to the corresponding dictionary value. + + Parameters + ---------- + degs: dictionary + It contains the possible values (keys) and the corresponding + probabilities (values) + seed: random state + + Returns + ------- + v: object + A key of degs or None if degs is empty + """ + + if len(degs) == 0: + return None + s = sum(degs.values()) + if s == 0: + return seed.choice(list(degs.keys())) + v = seed.random() * s + + nodes = list(degs.keys()) + i = 0 + acc = degs[nodes[i]] + while v > acc: + i += 1 + acc += degs[nodes[i]] + return nodes[i] + + +class AS_graph_generator: + """Generates random internet AS graphs.""" + + def __init__(self, n, seed): + """Initializes variables. Immediate numbers are taken from [1]. + + Parameters + ---------- + n: integer + Number of graph nodes + seed: random state + Indicator of random number generation state. 
+ See :ref:`Randomness`. + + Returns + ------- + GG: AS_graph_generator object + + References + ---------- + [1] A. Elmokashfi, A. Kvalbein and C. Dovrolis, "On the Scalability of + BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas + in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010. + """ + + self.seed = seed + self.n_t = min(n, round(self.seed.random() * 2 + 4)) # num of T nodes + self.n_m = round(0.15 * n) # number of M nodes + self.n_cp = round(0.05 * n) # number of CP nodes + self.n_c = max(0, n - self.n_t - self.n_m - self.n_cp) # number of C nodes + + self.d_m = 2 + (2.5 * n) / 10000 # average multihoming degree for M nodes + self.d_cp = 2 + (1.5 * n) / 10000 # avg multihoming degree for CP nodes + self.d_c = 1 + (5 * n) / 100000 # average multihoming degree for C nodes + + self.p_m_m = 1 + (2 * n) / 10000 # avg num of peer edges between M and M + self.p_cp_m = 0.2 + (2 * n) / 10000 # avg num of peer edges between CP, M + self.p_cp_cp = 0.05 + (2 * n) / 100000 # avg num of peer edges btwn CP, CP + + self.t_m = 0.375 # probability M's provider is T + self.t_cp = 0.375 # probability CP's provider is T + self.t_c = 0.125 # probability C's provider is T + + def t_graph(self): + """Generates the core mesh network of tier one nodes of a AS graph. + + Returns + ------- + G: Networkx Graph + Core network + """ + + self.G = nx.Graph() + for i in range(self.n_t): + self.G.add_node(i, type="T") + for r in self.regions: + self.regions[r].add(i) + for j in self.G.nodes(): + if i != j: + self.add_edge(i, j, "peer") + self.customers[i] = set() + self.providers[i] = set() + return self.G + + def add_edge(self, i, j, kind): + if kind == "transit": + customer = str(i) + else: + customer = "none" + self.G.add_edge(i, j, type=kind, customer=customer) + + def choose_peer_pref_attach(self, node_list): + """Pick a node with a probability weighted by its peer degree. + + Pick a node from node_list with preferential attachment + computed only on their peer degree + """ + + d = {} + for n in node_list: + d[n] = self.G.nodes[n]["peers"] + return choose_pref_attach(d, self.seed) + + def choose_node_pref_attach(self, node_list): + """Pick a node with a probability weighted by its degree. + + Pick a node from node_list with preferential attachment + computed on their degree + """ + + degs = dict(self.G.degree(node_list)) + return choose_pref_attach(degs, self.seed) + + def add_customer(self, i, j): + """Keep the dictionaries 'customers' and 'providers' consistent.""" + + self.customers[j].add(i) + self.providers[i].add(j) + for z in self.providers[j]: + self.customers[z].add(i) + self.providers[i].add(z) + + def add_node(self, i, kind, reg2prob, avg_deg, t_edge_prob): + """Add a node and its customer transit edges to the graph. + + Parameters + ---------- + i: object + Identifier of the new node + kind: string + Type of the new node. Options are: 'M' for middle node, 'CP' for + content provider and 'C' for customer. + reg2prob: float + Probability the new node can be in two different regions. + avg_deg: float + Average number of transit nodes of which node i is customer. 
+ t_edge_prob: float + Probability node i establish a customer transit edge with a tier + one (T) node + + Returns + ------- + i: object + Identifier of the new node + """ + + regs = 1 # regions in which node resides + if self.seed.random() < reg2prob: # node is in two regions + regs = 2 + node_options = set() + + self.G.add_node(i, type=kind, peers=0) + self.customers[i] = set() + self.providers[i] = set() + self.nodes[kind].add(i) + for r in self.seed.sample(list(self.regions), regs): + node_options = node_options.union(self.regions[r]) + self.regions[r].add(i) + + edge_num = uniform_int_from_avg(1, avg_deg, self.seed) + + t_options = node_options.intersection(self.nodes["T"]) + m_options = node_options.intersection(self.nodes["M"]) + if i in m_options: + m_options.remove(i) + d = 0 + while d < edge_num and (len(t_options) > 0 or len(m_options) > 0): + if len(m_options) == 0 or ( + len(t_options) > 0 and self.seed.random() < t_edge_prob + ): # add edge to a T node + j = self.choose_node_pref_attach(t_options) + t_options.remove(j) + else: + j = self.choose_node_pref_attach(m_options) + m_options.remove(j) + self.add_edge(i, j, "transit") + self.add_customer(i, j) + d += 1 + + return i + + def add_m_peering_link(self, m, to_kind): + """Add a peering link between two middle tier (M) nodes. + + Target node j is drawn considering a preferential attachment based on + other M node peering degree. + + Parameters + ---------- + m: object + Node identifier + to_kind: string + type for target node j (must be always M) + + Returns + ------- + success: boolean + """ + + # candidates are of type 'M' and are not customers of m + node_options = self.nodes["M"].difference(self.customers[m]) + # candidates are not providers of m + node_options = node_options.difference(self.providers[m]) + # remove self + if m in node_options: + node_options.remove(m) + + # remove candidates we are already connected to + for j in self.G.neighbors(m): + if j in node_options: + node_options.remove(j) + + if len(node_options) > 0: + j = self.choose_peer_pref_attach(node_options) + self.add_edge(m, j, "peer") + self.G.nodes[m]["peers"] += 1 + self.G.nodes[j]["peers"] += 1 + return True + else: + return False + + def add_cp_peering_link(self, cp, to_kind): + """Add a peering link to a content provider (CP) node. + + Target node j can be CP or M and it is drawn uniformly among the nodes + belonging to the same region as cp. + + Parameters + ---------- + cp: object + Node identifier + to_kind: string + type for target node j (must be M or CP) + + Returns + ------- + success: boolean + """ + + node_options = set() + for r in self.regions: # options include nodes in the same region(s) + if cp in self.regions[r]: + node_options = node_options.union(self.regions[r]) + + # options are restricted to the indicated kind ('M' or 'CP') + node_options = self.nodes[to_kind].intersection(node_options) + + # remove self + if cp in node_options: + node_options.remove(cp) + + # remove nodes that are cp's providers + node_options = node_options.difference(self.providers[cp]) + + # remove nodes we are already connected to + for j in self.G.neighbors(cp): + if j in node_options: + node_options.remove(j) + + if len(node_options) > 0: + j = self.seed.sample(list(node_options), 1)[0] + self.add_edge(cp, j, "peer") + self.G.nodes[cp]["peers"] += 1 + self.G.nodes[j]["peers"] += 1 + return True + else: + return False + + def graph_regions(self, rn): + """Initializes AS network regions. 
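+
+        Each region is stored in self.regions as an initially empty set of
+        node ids, keyed "REG0", "REG1", ...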
+
+        Parameters
+        ----------
+        rn: integer
+            Number of regions
+        """
+
+        self.regions = {}
+        for i in range(rn):
+            self.regions["REG" + str(i)] = set()
+
+    def add_peering_links(self, from_kind, to_kind):
+        """Utility function to add peering links among node groups."""
+        peer_link_method = None
+        if from_kind == "M":
+            peer_link_method = self.add_m_peering_link
+            m = self.p_m_m
+        if from_kind == "CP":
+            peer_link_method = self.add_cp_peering_link
+            if to_kind == "M":
+                m = self.p_cp_m
+            else:
+                m = self.p_cp_cp
+
+        for i in self.nodes[from_kind]:
+            num = uniform_int_from_avg(0, m, self.seed)
+            for _ in range(num):
+                peer_link_method(i, to_kind)
+
+    def generate(self):
+        """Generates a random AS network graph as described in [1].
+
+        Returns
+        -------
+        G: Graph object
+
+        Notes
+        -----
+        The process steps are the following: first we create the core network
+        of tier one nodes, then we add the middle tier (M), the content
+        provider (CP) and the customer (C) nodes along with their transit
+        edges (link i,j means i is customer of j). Finally we add peering
+        links between M nodes, between M and CP nodes, and between pairs of
+        CP nodes. For a detailed description of the algorithm, please refer
+        to [1].
+
+        References
+        ----------
+        [1] A. Elmokashfi, A. Kvalbein and C. Dovrolis, "On the Scalability of
+        BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas
+        in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010.
+        """
+
+        self.graph_regions(5)
+        self.customers = {}
+        self.providers = {}
+        self.nodes = {"T": set(), "M": set(), "CP": set(), "C": set()}
+
+        self.t_graph()
+        self.nodes["T"] = set(self.G.nodes())
+
+        i = len(self.nodes["T"])
+        for _ in range(self.n_m):
+            self.nodes["M"].add(self.add_node(i, "M", 0.2, self.d_m, self.t_m))
+            i += 1
+        for _ in range(self.n_cp):
+            self.nodes["CP"].add(self.add_node(i, "CP", 0.05, self.d_cp, self.t_cp))
+            i += 1
+        for _ in range(self.n_c):
+            self.nodes["C"].add(self.add_node(i, "C", 0, self.d_c, self.t_c))
+            i += 1
+
+        self.add_peering_links("M", "M")
+        self.add_peering_links("CP", "M")
+        self.add_peering_links("CP", "CP")
+
+        return self.G
+
+
+@py_random_state(1)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_internet_as_graph(n, seed=None):
+    """Generates a random undirected graph resembling the Internet AS network.
+
+    Parameters
+    ----------
+    n: integer in [1000, 10000]
+        Number of graph nodes
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    G: Networkx Graph object
+        A randomly generated undirected graph
+
+    Notes
+    -----
+    This algorithm returns an undirected graph resembling the Internet
+    Autonomous System (AS) network. It uses the approach by Elmokashfi et al.
+    [1]_ and guarantees the properties described in the related paper [1]_.
+
+    Each node models an autonomous system, with an attribute 'type' specifying
+    its kind: tier-1 (T), mid-level (M), customer (C) or content-provider (CP).
+    Each edge models an ADV communication link (hence, bidirectional) with
+    attributes:
+
+    - type: transit|peer, the kind of commercial agreement between nodes;
+    - customer: <node id>, the identifier of the node acting as customer
+      ('none' if type is peer).
+
+    References
+    ----------
+    .. [1] A. Elmokashfi, A. Kvalbein and C. Dovrolis, "On the Scalability of
+       BGP: The Role of Topology Growth," in IEEE Journal on Selected Areas
+       in Communications, vol. 28, no. 8, pp. 1250-1261, October 2010.
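+
+    Examples
+    --------
+    A small smoke test; by construction the generator creates exactly ``n``
+    nodes, split among the four node types:
+
+    >>> G = nx.random_internet_as_graph(1000, seed=42)
+    >>> G.number_of_nodes()
+    1000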
+    """
+
+    GG = AS_graph_generator(n, seed)
+    G = GG.generate()
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/intersection.py b/.venv/lib/python3.12/site-packages/networkx/generators/intersection.py
new file mode 100644
index 0000000..e63af5b
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/intersection.py
@@ -0,0 +1,125 @@
+"""
+Generators for random intersection graphs.
+"""
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = [
+    "uniform_random_intersection_graph",
+    "k_random_intersection_graph",
+    "general_random_intersection_graph",
+]
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def uniform_random_intersection_graph(n, m, p, seed=None):
+    """Returns a uniform random intersection graph.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the first bipartite set (nodes)
+    m : int
+        The number of nodes in the second bipartite set (attributes)
+    p : float
+        Probability of connecting nodes between bipartite sets
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    See Also
+    --------
+    gnp_random_graph
+
+    References
+    ----------
+    .. [1] K.B. Singer-Cohen, Random Intersection Graphs, 1995,
+       PhD thesis, Johns Hopkins University
+    .. [2] Fill, J. A., Scheinerman, E. R., and Singer-Cohen, K. B.,
+       Random intersection graphs when m = ω(n): An equivalence theorem
+       relating the evolution of the G(n, m, p) and G(n, p) models.
+       Random Struct. Algorithms 16, 2 (2000), 156–176.
+    """
+    from networkx.algorithms import bipartite
+
+    G = bipartite.random_graph(n, m, p, seed)
+    return nx.projected_graph(G, range(n))
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def k_random_intersection_graph(n, m, k, seed=None):
+    """Returns an intersection graph in which each node is assigned a
+    randomly chosen attribute set of fixed size `k`.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the first bipartite set (nodes)
+    m : int
+        The number of nodes in the second bipartite set (attributes)
+    k : int
+        Size of the attribute set to assign to each node.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    See Also
+    --------
+    gnp_random_graph, uniform_random_intersection_graph
+
+    References
+    ----------
+    .. [1] Godehardt, E., and Jaworski, J.
+       Two models of random intersection graphs and their applications.
+       Electronic Notes in Discrete Mathematics 10 (2001), 129--132.
+    """
+    G = nx.empty_graph(n + m)
+    mset = range(n, n + m)
+    for v in range(n):
+        targets = seed.sample(mset, k)
+        G.add_edges_from(zip([v] * len(targets), targets))
+    return nx.projected_graph(G, range(n))
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def general_random_intersection_graph(n, m, p, seed=None):
+    """Returns a random intersection graph with independent probabilities
+    for connections between node and attribute sets.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes in the first bipartite set (nodes)
+    m : int
+        The number of nodes in the second bipartite set (attributes)
+    p : list of floats of length m
+        Probabilities for connecting nodes to each attribute
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
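+
+    Returns
+    -------
+    G : Graph
+        The random intersection graph: the projection of the sampled
+        bipartite node-attribute graph onto its first `n` (node) vertices.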
+
+    See Also
+    --------
+    gnp_random_graph, uniform_random_intersection_graph
+
+    References
+    ----------
+    .. [1] Nikoletseas, S. E., Raptopoulos, C., and Spirakis, P. G.
+       The existence and efficient construction of large independent sets
+       in general random intersection graphs. In ICALP (2004), J. Díaz,
+       J. Karhumäki, A. Lepistö, and D. Sannella, Eds., vol. 3142
+       of Lecture Notes in Computer Science, Springer, pp. 1029–1040.
+    """
+    if len(p) != m:
+        raise ValueError("Probability list p must have m elements.")
+    G = nx.empty_graph(n + m)
+    mset = range(n, n + m)
+    for u in range(n):
+        for v, q in zip(mset, p):
+            if seed.random() < q:
+                G.add_edge(u, v)
+    return nx.projected_graph(G, range(n))
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/interval_graph.py b/.venv/lib/python3.12/site-packages/networkx/generators/interval_graph.py
new file mode 100644
index 0000000..6a3fda4
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/interval_graph.py
@@ -0,0 +1,70 @@
+"""
+Generators for interval graphs.
+"""
+
+from collections.abc import Sequence
+
+import networkx as nx
+
+__all__ = ["interval_graph"]
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def interval_graph(intervals):
+    """Generates the interval graph for the given list of intervals.
+
+    In graph theory, an interval graph is an undirected graph formed from a set
+    of closed intervals on the real line, with a vertex for each interval
+    and an edge between vertices whose intervals intersect.
+    It is the intersection graph of the intervals.
+
+    More information can be found at:
+    https://en.wikipedia.org/wiki/Interval_graph
+
+    Parameters
+    ----------
+    intervals : a sequence of intervals, say (l, r) where l is the left end,
+        and r is the right end of the closed interval.
+
+    Returns
+    -------
+    G : networkx graph
+
+    Examples
+    --------
+    >>> intervals = [(-2, 3), [1, 4], (2, 3), (4, 6)]
+    >>> G = nx.interval_graph(intervals)
+    >>> sorted(G.edges)
+    [((-2, 3), (1, 4)), ((-2, 3), (2, 3)), ((1, 4), (2, 3)), ((1, 4), (4, 6))]
+
+    Raises
+    ------
+    :exc:`TypeError`
+        if `intervals` contains None or an element that is not a
+        collections.abc.Sequence of length 2.
+    :exc:`ValueError`
+        if `intervals` contains an interval (min1, max1) with min1 > max1.
+    """
+    intervals = list(intervals)
+    for interval in intervals:
+        if not (isinstance(interval, Sequence) and len(interval) == 2):
+            raise TypeError(
+                "Each interval must have length 2, and be a "
+                "collections.abc.Sequence such as tuple or list."
+            )
+        if interval[0] > interval[1]:
+            raise ValueError(f"Interval must have lower value first.
Got {interval}") + + graph = nx.Graph() + + tupled_intervals = [tuple(interval) for interval in intervals] + graph.add_nodes_from(tupled_intervals) + + while tupled_intervals: + min1, max1 = interval1 = tupled_intervals.pop() + for interval2 in tupled_intervals: + min2, max2 = interval2 + if max1 >= min2 and max2 >= min1: + graph.add_edge(interval1, interval2) + return graph diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/joint_degree_seq.py b/.venv/lib/python3.12/site-packages/networkx/generators/joint_degree_seq.py new file mode 100644 index 0000000..c426df9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/joint_degree_seq.py @@ -0,0 +1,664 @@ +"""Generate graphs with a given joint degree and directed joint degree""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = [ + "is_valid_joint_degree", + "is_valid_directed_joint_degree", + "joint_degree_graph", + "directed_joint_degree_graph", +] + + +@nx._dispatchable(graphs=None) +def is_valid_joint_degree(joint_degrees): + """Checks whether the given joint degree dictionary is realizable. + + A *joint degree dictionary* is a dictionary of dictionaries, in + which entry ``joint_degrees[k][l]`` is an integer representing the + number of edges joining nodes of degree *k* with nodes of degree + *l*. Such a dictionary is realizable as a simple graph if and only + if the following conditions are satisfied. + + - each entry must be an integer, + - the total number of nodes of degree *k*, computed by + ``sum(joint_degrees[k].values()) / k``, must be an integer, + - the total number of edges joining nodes of degree *k* with + nodes of degree *l* cannot exceed the total number of possible edges, + - each diagonal entry ``joint_degrees[k][k]`` must be even (this is + a convention assumed by the :func:`joint_degree_graph` function). + + + Parameters + ---------- + joint_degrees : dictionary of dictionary of integers + A joint degree dictionary in which entry ``joint_degrees[k][l]`` + is the number of edges joining nodes of degree *k* with nodes of + degree *l*. + + Returns + ------- + bool + Whether the given joint degree dictionary is realizable as a + simple graph. + + References + ---------- + .. [1] M. Gjoka, M. Kurant, A. Markopoulou, "2.5K Graphs: from Sampling + to Generation", IEEE Infocom, 2013. + .. [2] I. Stanton, A. Pinar, "Constructing and sampling graphs with a + prescribed joint degree distribution", Journal of Experimental + Algorithmics, 2012. + """ + + degree_count = {} + for k in joint_degrees: + if k > 0: + k_size = sum(joint_degrees[k].values()) / k + if not k_size.is_integer(): + return False + degree_count[k] = k_size + + for k in joint_degrees: + for l in joint_degrees[k]: + if not float(joint_degrees[k][l]).is_integer(): + return False + + if (k != l) and (joint_degrees[k][l] > degree_count[k] * degree_count[l]): + return False + elif k == l: + if joint_degrees[k][k] > degree_count[k] * (degree_count[k] - 1): + return False + if joint_degrees[k][k] % 2 != 0: + return False + + # if all above conditions have been satisfied then the input + # joint degree is realizable as a simple graph. + return True + + +def _neighbor_switch(G, w, unsat, h_node_residual, avoid_node_id=None): + """Releases one free stub for ``w``, while preserving joint degree in G. + + Parameters + ---------- + G : NetworkX graph + Graph in which the neighbor switch will take place. + w : integer + Node id for which we will execute this neighbor switch. 
+ unsat : set of integers + Set of unsaturated node ids that have the same degree as w. + h_node_residual: dictionary of integers + Keeps track of the remaining stubs for a given node. + avoid_node_id: integer + Node id to avoid when selecting w_prime. + + Notes + ----- + First, it selects *w_prime*, an unsaturated node that has the same degree + as ``w``. Second, it selects *switch_node*, a neighbor node of ``w`` that + is not connected to *w_prime*. Then it executes an edge swap i.e. removes + (``w``,*switch_node*) and adds (*w_prime*,*switch_node*). Gjoka et. al. [1] + prove that such an edge swap is always possible. + + References + ---------- + .. [1] M. Gjoka, B. Tillman, A. Markopoulou, "Construction of Simple + Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15 + """ + + if (avoid_node_id is None) or (h_node_residual[avoid_node_id] > 1): + # select unsaturated node w_prime that has the same degree as w + w_prime = next(iter(unsat)) + else: + # assume that the node pair (v,w) has been selected for connection. if + # - neighbor_switch is called for node w, + # - nodes v and w have the same degree, + # - node v=avoid_node_id has only one stub left, + # then prevent v=avoid_node_id from being selected as w_prime. + + iter_var = iter(unsat) + while True: + w_prime = next(iter_var) + if w_prime != avoid_node_id: + break + + # select switch_node, a neighbor of w, that is not connected to w_prime + w_prime_neighbs = G[w_prime] # slightly faster declaring this variable + for v in G[w]: + if (v not in w_prime_neighbs) and (v != w_prime): + switch_node = v + break + + # remove edge (w,switch_node), add edge (w_prime,switch_node) and update + # data structures + G.remove_edge(w, switch_node) + G.add_edge(w_prime, switch_node) + h_node_residual[w] += 1 + h_node_residual[w_prime] -= 1 + if h_node_residual[w_prime] == 0: + unsat.remove(w_prime) + + +@py_random_state(1) +@nx._dispatchable(graphs=None, returns_graph=True) +def joint_degree_graph(joint_degrees, seed=None): + """Generates a random simple graph with the given joint degree dictionary. + + Parameters + ---------- + joint_degrees : dictionary of dictionary of integers + A joint degree dictionary in which entry ``joint_degrees[k][l]`` is the + number of edges joining nodes of degree *k* with nodes of degree *l*. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : Graph + A graph with the specified joint degree dictionary. + + Raises + ------ + NetworkXError + If *joint_degrees* dictionary is not realizable. + + Notes + ----- + In each iteration of the "while loop" the algorithm picks two disconnected + nodes *v* and *w*, of degree *k* and *l* correspondingly, for which + ``joint_degrees[k][l]`` has not reached its target yet. It then adds + edge (*v*, *w*) and increases the number of edges in graph G by one. + + The intelligence of the algorithm lies in the fact that it is always + possible to add an edge between such disconnected nodes *v* and *w*, + even if one or both nodes do not have free stubs. That is made possible by + executing a "neighbor switch", an edge rewiring move that releases + a free stub while keeping the joint degree of G the same. + + The algorithm continues for E (number of edges) iterations of + the "while loop", at the which point all entries of the given + ``joint_degrees[k][l]`` have reached their target values and the + construction is complete. + + References + ---------- + .. [1] M. Gjoka, B. 
Tillman, A. Markopoulou, "Construction of Simple + Graphs with a Target Joint Degree Matrix and Beyond", IEEE Infocom, '15 + + Examples + -------- + >>> joint_degrees = { + ... 1: {4: 1}, + ... 2: {2: 2, 3: 2, 4: 2}, + ... 3: {2: 2, 4: 1}, + ... 4: {1: 1, 2: 2, 3: 1}, + ... } + >>> G = nx.joint_degree_graph(joint_degrees) + >>> + """ + + if not is_valid_joint_degree(joint_degrees): + msg = "Input joint degree dict not realizable as a simple graph" + raise nx.NetworkXError(msg) + + # compute degree count from joint_degrees + degree_count = {k: sum(l.values()) // k for k, l in joint_degrees.items() if k > 0} + + # start with empty N-node graph + N = sum(degree_count.values()) + G = nx.empty_graph(N) + + # for a given degree group, keep the list of all node ids + h_degree_nodelist = {} + + # for a given node, keep track of the remaining stubs + h_node_residual = {} + + # populate h_degree_nodelist and h_node_residual + nodeid = 0 + for degree, num_nodes in degree_count.items(): + h_degree_nodelist[degree] = range(nodeid, nodeid + num_nodes) + for v in h_degree_nodelist[degree]: + h_node_residual[v] = degree + nodeid += int(num_nodes) + + # iterate over every degree pair (k,l) and add the number of edges given + # for each pair + for k in joint_degrees: + for l in joint_degrees[k]: + # n_edges_add is the number of edges to add for the + # degree pair (k,l) + n_edges_add = joint_degrees[k][l] + + if (n_edges_add > 0) and (k >= l): + # number of nodes with degree k and l + k_size = degree_count[k] + l_size = degree_count[l] + + # k_nodes and l_nodes consist of all nodes of degree k and l + k_nodes = h_degree_nodelist[k] + l_nodes = h_degree_nodelist[l] + + # k_unsat and l_unsat consist of nodes of degree k and l that + # are unsaturated (nodes that have at least 1 available stub) + k_unsat = {v for v in k_nodes if h_node_residual[v] > 0} + + if k != l: + l_unsat = {w for w in l_nodes if h_node_residual[w] > 0} + else: + l_unsat = k_unsat + n_edges_add = joint_degrees[k][l] // 2 + + while n_edges_add > 0: + # randomly pick nodes v and w that have degrees k and l + v = k_nodes[seed.randrange(k_size)] + w = l_nodes[seed.randrange(l_size)] + + # if nodes v and w are disconnected then attempt to connect + if not G.has_edge(v, w) and (v != w): + # if node v has no free stubs then do neighbor switch + if h_node_residual[v] == 0: + _neighbor_switch(G, v, k_unsat, h_node_residual) + + # if node w has no free stubs then do neighbor switch + if h_node_residual[w] == 0: + if k != l: + _neighbor_switch(G, w, l_unsat, h_node_residual) + else: + _neighbor_switch( + G, w, l_unsat, h_node_residual, avoid_node_id=v + ) + + # add edge (v, w) and update data structures + G.add_edge(v, w) + h_node_residual[v] -= 1 + h_node_residual[w] -= 1 + n_edges_add -= 1 + + if h_node_residual[v] == 0: + k_unsat.discard(v) + if h_node_residual[w] == 0: + l_unsat.discard(w) + return G + + +@nx._dispatchable(graphs=None) +def is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): + """Checks whether the given directed joint degree input is realizable + + Parameters + ---------- + in_degrees : list of integers + in degree sequence contains the in degrees of nodes. + out_degrees : list of integers + out degree sequence contains the out degrees of nodes. + nkk : dictionary of dictionary of integers + directed joint degree dictionary. for nodes of out degree k (first + level of dict) and nodes of in degree l (second level of dict) + describes the number of edges. 
+ + Returns + ------- + boolean + returns true if given input is realizable, else returns false. + + Notes + ----- + Here is the list of conditions that the inputs (in/out degree sequences, + nkk) need to satisfy for simple directed graph realizability: + + - Condition 0: in_degrees and out_degrees have the same length + - Condition 1: nkk[k][l] is integer for all k,l + - Condition 2: sum(nkk[k])/k = number of nodes with partition id k, is an + integer and matching degree sequence + - Condition 3: number of edges and non-chords between k and l cannot exceed + maximum possible number of edges + + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. + """ + V = {} # number of nodes with in/out degree. + forbidden = {} + if len(in_degrees) != len(out_degrees): + return False + + for idx in range(len(in_degrees)): + i = in_degrees[idx] + o = out_degrees[idx] + V[(i, 0)] = V.get((i, 0), 0) + 1 + V[(o, 1)] = V.get((o, 1), 0) + 1 + + forbidden[(o, i)] = forbidden.get((o, i), 0) + 1 + + S = {} # number of edges going from in/out degree nodes. + for k in nkk: + for l in nkk[k]: + val = nkk[k][l] + if not float(val).is_integer(): # condition 1 + return False + + if val > 0: + S[(k, 1)] = S.get((k, 1), 0) + val + S[(l, 0)] = S.get((l, 0), 0) + val + # condition 3 + if val + forbidden.get((k, l), 0) > V[(k, 1)] * V[(l, 0)]: + return False + + return all(S[s] / s[0] == V[s] for s in S) + + +def _directed_neighbor_switch( + G, w, unsat, h_node_residual_out, chords, h_partition_in, partition +): + """Releases one free stub for node w, while preserving joint degree in G. + + Parameters + ---------- + G : networkx directed graph + graph within which the edge swap will take place. + w : integer + node id for which we need to perform a neighbor switch. + unsat: set of integers + set of node ids that have the same degree as w and are unsaturated. + h_node_residual_out: dict of integers + for a given node, keeps track of the remaining stubs to be added. + chords: set of tuples + keeps track of available positions to add edges. + h_partition_in: dict of integers + for a given node, keeps track of its partition id (in degree). + partition: integer + partition id to check if chords have to be updated. + + Notes + ----- + First, it selects node w_prime that (1) has the same degree as w and + (2) is unsaturated. Then, it selects node v, a neighbor of w, that is + not connected to w_prime and does an edge swap i.e. removes (w,v) and + adds (w_prime,v). If neighbor switch is not possible for w using + w_prime and v, then return w_prime; in [1] it's proven that + such unsaturated nodes can be used. + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. 
+ """ + w_prime = unsat.pop() + unsat.add(w_prime) + # select node t, a neighbor of w, that is not connected to w_prime + w_neighbs = list(G.successors(w)) + # slightly faster declaring this variable + w_prime_neighbs = list(G.successors(w_prime)) + + for v in w_neighbs: + if (v not in w_prime_neighbs) and w_prime != v: + # removes (w,v), add (w_prime,v) and update data structures + G.remove_edge(w, v) + G.add_edge(w_prime, v) + + if h_partition_in[v] == partition: + chords.add((w, v)) + chords.discard((w_prime, v)) + + h_node_residual_out[w] += 1 + h_node_residual_out[w_prime] -= 1 + if h_node_residual_out[w_prime] == 0: + unsat.remove(w_prime) + return None + + # If neighbor switch didn't work, use unsaturated node + return w_prime + + +def _directed_neighbor_switch_rev( + G, w, unsat, h_node_residual_in, chords, h_partition_out, partition +): + """The reverse of directed_neighbor_switch. + + Parameters + ---------- + G : networkx directed graph + graph within which the edge swap will take place. + w : integer + node id for which we need to perform a neighbor switch. + unsat: set of integers + set of node ids that have the same degree as w and are unsaturated. + h_node_residual_in: dict of integers + for a given node, keeps track of the remaining stubs to be added. + chords: set of tuples + keeps track of available positions to add edges. + h_partition_out: dict of integers + for a given node, keeps track of its partition id (out degree). + partition: integer + partition id to check if chords have to be updated. + + Notes + ----- + Same operation as directed_neighbor_switch except it handles this operation + for incoming edges instead of outgoing. + """ + w_prime = unsat.pop() + unsat.add(w_prime) + # slightly faster declaring these as variables. + w_neighbs = list(G.predecessors(w)) + w_prime_neighbs = list(G.predecessors(w_prime)) + # select node v, a neighbor of w, that is not connected to w_prime. + for v in w_neighbs: + if (v not in w_prime_neighbs) and w_prime != v: + # removes (v,w), add (v,w_prime) and update data structures. + G.remove_edge(v, w) + G.add_edge(v, w_prime) + if h_partition_out[v] == partition: + chords.add((v, w)) + chords.discard((v, w_prime)) + + h_node_residual_in[w] += 1 + h_node_residual_in[w_prime] -= 1 + if h_node_residual_in[w_prime] == 0: + unsat.remove(w_prime) + return None + + # If neighbor switch didn't work, use the unsaturated node. + return w_prime + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def directed_joint_degree_graph(in_degrees, out_degrees, nkk, seed=None): + """Generates a random simple directed graph with the joint degree. + + Parameters + ---------- + degree_seq : list of tuples (of size 3) + degree sequence contains tuples of nodes with node id, in degree and + out degree. + nkk : dictionary of dictionary of integers + directed joint degree dictionary, for nodes of out degree k (first + level of dict) and nodes of in degree l (second level of dict) + describes the number of edges. + seed : hashable object, optional + Seed for random number generator. + + Returns + ------- + G : Graph + A directed graph with the specified inputs. + + Raises + ------ + NetworkXError + If degree_seq and nkk are not realizable as a simple directed graph. + + + Notes + ----- + Similarly to the undirected version: + In each iteration of the "while loop" the algorithm picks two disconnected + nodes v and w, of degree k and l correspondingly, for which nkk[k][l] has + not reached its target yet i.e. 
(for given k,l): n_edges_add < nkk[k][l]. + It then adds edge (v,w) and always increases the number of edges in graph G + by one. + + The intelligence of the algorithm lies in the fact that it is always + possible to add an edge between disconnected nodes v and w, for which + nkk[degree(v)][degree(w)] has not reached its target, even if one or both + nodes do not have free stubs. If either node v or w does not have a free + stub, we perform a "neighbor switch", an edge rewiring move that releases a + free stub while keeping nkk the same. + + The difference for the directed version lies in the fact that neighbor + switches might not be able to rewire, but in these cases unsaturated nodes + can be reassigned to use instead, see [1] for detailed description and + proofs. + + The algorithm continues for E (number of edges in the graph) iterations of + the "while loop", at which point all entries of the given nkk[k][l] have + reached their target values and the construction is complete. + + References + ---------- + [1] B. Tillman, A. Markopoulou, C. T. Butts & M. Gjoka, + "Construction of Directed 2K Graphs". In Proc. of KDD 2017. + + Examples + -------- + >>> in_degrees = [0, 1, 1, 2] + >>> out_degrees = [1, 1, 1, 1] + >>> nkk = {1: {1: 2, 2: 2}} + >>> G = nx.directed_joint_degree_graph(in_degrees, out_degrees, nkk) + >>> + """ + if not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk): + msg = "Input is not realizable as a simple graph" + raise nx.NetworkXError(msg) + + # start with an empty directed graph. + G = nx.DiGraph() + + # for a given group, keep the list of all node ids. + h_degree_nodelist_in = {} + h_degree_nodelist_out = {} + # for a given group, keep the list of all unsaturated node ids. + h_degree_nodelist_in_unsat = {} + h_degree_nodelist_out_unsat = {} + # for a given node, keep track of the remaining stubs to be added. + h_node_residual_out = {} + h_node_residual_in = {} + # for a given node, keep track of the partition id. + h_partition_out = {} + h_partition_in = {} + # keep track of non-chords between pairs of partition ids. + non_chords = {} + + # populate data structures + for idx, i in enumerate(in_degrees): + idx = int(idx) + if i > 0: + h_degree_nodelist_in.setdefault(i, []) + h_degree_nodelist_in_unsat.setdefault(i, set()) + h_degree_nodelist_in[i].append(idx) + h_degree_nodelist_in_unsat[i].add(idx) + h_node_residual_in[idx] = i + h_partition_in[idx] = i + + for idx, o in enumerate(out_degrees): + o = out_degrees[idx] + non_chords[(o, in_degrees[idx])] = non_chords.get((o, in_degrees[idx]), 0) + 1 + idx = int(idx) + if o > 0: + h_degree_nodelist_out.setdefault(o, []) + h_degree_nodelist_out_unsat.setdefault(o, set()) + h_degree_nodelist_out[o].append(idx) + h_degree_nodelist_out_unsat[o].add(idx) + h_node_residual_out[idx] = o + h_partition_out[idx] = o + + G.add_node(idx) + + nk_in = {} + nk_out = {} + for p in h_degree_nodelist_in: + nk_in[p] = len(h_degree_nodelist_in[p]) + for p in h_degree_nodelist_out: + nk_out[p] = len(h_degree_nodelist_out[p]) + + # iterate over every degree pair (k,l) and add the number of edges given + # for each pair. + for k in nkk: + for l in nkk[k]: + n_edges_add = nkk[k][l] + + if n_edges_add > 0: + # chords contains a random set of potential edges. 
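+                # The sample below is padded by non_chords[(k, l)], the number
+                # of nodes with out-degree k and in-degree l: each such node
+                # occupies one grid position where the out node equals the in
+                # node (i == j), which would be a self-loop and is skipped.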
+ chords = set() + + k_len = nk_out[k] + l_len = nk_in[l] + chords_sample = seed.sample( + range(k_len * l_len), n_edges_add + non_chords.get((k, l), 0) + ) + + num = 0 + while len(chords) < n_edges_add: + i = h_degree_nodelist_out[k][chords_sample[num] % k_len] + j = h_degree_nodelist_in[l][chords_sample[num] // k_len] + num += 1 + if i != j: + chords.add((i, j)) + + # k_unsat and l_unsat consist of nodes of in/out degree k and l + # that are unsaturated i.e. those nodes that have at least one + # available stub + k_unsat = h_degree_nodelist_out_unsat[k] + l_unsat = h_degree_nodelist_in_unsat[l] + + while n_edges_add > 0: + v, w = chords.pop() + chords.add((v, w)) + + # if node v has no free stubs then do neighbor switch. + if h_node_residual_out[v] == 0: + _v = _directed_neighbor_switch( + G, + v, + k_unsat, + h_node_residual_out, + chords, + h_partition_in, + l, + ) + if _v is not None: + v = _v + + # if node w has no free stubs then do neighbor switch. + if h_node_residual_in[w] == 0: + _w = _directed_neighbor_switch_rev( + G, + w, + l_unsat, + h_node_residual_in, + chords, + h_partition_out, + k, + ) + if _w is not None: + w = _w + + # add edge (v,w) and update data structures. + G.add_edge(v, w) + h_node_residual_out[v] -= 1 + h_node_residual_in[w] -= 1 + n_edges_add -= 1 + chords.discard((v, w)) + + if h_node_residual_out[v] == 0: + k_unsat.discard(v) + if h_node_residual_in[w] == 0: + l_unsat.discard(w) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/lattice.py b/.venv/lib/python3.12/site-packages/networkx/generators/lattice.py new file mode 100644 index 0000000..95e520d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/lattice.py @@ -0,0 +1,367 @@ +"""Functions for generating grid graphs and lattices + +The :func:`grid_2d_graph`, :func:`triangular_lattice_graph`, and +:func:`hexagonal_lattice_graph` functions correspond to the three +`regular tilings of the plane`_, the square, triangular, and hexagonal +tilings, respectively. :func:`grid_graph` and :func:`hypercube_graph` +are similar for arbitrary dimensions. Useful relevant discussion can +be found about `Triangular Tiling`_, and `Square, Hex and Triangle Grids`_ + +.. _regular tilings of the plane: https://en.wikipedia.org/wiki/List_of_regular_polytopes_and_compounds#Euclidean_tilings +.. _Square, Hex and Triangle Grids: http://www-cs-students.stanford.edu/~amitp/game-programming/grids/ +.. _Triangular Tiling: https://en.wikipedia.org/wiki/Triangular_tiling + +""" + +from itertools import repeat +from math import sqrt + +import networkx as nx +from networkx.classes import set_node_attributes +from networkx.exception import NetworkXError +from networkx.generators.classic import cycle_graph, empty_graph, path_graph +from networkx.relabel import relabel_nodes +from networkx.utils import flatten, nodes_or_number, pairwise + +__all__ = [ + "grid_2d_graph", + "grid_graph", + "hypercube_graph", + "triangular_lattice_graph", + "hexagonal_lattice_graph", +] + + +@nx._dispatchable(graphs=None, returns_graph=True) +@nodes_or_number([0, 1]) +def grid_2d_graph(m, n, periodic=False, create_using=None): + """Returns the two-dimensional grid graph. + + The grid graph has each node connected to its four nearest neighbors. + + Parameters + ---------- + m, n : int or iterable container of nodes + If an integer, nodes are from `range(n)`. + If a container, elements become the coordinate of the nodes. + + periodic : bool or iterable + If `periodic` is True, both dimensions are periodic. 
If False, none are periodic. If `periodic` is iterable, it should
+        yield 2 bool values indicating whether the 1st and 2nd axes,
+        respectively, are periodic.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    NetworkX graph
+        The (possibly periodic) grid graph of the specified dimensions.
+
+    """
+    G = empty_graph(0, create_using)
+    row_name, rows = m
+    col_name, cols = n
+    G.add_nodes_from((i, j) for i in rows for j in cols)
+    G.add_edges_from(((i, j), (pi, j)) for pi, i in pairwise(rows) for j in cols)
+    G.add_edges_from(((i, j), (i, pj)) for i in rows for pj, j in pairwise(cols))
+
+    try:
+        periodic_r, periodic_c = periodic
+    except TypeError:
+        periodic_r = periodic_c = periodic
+
+    if periodic_r and len(rows) > 2:
+        first = rows[0]
+        last = rows[-1]
+        G.add_edges_from(((first, j), (last, j)) for j in cols)
+    if periodic_c and len(cols) > 2:
+        first = cols[0]
+        last = cols[-1]
+        G.add_edges_from(((i, first), (i, last)) for i in rows)
+    # both directions for directed
+    if G.is_directed():
+        G.add_edges_from((v, u) for u, v in G.edges())
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def grid_graph(dim, periodic=False):
+    """Returns the *n*-dimensional grid graph.
+
+    The dimension *n* is the length of the list `dim` and the size in
+    each dimension is the value of the corresponding list element.
+
+    Parameters
+    ----------
+    dim : list or tuple of numbers or iterables of nodes
+        `dim` is a tuple or list with, for each dimension, either a number
+        that is the size of that dimension or an iterable of nodes for
+        that dimension. The dimension of the grid_graph is the length
+        of `dim`.
+
+    periodic : bool or iterable
+        If `periodic` is True, all dimensions are periodic. If False, no
+        dimensions are periodic. If `periodic` is iterable, it should
+        yield `dim` bool values each of which indicates whether the
+        corresponding axis is periodic.
+
+    Returns
+    -------
+    NetworkX graph
+        The (possibly periodic) grid graph of the specified dimensions.
+
+    Examples
+    --------
+    To produce a 2 by 3 by 4 grid graph, a graph on 24 nodes:
+
+    >>> from networkx import grid_graph
+    >>> G = grid_graph(dim=(2, 3, 4))
+    >>> len(G)
+    24
+    >>> G = grid_graph(dim=(range(7, 9), range(3, 6)))
+    >>> len(G)
+    6
+    """
+    from networkx.algorithms.operators.product import cartesian_product
+
+    if not dim:
+        return empty_graph(0)
+
+    try:
+        func = (cycle_graph if p else path_graph for p in periodic)
+    except TypeError:
+        func = repeat(cycle_graph if periodic else path_graph)
+
+    G = next(func)(dim[0])
+    for current_dim in dim[1:]:
+        Gnew = next(func)(current_dim)
+        G = cartesian_product(Gnew, G)
+    # graph G is done but has labels of the form (1, (2, (3, 1))) so relabel
+    H = relabel_nodes(G, flatten)
+    return H
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def hypercube_graph(n):
+    """Returns the *n*-dimensional hypercube graph.
+
+    The nodes are tuples of ``n`` binary values, so the graph has
+    ``2 ** n`` nodes in total.
+
+    For more information on the hypercube graph, see the Wikipedia
+    article `Hypercube graph`_.
+
+    .. _Hypercube graph: https://en.wikipedia.org/wiki/Hypercube_graph
+
+    Parameters
+    ----------
+    n : int
+        The dimension of the hypercube.
+        The number of nodes in the graph will be ``2 ** n``.
+
+    Returns
+    -------
+    NetworkX graph
+        The hypercube graph of dimension *n*.
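+
+    Examples
+    --------
+    A quick check on the 3-cube (nodes are binary tuples such as
+    ``(0, 1, 0)``); the cube on ``2 ** 3`` nodes has ``3 * 2 ** 2`` edges:
+
+    >>> G = nx.hypercube_graph(3)
+    >>> len(G), G.number_of_edges()
+    (8, 12)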
+    """
+    dim = n * [2]
+    G = grid_graph(dim)
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def triangular_lattice_graph(
+    m, n, periodic=False, with_positions=True, create_using=None
+):
+    r"""Returns the $m$ by $n$ triangular lattice graph.
+
+    The `triangular lattice graph`_ is a two-dimensional `grid graph`_ in
+    which each square unit has a diagonal edge (each grid unit has a chord).
+
+    The returned graph has $m$ rows and $n$ columns of triangles. Rows and
+    columns include both triangles pointing up and down. Rows form a strip
+    of constant height. Columns form a series of diamond shapes, staggered
+    with the columns on either side. Another way to state the size is that
+    the nodes form a grid of `m+1` rows and `(n + 1) // 2` columns.
+    The odd row nodes are shifted horizontally relative to the even rows.
+
+    Directed graph types have edges pointed up or right.
+
+    Positions of nodes are computed by default (when `with_positions` is
+    True). The position of each node (embedded in a Euclidean plane) is
+    stored in the graph using equilateral triangles with side length 1.
+    The height between rows of nodes is thus $\sqrt{3}/2$.
+    Nodes lie in the first quadrant with the node $(0, 0)$ at the origin.
+
+    .. _triangular lattice graph: http://mathworld.wolfram.com/TriangularGrid.html
+    .. _grid graph: http://www-cs-students.stanford.edu/~amitp/game-programming/grids/
+    .. _Triangular Tiling: https://en.wikipedia.org/wiki/Triangular_tiling
+
+    Parameters
+    ----------
+    m : int
+        The number of rows in the lattice.
+
+    n : int
+        The number of columns in the lattice.
+
+    periodic : bool (default: False)
+        If True, join the boundary vertices of the grid using periodic
+        boundary conditions. The join between boundaries is the final row
+        and column of triangles. This means there is one row and one column
+        fewer nodes for the periodic lattice. Periodic lattices require
+        `m >= 3`, `n >= 5` and are allowed but misaligned if `m` or `n` are odd.
+
+    with_positions : bool (default: True)
+        Store the coordinates of each node in the graph node attribute 'pos'.
+        The coordinates provide a lattice with equilateral triangles.
+        Periodic positions shift the nodes vertically in a nonlinear way so
+        the edges don't overlap so much.
+
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    NetworkX graph
+        The *m* by *n* triangular lattice graph.
+    """
+    H = empty_graph(0, create_using)
+    if n == 0 or m == 0:
+        return H
+    if periodic:
+        if n < 5 or m < 3:
+            msg = f"m > 2 and n > 4 required for periodic.
m={m}, n={n}" + raise NetworkXError(msg) + + N = (n + 1) // 2 # number of nodes in row + rows = range(m + 1) + cols = range(N + 1) + # Make grid + H.add_edges_from(((i, j), (i + 1, j)) for j in rows for i in cols[:N]) + H.add_edges_from(((i, j), (i, j + 1)) for j in rows[:m] for i in cols) + # add diagonals + H.add_edges_from(((i, j), (i + 1, j + 1)) for j in rows[1:m:2] for i in cols[:N]) + H.add_edges_from(((i + 1, j), (i, j + 1)) for j in rows[:m:2] for i in cols[:N]) + # identify boundary nodes if periodic + from networkx.algorithms.minors import contracted_nodes + + if periodic is True: + for i in cols: + H = contracted_nodes(H, (i, 0), (i, m)) + for j in rows[:m]: + H = contracted_nodes(H, (0, j), (N, j)) + elif n % 2: + # remove extra nodes + H.remove_nodes_from((N, j) for j in rows[1::2]) + + # Add position node attributes + if with_positions: + ii = (i for i in cols for j in rows) + jj = (j for i in cols for j in rows) + xx = (0.5 * (j % 2) + i for i in cols for j in rows) + h = sqrt(3) / 2 + if periodic: + yy = (h * j + 0.01 * i * i for i in cols for j in rows) + else: + yy = (h * j for i in cols for j in rows) + pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in H} + set_node_attributes(H, pos, "pos") + return H + + +@nx._dispatchable(graphs=None, returns_graph=True) +def hexagonal_lattice_graph( + m, n, periodic=False, with_positions=True, create_using=None +): + """Returns an `m` by `n` hexagonal lattice graph. + + The *hexagonal lattice graph* is a graph whose nodes and edges are + the `hexagonal tiling`_ of the plane. + + The returned graph will have `m` rows and `n` columns of hexagons. + `Odd numbered columns`_ are shifted up relative to even numbered columns. + + Positions of nodes are computed by default or `with_positions is True`. + Node positions creating the standard embedding in the plane + with sidelength 1 and are stored in the node attribute 'pos'. + `pos = nx.get_node_attributes(G, 'pos')` creates a dict ready for drawing. + + .. _hexagonal tiling: https://en.wikipedia.org/wiki/Hexagonal_tiling + .. _Odd numbered columns: http://www-cs-students.stanford.edu/~amitp/game-programming/grids/ + + Parameters + ---------- + m : int + The number of rows of hexagons in the lattice. + + n : int + The number of columns of hexagons in the lattice. + + periodic : bool + Whether to make a periodic grid by joining the boundary vertices. + For this to work `n` must be even and both `n > 1` and `m > 1`. + The periodic connections create another row and column of hexagons + so these graphs have fewer nodes as boundary nodes are identified. + + with_positions : bool (default: True) + Store the coordinates of each node in the graph node attribute 'pos'. + The coordinates provide a lattice with vertical columns of hexagons + offset to interleave and cover the plane. + Periodic positions shift the nodes vertically in a nonlinear way so + the edges don't overlap so much. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + If graph is directed, edges will point up or right. + + Returns + ------- + NetworkX graph + The *m* by *n* hexagonal lattice graph. 
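+
+    Examples
+    --------
+    A sketch of the node count for a small lattice (a non-periodic ``m`` by
+    ``n`` hexagonal lattice has ``2 * (m + 1) * (n + 1) - 2`` nodes):
+
+    >>> G = nx.hexagonal_lattice_graph(2, 2)
+    >>> len(G)
+    16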
+ """ + G = empty_graph(0, create_using) + if m == 0 or n == 0: + return G + if periodic and (n % 2 == 1 or m < 2 or n < 2): + msg = "periodic hexagonal lattice needs m > 1, n > 1 and even n" + raise NetworkXError(msg) + + M = 2 * m # twice as many nodes as hexagons vertically + rows = range(M + 2) + cols = range(n + 1) + # make lattice + col_edges = (((i, j), (i, j + 1)) for i in cols for j in rows[: M + 1]) + row_edges = (((i, j), (i + 1, j)) for i in cols[:n] for j in rows if i % 2 == j % 2) + G.add_edges_from(col_edges) + G.add_edges_from(row_edges) + # Remove corner nodes with one edge + G.remove_node((0, M + 1)) + G.remove_node((n, (M + 1) * (n % 2))) + + # identify boundary nodes if periodic + from networkx.algorithms.minors import contracted_nodes + + if periodic: + for i in cols[:n]: + G = contracted_nodes(G, (i, 0), (i, M)) + for i in cols[1:]: + G = contracted_nodes(G, (i, 1), (i, M + 1)) + for j in rows[1:M]: + G = contracted_nodes(G, (0, j), (n, j)) + G.remove_node((n, M)) + + # calc position in embedded space + ii = (i for i in cols for j in rows) + jj = (j for i in cols for j in rows) + xx = (0.5 + i + i // 2 + (j % 2) * ((i % 2) - 0.5) for i in cols for j in rows) + h = sqrt(3) / 2 + if periodic: + yy = (h * j + 0.01 * i * i for i in cols for j in rows) + else: + yy = (h * j for i in cols for j in rows) + # exclude nodes not in G + pos = {(i, j): (x, y) for i, j, x, y in zip(ii, jj, xx, yy) if (i, j) in G} + set_node_attributes(G, pos, "pos") + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/line.py b/.venv/lib/python3.12/site-packages/networkx/generators/line.py new file mode 100644 index 0000000..2c6274a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/line.py @@ -0,0 +1,501 @@ +"""Functions for generating line graphs.""" + +from collections import defaultdict +from functools import partial +from itertools import combinations + +import networkx as nx +from networkx.utils import arbitrary_element +from networkx.utils.decorators import not_implemented_for + +__all__ = ["line_graph", "inverse_line_graph"] + + +@nx._dispatchable(returns_graph=True) +def line_graph(G, create_using=None): + r"""Returns the line graph of the graph or digraph `G`. + + The line graph of a graph `G` has a node for each edge in `G` and an + edge joining those nodes if the two edges in `G` share a common node. For + directed graphs, nodes are adjacent exactly when the edges they represent + form a directed path of length two. + + The nodes of the line graph are 2-tuples of nodes in the original graph (or + 3-tuples for multigraphs, with the key of the edge as the third element). + + For information about self-loops and more discussion, see the **Notes** + section below. + + Parameters + ---------- + G : graph + A NetworkX Graph, DiGraph, MultiGraph, or MultiDigraph. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + L : graph + The line graph of G. 
+ + Examples + -------- + >>> G = nx.star_graph(3) + >>> L = nx.line_graph(G) + >>> print(sorted(map(sorted, L.edges()))) # makes a 3-clique, K3 + [[(0, 1), (0, 2)], [(0, 1), (0, 3)], [(0, 2), (0, 3)]] + + Edge attributes from `G` are not copied over as node attributes in `L`, but + attributes can be copied manually: + + >>> G = nx.path_graph(4) + >>> G.add_edges_from((u, v, {"tot": u + v}) for u, v in G.edges) + >>> G.edges(data=True) + EdgeDataView([(0, 1, {'tot': 1}), (1, 2, {'tot': 3}), (2, 3, {'tot': 5})]) + >>> H = nx.line_graph(G) + >>> H.add_nodes_from((node, G.edges[node]) for node in H) + >>> H.nodes(data=True) + NodeDataView({(0, 1): {'tot': 1}, (2, 3): {'tot': 5}, (1, 2): {'tot': 3}}) + + Notes + ----- + Graph, node, and edge data are not propagated to the new graph. For + undirected graphs, the nodes in G must be sortable, otherwise the + constructed line graph may not be correct. + + *Self-loops in undirected graphs* + + For an undirected graph `G` without multiple edges, each edge can be + written as a set `\{u, v\}`. Its line graph `L` has the edges of `G` as + its nodes. If `x` and `y` are two nodes in `L`, then `\{x, y\}` is an edge + in `L` if and only if the intersection of `x` and `y` is nonempty. Thus, + the set of all edges is determined by the set of all pairwise intersections + of edges in `G`. + + Trivially, every edge in G would have a nonzero intersection with itself, + and so every node in `L` should have a self-loop. This is not so + interesting, and the original context of line graphs was with simple + graphs, which had no self-loops or multiple edges. The line graph was also + meant to be a simple graph and thus, self-loops in `L` are not part of the + standard definition of a line graph. In a pairwise intersection matrix, + this is analogous to excluding the diagonal entries from the line graph + definition. + + Self-loops and multiple edges in `G` add nodes to `L` in a natural way, and + do not require any fundamental changes to the definition. It might be + argued that the self-loops we excluded before should now be included. + However, the self-loops are still "trivial" in some sense and thus, are + usually excluded. + + *Self-loops in directed graphs* + + For a directed graph `G` without multiple edges, each edge can be written + as a tuple `(u, v)`. Its line graph `L` has the edges of `G` as its + nodes. If `x` and `y` are two nodes in `L`, then `(x, y)` is an edge in `L` + if and only if the tail of `x` matches the head of `y`, for example, if `x + = (a, b)` and `y = (b, c)` for some vertices `a`, `b`, and `c` in `G`. + + Due to the directed nature of the edges, it is no longer the case that + every edge in `G` should have a self-loop in `L`. Now, the only time + self-loops arise is if a node in `G` itself has a self-loop. So such + self-loops are no longer "trivial" but instead, represent essential + features of the topology of `G`. For this reason, the historical + development of line digraphs is such that self-loops are included. When the + graph `G` has multiple edges, once again only superficial changes are + required to the definition. + + References + ---------- + * Harary, Frank, and Norman, Robert Z., "Some properties of line digraphs", + Rend. Circ. Mat. Palermo, II. Ser. 9 (1960), 161--168. + * Hemminger, R. L.; Beineke, L. W. (1978), "Line graphs and line digraphs", + in Beineke, L. W.; Wilson, R. J., Selected Topics in Graph Theory, + Academic Press Inc., pp. 271--305. 
+ + """ + if G.is_directed(): + L = _lg_directed(G, create_using=create_using) + else: + L = _lg_undirected(G, selfloops=False, create_using=create_using) + return L + + +def _lg_directed(G, create_using=None): + """Returns the line graph L of the (multi)digraph G. + + Edges in G appear as nodes in L, represented as tuples of the form (u,v) + or (u,v,key) if G is a multidigraph. A node in L corresponding to the edge + (u,v) is connected to every node corresponding to an edge (v,w). + + Parameters + ---------- + G : digraph + A directed graph or directed multigraph. + create_using : NetworkX graph constructor, optional + Graph type to create. If graph instance, then cleared before populated. + Default is to use the same graph class as `G`. + + """ + L = nx.empty_graph(0, create_using, default=G.__class__) + + # Create a graph specific edge function. + get_edges = partial(G.edges, keys=True) if G.is_multigraph() else G.edges + + for from_node in get_edges(): + # from_node is: (u,v) or (u,v,key) + L.add_node(from_node) + for to_node in get_edges(from_node[1]): + L.add_edge(from_node, to_node) + + return L + + +def _lg_undirected(G, selfloops=False, create_using=None): + """Returns the line graph L of the (multi)graph G. + + Edges in G appear as nodes in L, represented as sorted tuples of the form + (u,v), or (u,v,key) if G is a multigraph. A node in L corresponding to + the edge {u,v} is connected to every node corresponding to an edge that + involves u or v. + + Parameters + ---------- + G : graph + An undirected graph or multigraph. + selfloops : bool + If `True`, then self-loops are included in the line graph. If `False`, + they are excluded. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Notes + ----- + The standard algorithm for line graphs of undirected graphs does not + produce self-loops. + + """ + L = nx.empty_graph(0, create_using, default=G.__class__) + + # Graph specific functions for edges. + get_edges = partial(G.edges, keys=True) if G.is_multigraph() else G.edges + + # Determine if we include self-loops or not. + shift = 0 if selfloops else 1 + + # Introduce numbering of nodes + node_index = {n: i for i, n in enumerate(G)} + + # Lift canonical representation of nodes to edges in line graph + def edge_key_function(edge): + return node_index[edge[0]], node_index[edge[1]] + + edges = set() + for u in G: + # Label nodes as a sorted tuple of nodes in original graph. + # Decide on representation of {u, v} as (u, v) or (v, u) depending on node_index. + # -> This ensures a canonical representation and avoids comparing values of different types. + nodes = [tuple(sorted(x[:2], key=node_index.get)) + x[2:] for x in get_edges(u)] + + if len(nodes) == 1: + # Then the edge will be an isolated node in L. + L.add_node(nodes[0]) + + # Add a clique of `nodes` to graph. To prevent double adding edges, + # especially important for multigraphs, we store the edges in + # canonical form in a set. + for i, a in enumerate(nodes): + edges.update( + [ + tuple(sorted((a, b), key=edge_key_function)) + for b in nodes[i + shift :] + ] + ) + + L.add_edges_from(edges) + return L + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(returns_graph=True) +def inverse_line_graph(G): + """Returns the inverse line graph of graph G. + + If H is a graph, and G is the line graph of H, such that G = L(H). + Then H is the inverse line graph of G. 
+
+    Not all graphs are line graphs, and graphs that are not line graphs
+    have no inverse line graph. In these cases this function raises a
+    NetworkXError.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX Graph
+
+    Returns
+    -------
+    H : graph
+        The inverse line graph of G.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed or a multigraph
+
+    NetworkXError
+        If G is not a line graph
+
+    Notes
+    -----
+    This is an implementation of the Roussopoulos algorithm [1]_.
+
+    If G consists of multiple components, then the algorithm doesn't work.
+    You should invert every component separately:
+
+    >>> K5 = nx.complete_graph(5)
+    >>> P4 = nx.Graph([("a", "b"), ("b", "c"), ("c", "d")])
+    >>> G = nx.union(K5, P4)
+    >>> root_graphs = []
+    >>> for comp in nx.connected_components(G):
+    ...     root_graphs.append(nx.inverse_line_graph(G.subgraph(comp)))
+    >>> len(root_graphs)
+    2
+
+    References
+    ----------
+    .. [1] Roussopoulos, N. D., "A max {m, n} algorithm for determining the
+       graph H from its line graph G", Information Processing Letters 2,
+       (1973), 108--112, ISSN 0020-0190.
+
+    """
+    if G.number_of_nodes() == 0:
+        return nx.empty_graph(1)
+    elif G.number_of_nodes() == 1:
+        v = arbitrary_element(G)
+        a = (v, 0)
+        b = (v, 1)
+        H = nx.Graph([(a, b)])
+        return H
+    elif G.number_of_nodes() > 1 and G.number_of_edges() == 0:
+        msg = (
+            "inverse_line_graph() doesn't work on an edgeless graph. "
+            "Please use this function on each component separately."
+        )
+        raise nx.NetworkXError(msg)
+
+    if nx.number_of_selfloops(G) != 0:
+        msg = (
+            "A line graph as generated by NetworkX has no selfloops, so G has no "
+            "inverse line graph. Please remove the selfloops from G and try again."
+        )
+        raise nx.NetworkXError(msg)
+
+    starting_cell = _select_starting_cell(G)
+    P = _find_partition(G, starting_cell)
+    # count how many times each vertex appears in the partition set
+    P_count = dict.fromkeys(G.nodes, 0)
+    for p in P:
+        for u in p:
+            P_count[u] += 1
+
+    if max(P_count.values()) > 2:
+        msg = "G is not a line graph (vertex found in more than two partition cells)"
+        raise nx.NetworkXError(msg)
+    W = tuple((u,) for u in P_count if P_count[u] == 1)
+    H = nx.Graph()
+    H.add_nodes_from(P)
+    H.add_nodes_from(W)
+    for a, b in combinations(H.nodes, 2):
+        if any(a_bit in b for a_bit in a):
+            H.add_edge(a, b)
+    return H
+
+
+def _triangles(G, e):
+    """Return list of all triangles containing edge e"""
+    u, v = e
+    if u not in G:
+        raise nx.NetworkXError(f"Vertex {u} not in graph")
+    if v not in G[u]:
+        raise nx.NetworkXError(f"Edge ({u}, {v}) not in graph")
+    triangle_list = []
+    for x in G[u]:
+        if x in G[v]:
+            triangle_list.append((u, v, x))
+    return triangle_list
+
+
+def _odd_triangle(G, T):
+    """Test whether T is an odd triangle in G
+
+    Parameters
+    ----------
+    G : NetworkX Graph
+    T : 3-tuple of vertices forming triangle in G
+
+    Returns
+    -------
+    True if T is an odd triangle
+    False otherwise
+
+    Raises
+    ------
+    NetworkXError
+        T is not a triangle in G
+
+    Notes
+    -----
+    An odd triangle is one in which there exists another vertex in G which is
+    adjacent to either exactly one or exactly all three of the vertices in the
+    triangle.
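+
+    For example, in the complete graph K4 every triangle is odd, because the
+    fourth vertex is adjacent to all three triangle vertices:
+
+    >>> G = nx.complete_graph(4)
+    >>> _odd_triangle(G, (0, 1, 2))
+    True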
+ + """ + for u in T: + if u not in G.nodes(): + raise nx.NetworkXError(f"Vertex {u} not in graph") + for e in list(combinations(T, 2)): + if e[0] not in G[e[1]]: + raise nx.NetworkXError(f"Edge ({e[0]}, {e[1]}) not in graph") + + T_nbrs = defaultdict(int) + for t in T: + for v in G[t]: + if v not in T: + T_nbrs[v] += 1 + return any(T_nbrs[v] in [1, 3] for v in T_nbrs) + + +def _find_partition(G, starting_cell): + """Find a partition of the vertices of G into cells of complete graphs + + Parameters + ---------- + G : NetworkX Graph + starting_cell : tuple of vertices in G which form a cell + + Returns + ------- + List of tuples of vertices of G + + Raises + ------ + NetworkXError + If a cell is not a complete subgraph then G is not a line graph + """ + G_partition = G.copy() + P = [starting_cell] # partition set + G_partition.remove_edges_from(list(combinations(starting_cell, 2))) + # keep list of partitioned nodes which might have an edge in G_partition + partitioned_vertices = list(starting_cell) + while G_partition.number_of_edges() > 0: + # there are still edges left and so more cells to be made + u = partitioned_vertices.pop() + deg_u = len(G_partition[u]) + if deg_u != 0: + # if u still has edges then we need to find its other cell + # this other cell must be a complete subgraph or else G is + # not a line graph + new_cell = [u] + list(G_partition[u]) + for u in new_cell: + for v in new_cell: + if (u != v) and (v not in G_partition[u]): + msg = ( + "G is not a line graph " + "(partition cell not a complete subgraph)" + ) + raise nx.NetworkXError(msg) + P.append(tuple(new_cell)) + G_partition.remove_edges_from(list(combinations(new_cell, 2))) + partitioned_vertices += new_cell + return P + + +def _select_starting_cell(G, starting_edge=None): + """Select a cell to initiate _find_partition + + Parameters + ---------- + G : NetworkX Graph + starting_edge: an edge to build the starting cell from + + Returns + ------- + Tuple of vertices in G + + Raises + ------ + NetworkXError + If it is determined that G is not a line graph + + Notes + ----- + If starting edge not specified then pick an arbitrary edge - doesn't + matter which. However, this function may call itself requiring a + specific starting edge. Note that the r, s notation for counting + triangles is the same as in the Roussopoulos paper cited above. + """ + if starting_edge is None: + e = arbitrary_element(G.edges()) + else: + e = starting_edge + if e[0] not in G.nodes(): + raise nx.NetworkXError(f"Vertex {e[0]} not in graph") + if e[1] not in G[e[0]]: + msg = f"starting_edge ({e[0]}, {e[1]}) is not in the Graph" + raise nx.NetworkXError(msg) + e_triangles = _triangles(G, e) + r = len(e_triangles) + if r == 0: + # there are no triangles containing e, so the starting cell is just e + starting_cell = e + elif r == 1: + # there is exactly one triangle, T, containing e. 
If other 2 edges + # of T belong only to this triangle then T is starting cell + T = e_triangles[0] + a, b, c = T + # ab was original edge so check the other 2 edges + ac_edges = len(_triangles(G, (a, c))) + bc_edges = len(_triangles(G, (b, c))) + if ac_edges == 1: + if bc_edges == 1: + starting_cell = T + else: + return _select_starting_cell(G, starting_edge=(b, c)) + else: + return _select_starting_cell(G, starting_edge=(a, c)) + else: + # r >= 2 so we need to count the number of odd triangles, s + s = 0 + odd_triangles = [] + for T in e_triangles: + if _odd_triangle(G, T): + s += 1 + odd_triangles.append(T) + if r == 2 and s == 0: + # in this case either triangle works, so just use T + starting_cell = T + elif r - 1 <= s <= r: + # check if odd triangles containing e form complete subgraph + triangle_nodes = set() + for T in odd_triangles: + for x in T: + triangle_nodes.add(x) + + for u in triangle_nodes: + for v in triangle_nodes: + if u != v and (v not in G[u]): + msg = ( + "G is not a line graph (odd triangles " + "do not form complete subgraph)" + ) + raise nx.NetworkXError(msg) + # otherwise then we can use this as the starting cell + starting_cell = tuple(triangle_nodes) + + else: + msg = ( + "G is not a line graph (incorrect number of " + "odd triangles around starting edge)" + ) + raise nx.NetworkXError(msg) + return starting_cell diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/mycielski.py b/.venv/lib/python3.12/site-packages/networkx/generators/mycielski.py new file mode 100644 index 0000000..804b903 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/mycielski.py @@ -0,0 +1,110 @@ +"""Functions related to the Mycielski Operation and the Mycielskian family +of graphs. + +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = ["mycielskian", "mycielski_graph"] + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +@nx._dispatchable(returns_graph=True) +def mycielskian(G, iterations=1): + r"""Returns the Mycielskian of a simple, undirected graph G + + The Mycielskian of graph preserves a graph's triangle free + property while increasing the chromatic number by 1. + + The Mycielski Operation on a graph, :math:`G=(V, E)`, constructs a new + graph with :math:`2|V| + 1` nodes and :math:`3|E| + |V|` edges. + + The construction is as follows: + + Let :math:`V = {0, ..., n-1}`. Construct another vertex set + :math:`U = {n, ..., 2n}` and a vertex, `w`. + Construct a new graph, `M`, with vertices :math:`U \bigcup V \bigcup w`. + For edges, :math:`(u, v) \in E` add edges :math:`(u, v), (u, v + n)`, and + :math:`(u + n, v)` to M. Finally, for all vertices :math:`u \in U`, add + edge :math:`(u, w)` to M. + + The Mycielski Operation can be done multiple times by repeating the above + process iteratively. + + More information can be found at https://en.wikipedia.org/wiki/Mycielskian + + Parameters + ---------- + G : graph + A simple, undirected NetworkX graph + iterations : int + The number of iterations of the Mycielski operation to + perform on G. Defaults to 1. Must be a non-negative integer. + + Returns + ------- + M : graph + The Mycielskian of G after the specified number of iterations. + + Notes + ----- + Graph, node, and edge data are not necessarily propagated to the new graph. 
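+
+    A small sketch of the size bookkeeping (the Mycielskian of the 5-cycle
+    is the Grötzsch graph, with :math:`2 \cdot 5 + 1 = 11` nodes and
+    :math:`3 \cdot 5 + 5 = 20` edges):
+
+    >>> M = nx.mycielskian(nx.cycle_graph(5))
+    >>> M.number_of_nodes(), M.number_of_edges()
+    (11, 20)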
+ + """ + + M = nx.convert_node_labels_to_integers(G) + + for i in range(iterations): + n = M.number_of_nodes() + M.add_nodes_from(range(n, 2 * n)) + old_edges = list(M.edges()) + M.add_edges_from((u, v + n) for u, v in old_edges) + M.add_edges_from((u + n, v) for u, v in old_edges) + M.add_node(2 * n) + M.add_edges_from((u + n, 2 * n) for u in range(n)) + + return M + + +@nx._dispatchable(graphs=None, returns_graph=True) +def mycielski_graph(n): + """Generator for the n_th Mycielski Graph. + + The Mycielski family of graphs is an infinite set of graphs. + :math:`M_1` is the singleton graph, :math:`M_2` is two vertices with an + edge, and, for :math:`i > 2`, :math:`M_i` is the Mycielskian of + :math:`M_{i-1}`. + + More information can be found at + http://mathworld.wolfram.com/MycielskiGraph.html + + Parameters + ---------- + n : int + The desired Mycielski Graph. + + Returns + ------- + M : graph + The n_th Mycielski Graph + + Notes + ----- + The first graph in the Mycielski sequence is the singleton graph. + The Mycielskian of this graph is not the :math:`P_2` graph, but rather the + :math:`P_2` graph with an extra, isolated vertex. The second Mycielski + graph is the :math:`P_2` graph, so the first two are hard coded. + The remaining graphs are generated using the Mycielski operation. + + """ + + if n < 1: + raise nx.NetworkXError("must satisfy n >= 1") + + if n == 1: + return nx.empty_graph(1) + + else: + return mycielskian(nx.path_graph(2), n - 2) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/nonisomorphic_trees.py b/.venv/lib/python3.12/site-packages/networkx/generators/nonisomorphic_trees.py new file mode 100644 index 0000000..2f19f93 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/nonisomorphic_trees.py @@ -0,0 +1,209 @@ +""" +Implementation of the Wright, Richmond, Odlyzko and McKay (WROM) +algorithm for the enumeration of all non-isomorphic free trees of a +given order. Rooted trees are represented by level sequences, i.e., +lists in which the i-th element specifies the distance of vertex i to +the root. + +""" + +__all__ = ["nonisomorphic_trees", "number_of_nonisomorphic_trees"] + +from functools import lru_cache + +import networkx as nx + + +@nx._dispatchable(graphs=None, returns_graph=True) +def nonisomorphic_trees(order): + """Generates lists of nonisomorphic trees + + Parameters + ---------- + order : int + order of the desired tree(s) + + Yields + ------ + list of `networkx.Graph` instances + A list of nonisomorphic trees + """ + + if order < 2: + raise ValueError + # start at the path graph rooted at its center + layout = list(range(order // 2 + 1)) + list(range(1, (order + 1) // 2)) + + while layout is not None: + layout = _next_tree(layout) + if layout is not None: + yield _layout_to_graph(layout) + layout = _next_rooted_tree(layout) + + +@nx._dispatchable(graphs=None) +def number_of_nonisomorphic_trees(order): + """Returns the number of nonisomorphic trees + + Based on an algorithm by Alois P. Heinz in + `OEIS entry A000055 `_. 
Complexity is ``O(n ** 3)`` + + Parameters + ---------- + order : int + order of the desired tree(s) + + Returns + ------- + int + Number of nonisomorphic graphs for the given order + """ + if order < 2: + raise ValueError + return _unlabeled_trees(order) + + +@lru_cache(None) +def _unlabeled_trees(n): + """Implements OEIS A000055 (number of unlabeled trees).""" + + value = 0 + for k in range(n + 1): + value += _rooted_trees(k) * _rooted_trees(n - k) + if n % 2 == 0: + value -= _rooted_trees(n // 2) + return _rooted_trees(n) - value // 2 + + +@lru_cache(None) +def _rooted_trees(n): + """Implements OEIS A000081 (number of unlabeled rooted trees).""" + + if n < 2: + return n + value = 0 + for j in range(1, n): + for d in range(1, n): + if j % d == 0: + value += d * _rooted_trees(d) * _rooted_trees(n - j) + return value // (n - 1) + + +def _next_rooted_tree(predecessor, p=None): + """One iteration of the Beyer-Hedetniemi algorithm.""" + + if p is None: + p = len(predecessor) - 1 + while predecessor[p] == 1: + p -= 1 + if p == 0: + return None + + q = p - 1 + while predecessor[q] != predecessor[p] - 1: + q -= 1 + result = list(predecessor) + for i in range(p, len(result)): + result[i] = result[i - p + q] + return result + + +def _next_tree(candidate): + """One iteration of the Wright, Richmond, Odlyzko and McKay + algorithm.""" + + # valid representation of a free tree if: + # there are at least two vertices at layer 1 + # (this is always the case because we start at the path graph) + left, rest = _split_tree(candidate) + + # and the left subtree of the root + # is less high than the tree with the left subtree removed + left_height = max(left) + rest_height = max(rest) + valid = rest_height >= left_height + + if valid and rest_height == left_height: + # and, if left and rest are of the same height, + # if left does not encompass more vertices + if len(left) > len(rest): + valid = False + # and, if they have the same number or vertices, + # if left does not come after rest lexicographically + elif len(left) == len(rest) and left > rest: + valid = False + + if valid: + return candidate + else: + # jump to the next valid free tree + p = len(left) + new_candidate = _next_rooted_tree(candidate, p) + if candidate[p] > 2: + new_left, new_rest = _split_tree(new_candidate) + new_left_height = max(new_left) + suffix = range(1, new_left_height + 2) + new_candidate[-len(suffix) :] = suffix + return new_candidate + + +def _split_tree(layout): + """Returns a tuple of two layouts, one containing the left + subtree of the root vertex, and one containing the original tree + with the left subtree removed.""" + + one_found = False + m = None + for i in range(len(layout)): + if layout[i] == 1: + if one_found: + m = i + break + else: + one_found = True + + if m is None: + m = len(layout) + + left = [layout[i] - 1 for i in range(1, m)] + rest = [0] + [layout[i] for i in range(m, len(layout))] + return (left, rest) + + +def _layout_to_matrix(layout): + """Create the adjacency matrix for the tree specified by the + given layout (level sequence).""" + + result = [[0] * len(layout) for i in range(len(layout))] + stack = [] + for i in range(len(layout)): + i_level = layout[i] + if stack: + j = stack[-1] + j_level = layout[j] + while j_level >= i_level: + stack.pop() + j = stack[-1] + j_level = layout[j] + result[i][j] = result[j][i] = 1 + stack.append(i) + return result + + +def _layout_to_graph(layout): + """Create a NetworkX Graph for the tree specified by the + given layout(level sequence)""" + G = nx.Graph() + stack 
= [] + for i in range(len(layout)): + i_level = layout[i] + if stack: + j = stack[-1] + j_level = layout[j] + while j_level >= i_level: + stack.pop() + j = stack[-1] + j_level = layout[j] + G.add_edge(i, j) + stack.append(i) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/random_clustered.py b/.venv/lib/python3.12/site-packages/networkx/generators/random_clustered.py new file mode 100644 index 0000000..8fbf855 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/random_clustered.py @@ -0,0 +1,117 @@ +"""Generate graphs with given degree and triangle sequence.""" + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = ["random_clustered_graph"] + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): + r"""Generate a random graph with the given joint independent edge degree and + triangle degree sequence. + + This uses a configuration model-like approach to generate a random graph + (with parallel edges and self-loops) by randomly assigning edges to match + the given joint degree sequence. + + The joint degree sequence is a list of pairs of integers of the form + $[(d_{1,i}, d_{1,t}), \dotsc, (d_{n,i}, d_{n,t})]$. According to this list, + vertex $u$ is a member of $d_{u,t}$ triangles and has $d_{u, i}$ other + edges. The number $d_{u,t}$ is the *triangle degree* of $u$ and the number + $d_{u,i}$ is the *independent edge degree*. + + Parameters + ---------- + joint_degree_sequence : list of integer pairs + Each list entry corresponds to the independent edge degree and + triangle degree of a node. + create_using : NetworkX graph constructor, optional (default MultiGraph) + Graph type to create. If graph instance, then cleared before populated. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + G : MultiGraph + A graph with the specified degree sequence. Nodes are labeled + starting at 0 with an index corresponding to the position in + deg_sequence. + + Raises + ------ + NetworkXError + If the independent edge degree sequence sum is not even + or the triangle degree sequence sum is not divisible by 3. + + Notes + ----- + As described by Miller [1]_ (see also Newman [2]_ for an equivalent + description). + + A non-graphical degree sequence (not realizable by some simple + graph) is allowed since this function returns graphs with self + loops and parallel edges. An exception is raised if the + independent degree sequence does not have an even sum or the + triangle degree sequence sum is not divisible by 3. + + This configuration model-like construction process can lead to + duplicate edges and loops. You can remove the self-loops and + parallel edges (see below) which will likely result in a graph + that doesn't have the exact degree sequence specified. This + "finite-size effect" decreases as the size of the graph increases. + + References + ---------- + .. [1] Joel C. Miller. "Percolation and epidemics in random clustered + networks". In: Physical review. E, Statistical, nonlinear, and soft + matter physics 80 (2 Part 1 August 2009). + .. [2] M. E. J. Newman. "Random Graphs with Clustering". 
+ In: Physical Review Letters 103 (5 July 2009) + + Examples + -------- + >>> deg = [(1, 0), (1, 0), (1, 0), (2, 0), (1, 0), (2, 1), (0, 1), (0, 1)] + >>> G = nx.random_clustered_graph(deg) + + To remove parallel edges: + + >>> G = nx.Graph(G) + + To remove self loops: + + >>> G.remove_edges_from(nx.selfloop_edges(G)) + + """ + # In Python 3, zip() returns an iterator. Make this into a list. + joint_degree_sequence = list(joint_degree_sequence) + + N = len(joint_degree_sequence) + G = nx.empty_graph(N, create_using, default=nx.MultiGraph) + if G.is_directed(): + raise nx.NetworkXError("Directed Graph not supported") + + ilist = [] + tlist = [] + for n in G: + degrees = joint_degree_sequence[n] + for icount in range(degrees[0]): + ilist.append(n) + for tcount in range(degrees[1]): + tlist.append(n) + + if len(ilist) % 2 != 0 or len(tlist) % 3 != 0: + raise nx.NetworkXError("Invalid degree sequence") + + seed.shuffle(ilist) + seed.shuffle(tlist) + while ilist: + G.add_edge(ilist.pop(), ilist.pop()) + while tlist: + n1 = tlist.pop() + n2 = tlist.pop() + n3 = tlist.pop() + G.add_edges_from([(n1, n2), (n1, n3), (n2, n3)]) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/random_graphs.py b/.venv/lib/python3.12/site-packages/networkx/generators/random_graphs.py new file mode 100644 index 0000000..90ae0d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/random_graphs.py @@ -0,0 +1,1400 @@ +""" +Generators for random graphs. + +""" + +import itertools +import math +from collections import defaultdict + +import networkx as nx +from networkx.utils import py_random_state + +from ..utils.misc import check_create_using +from .classic import complete_graph, empty_graph, path_graph, star_graph +from .degree_seq import degree_sequence_tree + +__all__ = [ + "fast_gnp_random_graph", + "gnp_random_graph", + "dense_gnm_random_graph", + "gnm_random_graph", + "erdos_renyi_graph", + "binomial_graph", + "newman_watts_strogatz_graph", + "watts_strogatz_graph", + "connected_watts_strogatz_graph", + "random_regular_graph", + "barabasi_albert_graph", + "dual_barabasi_albert_graph", + "extended_barabasi_albert_graph", + "powerlaw_cluster_graph", + "random_lobster", + "random_shell_graph", + "random_powerlaw_tree", + "random_powerlaw_tree_sequence", + "random_kernel_graph", +] + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def fast_gnp_random_graph(n, p, seed=None, directed=False, *, create_using=None): + """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph or + a binomial graph. + + Parameters + ---------- + n : int + The number of nodes. + p : float + Probability for edge creation. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : bool, optional (default=False) + If True, this function returns a directed graph. + create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph types are not supported and raise a ``NetworkXError``. + By default NetworkX Graph or DiGraph are used depending on `directed`. + + Notes + ----- + The $G_{n,p}$ graph algorithm chooses each of the $[n (n - 1)] / 2$ + (undirected) or $n (n - 1)$ (directed) possible edges with probability $p$. + + This algorithm [1]_ runs in $O(n + m)$ time, where `m` is the expected number of + edges, which equals $p n (n - 1) / 2$. 
This should be faster than + :func:`gnp_random_graph` when $p$ is small and the expected number of edges + is small (that is, the graph is sparse). + + See Also + -------- + gnp_random_graph + + References + ---------- + .. [1] Vladimir Batagelj and Ulrik Brandes, + "Efficient generation of large random networks", + Phys. Rev. E, 71, 036113, 2005. + """ + default = nx.DiGraph if directed else nx.Graph + create_using = check_create_using( + create_using, directed=directed, multigraph=False, default=default + ) + if p <= 0 or p >= 1: + return nx.gnp_random_graph( + n, p, seed=seed, directed=directed, create_using=create_using + ) + + G = empty_graph(n, create_using=create_using) + + lp = math.log(1.0 - p) + + if directed: + v = 1 + w = -1 + while v < n: + lr = math.log(1.0 - seed.random()) + w = w + 1 + int(lr / lp) + while w >= v and v < n: + w = w - v + v = v + 1 + if v < n: + G.add_edge(w, v) + + # Nodes in graph are from 0,n-1 (start with v as the second node index). + v = 1 + w = -1 + while v < n: + lr = math.log(1.0 - seed.random()) + w = w + 1 + int(lr / lp) + while w >= v and v < n: + w = w - v + v = v + 1 + if v < n: + G.add_edge(v, w) + return G + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def gnp_random_graph(n, p, seed=None, directed=False, *, create_using=None): + """Returns a $G_{n,p}$ random graph, also known as an Erdős-Rényi graph + or a binomial graph. + + The $G_{n,p}$ model chooses each of the possible edges with probability $p$. + + Parameters + ---------- + n : int + The number of nodes. + p : float + Probability for edge creation. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : bool, optional (default=False) + If True, this function returns a directed graph. + create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph types are not supported and raise a ``NetworkXError``. + By default NetworkX Graph or DiGraph are used depending on `directed`. + + See Also + -------- + fast_gnp_random_graph + + Notes + ----- + This algorithm [2]_ runs in $O(n^2)$ time. For sparse graphs (that is, for + small values of $p$), :func:`fast_gnp_random_graph` is a faster algorithm. + + :func:`binomial_graph` and :func:`erdos_renyi_graph` are + aliases for :func:`gnp_random_graph`. + + >>> nx.binomial_graph is nx.gnp_random_graph + True + >>> nx.erdos_renyi_graph is nx.gnp_random_graph + True + + References + ---------- + .. [1] P. Erdős and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959). + .. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959). + """ + default = nx.DiGraph if directed else nx.Graph + create_using = check_create_using( + create_using, directed=directed, multigraph=False, default=default + ) + if p >= 1: + return complete_graph(n, create_using=create_using) + + G = nx.empty_graph(n, create_using=create_using) + if p <= 0: + return G + + edgetool = itertools.permutations if directed else itertools.combinations + for e in edgetool(range(n), 2): + if seed.random() < p: + G.add_edge(*e) + return G + + +# add some aliases to common names +binomial_graph = gnp_random_graph +erdos_renyi_graph = gnp_random_graph + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def dense_gnm_random_graph(n, m, seed=None, *, create_using=None): + """Returns a $G_{n,m}$ random graph. 
+ + In the $G_{n,m}$ model, a graph is chosen uniformly at random from the set + of all graphs with $n$ nodes and $m$ edges. + + This algorithm should be faster than :func:`gnm_random_graph` for dense + graphs. + + Parameters + ---------- + n : int + The number of nodes. + m : int + The number of edges. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + See Also + -------- + gnm_random_graph + + Notes + ----- + Algorithm by Keith M. Briggs Mar 31, 2006. + Inspired by Knuth's Algorithm S (Selection sampling technique), + in section 3.4.2 of [1]_. + + References + ---------- + .. [1] Donald E. Knuth, The Art of Computer Programming, + Volume 2/Seminumerical algorithms, Third Edition, Addison-Wesley, 1997. + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + mmax = n * (n - 1) // 2 + if m >= mmax: + return complete_graph(n, create_using) + G = empty_graph(n, create_using) + + if n == 1: + return G + + u = 0 + v = 1 + t = 0 + k = 0 + while True: + if seed.randrange(mmax - t) < m - k: + G.add_edge(u, v) + k += 1 + if k == m: + return G + t += 1 + v += 1 + if v == n: # go to next row of adjacency matrix + u += 1 + v = u + 1 + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def gnm_random_graph(n, m, seed=None, directed=False, *, create_using=None): + """Returns a $G_{n,m}$ random graph. + + In the $G_{n,m}$ model, a graph is chosen uniformly at random from the set + of all graphs with $n$ nodes and $m$ edges. + + This algorithm should be faster than :func:`dense_gnm_random_graph` for + sparse graphs. + + Parameters + ---------- + n : int + The number of nodes. + m : int + The number of edges. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + directed : bool, optional (default=False) + If True return a directed graph + create_using : Graph constructor, optional (default=nx.Graph or nx.DiGraph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph types are not supported and raise a ``NetworkXError``. + By default NetworkX Graph or DiGraph are used depending on `directed`. + + See also + -------- + dense_gnm_random_graph + + """ + default = nx.DiGraph if directed else nx.Graph + create_using = check_create_using( + create_using, directed=directed, multigraph=False, default=default + ) + if n == 1: + return nx.empty_graph(n, create_using=create_using) + max_edges = n * (n - 1) if directed else n * (n - 1) / 2.0 + if m >= max_edges: + return complete_graph(n, create_using=create_using) + + G = nx.empty_graph(n, create_using=create_using) + nlist = list(G) + edge_count = 0 + while edge_count < m: + # generate random edge,u,v + u = seed.choice(nlist) + v = seed.choice(nlist) + if u == v or G.has_edge(u, v): + continue + else: + G.add_edge(u, v) + edge_count = edge_count + 1 + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def newman_watts_strogatz_graph(n, k, p, seed=None, *, create_using=None): + """Returns a Newman–Watts–Strogatz small-world graph. + + Parameters + ---------- + n : int + The number of nodes. + k : int + Each node is joined with its `k` nearest neighbors in a ring + topology. 
+ p : float + The probability of adding a new edge for each edge. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Notes + ----- + First create a ring over $n$ nodes [1]_. Then each node in the ring is + connected with its $k$ nearest neighbors (or $k - 1$ neighbors if $k$ + is odd). Then shortcuts are created by adding new edges as follows: for + each edge $(u, v)$ in the underlying "$n$-ring with $k$ nearest + neighbors" with probability $p$ add a new edge $(u, w)$ with + randomly-chosen existing node $w$. In contrast with + :func:`watts_strogatz_graph`, no edges are removed. + + See Also + -------- + watts_strogatz_graph + + References + ---------- + .. [1] M. E. J. Newman and D. J. Watts, + Renormalization group analysis of the small-world network model, + Physics Letters A, 263, 341, 1999. + https://doi.org/10.1016/S0375-9601(99)00757-4 + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + if k > n: + raise nx.NetworkXError("k>=n, choose smaller k or larger n") + + # If k == n the graph return is a complete graph + if k == n: + return nx.complete_graph(n, create_using) + + G = empty_graph(n, create_using) + nlist = list(G.nodes()) + fromv = nlist + # connect the k/2 neighbors + for j in range(1, k // 2 + 1): + tov = fromv[j:] + fromv[0:j] # the first j are now last + for i in range(len(fromv)): + G.add_edge(fromv[i], tov[i]) + # for each edge u-v, with probability p, randomly select existing + # node w and add new edge u-w + e = list(G.edges()) + for u, v in e: + if seed.random() < p: + w = seed.choice(nlist) + # no self-loops and reject if edge u-w exists + # is that the correct NWS model? + while w == u or G.has_edge(u, w): + w = seed.choice(nlist) + if G.degree(u) >= n - 1: + break # skip this rewiring + else: + G.add_edge(u, w) + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def watts_strogatz_graph(n, k, p, seed=None, *, create_using=None): + """Returns a Watts–Strogatz small-world graph. + + Parameters + ---------- + n : int + The number of nodes + k : int + Each node is joined with its `k` nearest neighbors in a ring + topology. + p : float + The probability of rewiring each edge + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + See Also + -------- + newman_watts_strogatz_graph + connected_watts_strogatz_graph + + Notes + ----- + First create a ring over $n$ nodes [1]_. Then each node in the ring is joined + to its $k$ nearest neighbors (or $k - 1$ neighbors if $k$ is odd). + Then shortcuts are created by replacing some edges as follows: for each + edge $(u, v)$ in the underlying "$n$-ring with $k$ nearest neighbors" + with probability $p$ replace it with a new edge $(u, w)$ with uniformly + random choice of existing node $w$. + + In contrast with :func:`newman_watts_strogatz_graph`, the random rewiring + does not increase the number of edges. 
The rewired graph is not guaranteed + to be connected as in :func:`connected_watts_strogatz_graph`. + + References + ---------- + .. [1] Duncan J. Watts and Steven H. Strogatz, + Collective dynamics of small-world networks, + Nature, 393, pp. 440--442, 1998. + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + if k > n: + raise nx.NetworkXError("k>n, choose smaller k or larger n") + + # If k == n, the graph is complete not Watts-Strogatz + if k == n: + G = nx.complete_graph(n, create_using) + return G + + G = nx.empty_graph(n, create_using=create_using) + nodes = list(range(n)) # nodes are labeled 0 to n-1 + # connect each node to k/2 neighbors + for j in range(1, k // 2 + 1): + targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list + G.add_edges_from(zip(nodes, targets)) + # rewire edges from each node + # loop over all nodes in order (label) and neighbors in order (distance) + # no self loops or multiple edges allowed + for j in range(1, k // 2 + 1): # outer loop is neighbors + targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list + # inner loop in node order + for u, v in zip(nodes, targets): + if seed.random() < p: + w = seed.choice(nodes) + # Enforce no self-loops or multiple edges + while w == u or G.has_edge(u, w): + w = seed.choice(nodes) + if G.degree(u) >= n - 1: + break # skip this rewiring + else: + G.remove_edge(u, v) + G.add_edge(u, w) + return G + + +@py_random_state(4) +@nx._dispatchable(graphs=None, returns_graph=True) +def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None, *, create_using=None): + """Returns a connected Watts–Strogatz small-world graph. + + Attempts to generate a connected graph by repeated generation of + Watts–Strogatz small-world graphs. An exception is raised if the maximum + number of tries is exceeded. + + Parameters + ---------- + n : int + The number of nodes + k : int + Each node is joined with its `k` nearest neighbors in a ring + topology. + p : float + The probability of rewiring each edge + tries : int + Number of attempts to generate a connected graph. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Notes + ----- + First create a ring over $n$ nodes [1]_. Then each node in the ring is joined + to its $k$ nearest neighbors (or $k - 1$ neighbors if $k$ is odd). + Then shortcuts are created by replacing some edges as follows: for each + edge $(u, v)$ in the underlying "$n$-ring with $k$ nearest neighbors" + with probability $p$ replace it with a new edge $(u, w)$ with uniformly + random choice of existing node $w$. + The entire process is repeated until a connected graph results. + + See Also + -------- + newman_watts_strogatz_graph + watts_strogatz_graph + + References + ---------- + .. [1] Duncan J. Watts and Steven H. Strogatz, + Collective dynamics of small-world networks, + Nature, 393, pp. 440--442, 1998. 
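+
+    A short usage sketch (standard API; the ring-lattice construction gives
+    exactly ``n * k / 2`` edges, and rewiring preserves that count):
+
+    >>> G = nx.connected_watts_strogatz_graph(20, 4, 0.1, seed=42)
+    >>> nx.is_connected(G), G.number_of_edges()
+    (True, 40)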
+ """ + for i in range(tries): + # seed is an RNG so should change sequence each call + G = watts_strogatz_graph(n, k, p, seed, create_using=create_using) + if nx.is_connected(G): + return G + raise nx.NetworkXError("Maximum number of tries exceeded") + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_regular_graph(d, n, seed=None, *, create_using=None): + r"""Returns a random $d$-regular graph on $n$ nodes. + + A regular graph is a graph where each node has the same number of neighbors. + + The resulting graph has no self-loops or parallel edges. + + Parameters + ---------- + d : int + The degree of each node. + n : integer + The number of nodes. The value of $n \times d$ must be even. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Notes + ----- + The nodes are numbered from $0$ to $n - 1$. + + Kim and Vu's paper [2]_ shows that this algorithm samples in an + asymptotically uniform way from the space of random graphs when + $d = O(n^{1 / 3 - \epsilon})$. + + Raises + ------ + + NetworkXError + If $n \times d$ is odd or $d$ is greater than or equal to $n$. + + References + ---------- + .. [1] A. Steger and N. Wormald, + Generating random regular graphs quickly, + Probability and Computing 8 (1999), 377-396, 1999. + https://doi.org/10.1017/S0963548399003867 + + .. [2] Jeong Han Kim and Van H. Vu, + Generating random regular graphs, + Proceedings of the thirty-fifth ACM symposium on Theory of computing, + San Diego, CA, USA, pp 213--222, 2003. + http://portal.acm.org/citation.cfm?id=780542.780576 + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + if (n * d) % 2 != 0: + raise nx.NetworkXError("n * d must be even") + + if not 0 <= d < n: + raise nx.NetworkXError("the 0 <= d < n inequality must be satisfied") + + G = nx.empty_graph(n, create_using=create_using) + + if d == 0: + return G + + def _suitable(edges, potential_edges): + # Helper subroutine to check if there are suitable edges remaining + # If False, the generation of the graph has failed + if not potential_edges: + return True + for s1 in potential_edges: + for s2 in potential_edges: + # Two iterators on the same dictionary are guaranteed + # to visit it in the same order if there are no + # intervening modifications. + if s1 == s2: + # Only need to consider s1-s2 pair one time + break + if s1 > s2: + s1, s2 = s2, s1 + if (s1, s2) not in edges: + return True + return False + + def _try_creation(): + # Attempt to create an edge set + + edges = set() + stubs = list(range(n)) * d + + while stubs: + potential_edges = defaultdict(lambda: 0) + seed.shuffle(stubs) + stubiter = iter(stubs) + for s1, s2 in zip(stubiter, stubiter): + if s1 > s2: + s1, s2 = s2, s1 + if s1 != s2 and ((s1, s2) not in edges): + edges.add((s1, s2)) + else: + potential_edges[s1] += 1 + potential_edges[s2] += 1 + + if not _suitable(edges, potential_edges): + return None # failed to find suitable edge set + + stubs = [ + node + for node, potential in potential_edges.items() + for _ in range(potential) + ] + return edges + + # Even though a suitable edge set exists, + # the generation of such a set is not guaranteed. + # Try repeatedly to find one. 
+ edges = _try_creation() + while edges is None: + edges = _try_creation() + G.add_edges_from(edges) + + return G + + +def _random_subset(seq, m, rng): + """Return m unique elements from seq. + + This differs from random.sample which can return repeated + elements if seq holds repeated elements. + + Note: rng is a random.Random or numpy.random.RandomState instance. + """ + targets = set() + while len(targets) < m: + x = rng.choice(seq) + targets.add(x) + return targets + + +@py_random_state(2) +@nx._dispatchable(graphs=None, returns_graph=True) +def barabasi_albert_graph(n, m, seed=None, initial_graph=None, *, create_using=None): + """Returns a random graph using Barabási–Albert preferential attachment + + A graph of $n$ nodes is grown by attaching new nodes each with $m$ + edges that are preferentially attached to existing nodes with high degree. + + Parameters + ---------- + n : int + Number of nodes + m : int + Number of edges to attach from a new node to existing nodes + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + initial_graph : Graph or None (default) + Initial network for Barabási–Albert algorithm. + It should be a connected graph for most use cases. + A copy of `initial_graph` is used. + If None, starts from a star graph on (m+1) nodes. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If `m` does not satisfy ``1 <= m < n``, or + the initial graph number of nodes m0 does not satisfy ``m <= m0 <= n``. + + References + ---------- + .. [1] A. L. Barabási and R. Albert "Emergence of scaling in + random networks", Science 286, pp 509-512, 1999. + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + if m < 1 or m >= n: + raise nx.NetworkXError( + f"Barabási–Albert network must have m >= 1 and m < n, m = {m}, n = {n}" + ) + + if initial_graph is None: + # Default initial graph : star graph on (m + 1) nodes + G = star_graph(m, create_using) + else: + if len(initial_graph) < m or len(initial_graph) > n: + raise nx.NetworkXError( + f"Barabási–Albert initial graph needs between m={m} and n={n} nodes" + ) + G = initial_graph.copy() + + # List of existing nodes, with nodes repeated once for each adjacent edge + repeated_nodes = [n for n, d in G.degree() for _ in range(d)] + # Start adding the other n - m0 nodes. + source = len(G) + while source < n: + # Now choose m unique nodes from the existing nodes + # Pick uniformly from repeated_nodes (preferential attachment) + targets = _random_subset(repeated_nodes, m, seed) + # Add edges to m nodes from the source. + G.add_edges_from(zip([source] * m, targets)) + # Add one node to the list for each new edge just created. + repeated_nodes.extend(targets) + # And the new node "source" has m edges to add to the list. 
+ repeated_nodes.extend([source] * m) + + source += 1 + return G + + +@py_random_state(4) +@nx._dispatchable(graphs=None, returns_graph=True) +def dual_barabasi_albert_graph( + n, m1, m2, p, seed=None, initial_graph=None, *, create_using=None +): + """Returns a random graph using dual Barabási–Albert preferential attachment + + A graph of $n$ nodes is grown by attaching new nodes each with either $m_1$ + edges (with probability $p$) or $m_2$ edges (with probability $1-p$) that + are preferentially attached to existing nodes with high degree. + + Parameters + ---------- + n : int + Number of nodes + m1 : int + Number of edges to link each new node to existing nodes with probability $p$ + m2 : int + Number of edges to link each new node to existing nodes with probability $1-p$ + p : float + The probability of attaching $m_1$ edges (as opposed to $m_2$ edges) + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + initial_graph : Graph or None (default) + Initial network for Barabási–Albert algorithm. + A copy of `initial_graph` is used. + It should be connected for most use cases. + If None, starts from an star graph on max(m1, m2) + 1 nodes. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If `m1` and `m2` do not satisfy ``1 <= m1,m2 < n``, or + `p` does not satisfy ``0 <= p <= 1``, or + the initial graph number of nodes m0 does not satisfy m1, m2 <= m0 <= n. + + References + ---------- + .. [1] N. Moshiri "The dual-Barabasi-Albert model", arXiv:1810.10538. + """ + create_using = check_create_using(create_using, directed=False, multigraph=False) + if m1 < 1 or m1 >= n: + raise nx.NetworkXError( + f"Dual Barabási–Albert must have m1 >= 1 and m1 < n, m1 = {m1}, n = {n}" + ) + if m2 < 1 or m2 >= n: + raise nx.NetworkXError( + f"Dual Barabási–Albert must have m2 >= 1 and m2 < n, m2 = {m2}, n = {n}" + ) + if p < 0 or p > 1: + raise nx.NetworkXError( + f"Dual Barabási–Albert network must have 0 <= p <= 1, p = {p}" + ) + + # For simplicity, if p == 0 or 1, just return BA + if p == 1: + return barabasi_albert_graph(n, m1, seed, create_using=create_using) + elif p == 0: + return barabasi_albert_graph(n, m2, seed, create_using=create_using) + + if initial_graph is None: + # Default initial graph : star graph on max(m1, m2) nodes + G = star_graph(max(m1, m2), create_using) + else: + if len(initial_graph) < max(m1, m2) or len(initial_graph) > n: + raise nx.NetworkXError( + f"Barabási–Albert initial graph must have between " + f"max(m1, m2) = {max(m1, m2)} and n = {n} nodes" + ) + G = initial_graph.copy() + + # Target nodes for new edges + targets = list(G) + # List of existing nodes, with nodes repeated once for each adjacent edge + repeated_nodes = [n for n, d in G.degree() for _ in range(d)] + # Start adding the remaining nodes. + source = len(G) + while source < n: + # Pick which m to use (m1 or m2) + if seed.random() < p: + m = m1 + else: + m = m2 + # Now choose m unique nodes from the existing nodes + # Pick uniformly from repeated_nodes (preferential attachment) + targets = _random_subset(repeated_nodes, m, seed) + # Add edges to m nodes from the source. + G.add_edges_from(zip([source] * m, targets)) + # Add one node to the list for each new edge just created. 
+        repeated_nodes.extend(targets)
+        # And the new node "source" has m edges to add to the list.
+        repeated_nodes.extend([source] * m)
+
+        source += 1
+    return G
+
+
+@py_random_state(4)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def extended_barabasi_albert_graph(n, m, p, q, seed=None, *, create_using=None):
+    """Returns an extended Barabási–Albert model graph.
+
+    An extended Barabási–Albert model graph is a random graph constructed
+    using preferential attachment. The extended model allows new edges,
+    rewired edges or new nodes. Based on the probabilities $p$ and $q$
+    with $p + q < 1$, the growing behavior of the graph is determined as:
+
+    1) With $p$ probability, $m$ new edges are added to the graph,
+    starting from randomly chosen existing nodes and attached preferentially at the
+    other end.
+
+    2) With $q$ probability, $m$ existing edges are rewired
+    by randomly choosing an edge and rewiring one end to a preferentially chosen node.
+
+    3) With $(1 - p - q)$ probability, $m$ new nodes are added to the graph
+    with edges attached preferentially.
+
+    When $p = q = 0$, the model behaves just like the Barabási–Albert model.
+
+    Parameters
+    ----------
+    n : int
+        Number of nodes
+    m : int
+        Number of edges with which a new node attaches to existing nodes
+    p : float
+        Probability value for adding an edge between existing nodes. p + q < 1
+    q : float
+        Probability value of rewiring of existing edges. p + q < 1
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Returns
+    -------
+    G : Graph
+
+    Raises
+    ------
+    NetworkXError
+        If `m` does not satisfy ``1 <= m < n``, or `p` and `q` do not
+        satisfy ``p + q < 1``.
+
+    References
+    ----------
+    .. [1] Albert, R., & Barabási, A. L. (2000)
+       Topology of evolving networks: local events and universality
+       Physical review letters, 85(24), 5234.
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    if m < 1 or m >= n:
+        msg = f"Extended Barabasi-Albert network needs m >= 1 and m < n, m={m}, n={n}"
+        raise nx.NetworkXError(msg)
+    if p + q >= 1:
+        msg = f"Extended Barabasi-Albert network needs p + q < 1, p={p}, q={q}"
+        raise nx.NetworkXError(msg)
+
+    # Add m initial nodes (m0 in barabasi-speak)
+    G = empty_graph(m, create_using)
+
+    # List of nodes to represent the preferential attachment random selection.
+    # At the creation of the graph, all nodes are added to the list
+    # so that even nodes that are not connected have a chance to get selected,
+    # for rewiring and adding of edges.
+    # With each new edge, nodes at the ends of the edge are added to the list.
+    attachment_preference = []
+    attachment_preference.extend(range(m))
+
+    # Start adding the other n-m nodes. The first node is m.
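+    # Each pass below draws a_probability once and takes exactly one branch:
+    # add m new edges (probability p, if the graph has room for them),
+    # rewire m existing edges (probability q, if enough edges exist), or
+    # fall through to adding one new node with m preferential edges. Note
+    # that the density guards can force the fall-through branch as well.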
+ new_node = m + while new_node < n: + a_probability = seed.random() + + # Total number of edges of a Clique of all the nodes + clique_degree = len(G) - 1 + clique_size = (len(G) * clique_degree) / 2 + + # Adding m new edges, if there is room to add them + if a_probability < p and G.size() <= clique_size - m: + # Select the nodes where an edge can be added + eligible_nodes = [nd for nd, deg in G.degree() if deg < clique_degree] + for i in range(m): + # Choosing a random source node from eligible_nodes + src_node = seed.choice(eligible_nodes) + + # Picking a possible node that is not 'src_node' or + # neighbor with 'src_node', with preferential attachment + prohibited_nodes = list(G[src_node]) + prohibited_nodes.append(src_node) + # This will raise an exception if the sequence is empty + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in prohibited_nodes] + ) + # Adding the new edge + G.add_edge(src_node, dest_node) + + # Appending both nodes to add to their preferential attachment + attachment_preference.append(src_node) + attachment_preference.append(dest_node) + + # Adjusting the eligible nodes. Degree may be saturated. + if G.degree(src_node) == clique_degree: + eligible_nodes.remove(src_node) + if G.degree(dest_node) == clique_degree and dest_node in eligible_nodes: + eligible_nodes.remove(dest_node) + + # Rewiring m edges, if there are enough edges + elif p <= a_probability < (p + q) and m <= G.size() < clique_size: + # Selecting nodes that have at least 1 edge but that are not + # fully connected to ALL other nodes (center of star). + # These nodes are the pivot nodes of the edges to rewire + eligible_nodes = [nd for nd, deg in G.degree() if 0 < deg < clique_degree] + for i in range(m): + # Choosing a random source node + node = seed.choice(eligible_nodes) + + # The available nodes do have a neighbor at least. + nbr_nodes = list(G[node]) + + # Choosing the other end that will get detached + src_node = seed.choice(nbr_nodes) + + # Picking a target node that is not 'node' or + # neighbor with 'node', with preferential attachment + nbr_nodes.append(node) + dest_node = seed.choice( + [nd for nd in attachment_preference if nd not in nbr_nodes] + ) + # Rewire + G.remove_edge(node, src_node) + G.add_edge(node, dest_node) + + # Adjusting the preferential attachment list + attachment_preference.remove(src_node) + attachment_preference.append(dest_node) + + # Adjusting the eligible nodes. + # nodes may be saturated or isolated. + if G.degree(src_node) == 0 and src_node in eligible_nodes: + eligible_nodes.remove(src_node) + if dest_node in eligible_nodes: + if G.degree(dest_node) == clique_degree: + eligible_nodes.remove(dest_node) + else: + if G.degree(dest_node) == 1: + eligible_nodes.append(dest_node) + + # Adding new node with m edges + else: + # Select the edges' nodes by preferential attachment + targets = _random_subset(attachment_preference, m, seed) + G.add_edges_from(zip([new_node] * m, targets)) + + # Add one node to the list for each new edge just created. + attachment_preference.extend(targets) + # The new node has m edges to it, plus itself: m + 1 + attachment_preference.extend([new_node] * (m + 1)) + new_node += 1 + return G + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def powerlaw_cluster_graph(n, m, p, seed=None, *, create_using=None): + """Holme and Kim algorithm for growing graphs with powerlaw + degree distribution and approximate average clustering. 
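+
+    A minimal usage sketch (standard API; larger `p` trades preferential
+    attachment steps for triangle-closing steps, raising average clustering):
+
+    >>> G = nx.powerlaw_cluster_graph(100, 3, 0.5, seed=1)
+    >>> len(G)
+    100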
+
+    Parameters
+    ----------
+    n : int
+        the number of nodes
+    m : int
+        the number of random edges to add for each new node
+    p : float
+        Probability of adding a triangle after adding a random edge
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Notes
+    -----
+    The average clustering has a hard time getting above a certain
+    cutoff that depends on `m`. This cutoff is often quite low. The
+    transitivity (fraction of triangles to possible triangles) seems to
+    decrease with network size.
+
+    It is essentially the Barabási–Albert (BA) growth model with an
+    extra step that each random edge is followed by a chance of
+    making an edge to one of its neighbors too (and thus a triangle).
+
+    This algorithm improves on BA in the sense that it enables a
+    higher average clustering to be attained if desired.
+
+    It seems possible to have a disconnected graph with this algorithm
+    since the initial `m` nodes may not be all linked to a new node
+    on the first iteration like the BA model.
+
+    Raises
+    ------
+    NetworkXError
+        If `m` does not satisfy ``1 <= m <= n`` or `p` does not
+        satisfy ``0 <= p <= 1``.
+
+    References
+    ----------
+    .. [1] P. Holme and B. J. Kim,
+       "Growing scale-free networks with tunable clustering",
+       Phys. Rev. E, 65, 026107, 2002.
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    if m < 1 or n < m:
+        raise nx.NetworkXError(f"powerlaw_cluster_graph must have 1 <= m <= n, m={m}, n={n}")
+    if p > 1 or p < 0:
+        raise nx.NetworkXError(f"p must be in [0, 1], p={p}")
+
+    G = empty_graph(m, create_using)  # add m initial nodes (m0 in barabasi-speak)
+    repeated_nodes = list(G)  # list of existing nodes to sample from
+    # with nodes repeated once for each adjacent edge
+    source = m  # next node is m
+    while source < n:  # Now add the other n-1 nodes
+        possible_targets = _random_subset(repeated_nodes, m, seed)
+        # do one preferential attachment for new node
+        target = possible_targets.pop()
+        G.add_edge(source, target)
+        repeated_nodes.append(target)  # add one node to list for each new link
+        count = 1
+        while count < m:  # add m-1 more new links
+            if seed.random() < p:  # clustering step: add triangle
+                neighborhood = [
+                    nbr
+                    for nbr in G.neighbors(target)
+                    if not G.has_edge(source, nbr) and nbr != source
+                ]
+                if neighborhood:  # if there is a neighbor without a link
+                    nbr = seed.choice(neighborhood)
+                    G.add_edge(source, nbr)  # add triangle
+                    repeated_nodes.append(nbr)
+                    count = count + 1
+                    continue  # go to top of while loop
+            # else do preferential attachment step if above fails
+            target = possible_targets.pop()
+            G.add_edge(source, target)
+            repeated_nodes.append(target)
+            count = count + 1
+
+        repeated_nodes.extend([source] * m)  # add source node to list m times
+        source += 1
+    return G
+
+
+@py_random_state(3)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_lobster(n, p1, p2, seed=None, *, create_using=None):
+    """Returns a random lobster graph.
+
+    A lobster is a tree that reduces to a caterpillar when pruning all
+    leaf nodes. A caterpillar is a tree that reduces to a path graph
+    when pruning all leaf nodes; setting `p2` to zero produces a caterpillar.
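+
+    A usage sketch (illustrative): whenever the random backbone is nonempty,
+    the result is a tree, since every node beyond the backbone is attached
+    by exactly one edge.
+
+    >>> G = nx.random_lobster(10, 0.5, 0.5, seed=7)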
+
+    This implementation iterates on the probabilities `p1` and `p2` to add
+    edges at levels 1 and 2, respectively. Graphs are therefore constructed
+    iteratively with uniform randomness at each level rather than being selected
+    uniformly at random from the set of all possible lobsters.
+
+    Parameters
+    ----------
+    n : int
+        The expected number of nodes in the backbone
+    p1 : float
+        Probability of adding an edge to the backbone
+    p2 : float
+        Probability of adding an edge one level beyond backbone
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Raises
+    ------
+    NetworkXError
+        If `p1` or `p2` is greater than or equal to 1, because the while
+        loops would never finish.
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    p1, p2 = abs(p1), abs(p2)
+    if any(p >= 1 for p in [p1, p2]):
+        raise nx.NetworkXError("Probability values for `p1` and `p2` must both be < 1.")
+
+    # a necessary ingredient in any self-respecting graph library
+    llen = int(2 * seed.random() * n + 0.5)
+    L = path_graph(llen, create_using)
+    # build caterpillar: add edges to path graph with probability p1
+    current_node = llen - 1
+    for n in range(llen):
+        while seed.random() < p1:  # add fuzzy caterpillar parts
+            current_node += 1
+            L.add_edge(n, current_node)
+            cat_node = current_node
+            while seed.random() < p2:  # add crunchy lobster bits
+                current_node += 1
+                L.add_edge(cat_node, current_node)
+    return L  # voila, un lobster!
+
+
+@py_random_state(1)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_shell_graph(constructor, seed=None, *, create_using=None):
+    """Returns a random shell graph for the constructor given.
+
+    Parameters
+    ----------
+    constructor : list of three-tuples
+        Represents the parameters for a shell, starting at the center
+        shell. Each element of the list must be of the form `(n, m,
+        d)`, where `n` is the number of nodes in the shell, `m` is
+        the number of edges in the shell, and `d` is the ratio of
+        inter-shell (next) edges to intra-shell edges. If `d` is zero,
+        there will be no intra-shell edges, and if `d` is one there
+        will be all possible intra-shell edges.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. Graph instances are not supported.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Examples
+    --------
+    >>> constructor = [(10, 20, 0.8), (20, 40, 0.8)]
+    >>> G = nx.random_shell_graph(constructor)
+
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    G = empty_graph(0, create_using)
+
+    glist = []
+    intra_edges = []
+    nnodes = 0
+    # create gnm graphs for each shell
+    for n, m, d in constructor:
+        inter_edges = int(m * d)
+        intra_edges.append(m - inter_edges)
+        g = nx.convert_node_labels_to_integers(
+            gnm_random_graph(n, inter_edges, seed=seed, create_using=G.__class__),
+            first_label=nnodes,
+        )
+        glist.append(g)
+        nnodes += n
+        G = nx.operators.union(G, g)
+
+    # connect the shells randomly
+    for gi in range(len(glist) - 1):
+        nlist1 = list(glist[gi])
+        nlist2 = list(glist[gi + 1])
+        total_edges = intra_edges[gi]
+        edge_count = 0
+        while edge_count < total_edges:
+            u = seed.choice(nlist1)
+            v = seed.choice(nlist2)
+            if u == v or G.has_edge(u, v):
+                continue
+            else:
+                G.add_edge(u, v)
+                edge_count = edge_count + 1
+    return G
+
+
+@py_random_state(2)
+@nx._dispatchable(graphs=None, returns_graph=True)
+def random_powerlaw_tree(n, gamma=3, seed=None, tries=100, *, create_using=None):
+    """Returns a tree with a power law degree distribution.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes.
+    gamma : float
+        Exponent of the power law.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    tries : int
+        Number of attempts to adjust the sequence to make it a tree.
+    create_using : Graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+        Multigraph and directed types are not supported and raise a ``NetworkXError``.
+
+    Raises
+    ------
+    NetworkXError
+        If no valid sequence is found within the maximum number of
+        attempts.
+
+    Notes
+    -----
+    A trial power law degree sequence is chosen and then elements are
+    swapped with new elements from a power law distribution until the
+    sequence makes a tree (by checking, for example, that the number of
+    edges is one smaller than the number of nodes).
+
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    # This call may raise a NetworkXError if the number of tries is exceeded.
+    seq = random_powerlaw_tree_sequence(n, gamma=gamma, seed=seed, tries=tries)
+    G = degree_sequence_tree(seq, create_using)
+    return G
+
+
+@py_random_state(2)
+@nx._dispatchable(graphs=None)
+def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100):
+    """Returns a degree sequence for a tree with a power law distribution.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes.
+    gamma : float
+        Exponent of the power law.
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+    tries : int
+        Number of attempts to adjust the sequence to make it a tree.
+
+    Raises
+    ------
+    NetworkXError
+        If no valid sequence is found within the maximum number of
+        attempts.
+
+    Notes
+    -----
+    A trial power law degree sequence is chosen and then elements are
+    swapped with new elements from a power law distribution until
+    the sequence makes a tree (by checking, for example, that the number of
+    edges is one smaller than the number of nodes).
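+
+    For instance, any tree on ``n = 4`` nodes has degree sum
+    ``2 * (4 - 1) = 6``: the path has degrees ``[1, 2, 2, 1]`` and the star
+    has degrees ``[3, 1, 1, 1]``. The code below checks the equivalent
+    integer-only condition ``2 * n - sum(zseq) == 2``.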
+ + """ + # get trial sequence + z = nx.utils.powerlaw_sequence(n, exponent=gamma, seed=seed) + # round to integer values in the range [0,n] + zseq = [min(n, max(round(s), 0)) for s in z] + + # another sequence to swap values from + z = nx.utils.powerlaw_sequence(tries, exponent=gamma, seed=seed) + # round to integer values in the range [0,n] + swap = [min(n, max(round(s), 0)) for s in z] + + for deg in swap: + # If this degree sequence can be the degree sequence of a tree, return + # it. It can be a tree if the number of edges is one fewer than the + # number of nodes, or in other words, `n - sum(zseq) / 2 == 1`. We + # use an equivalent condition below that avoids floating point + # operations. + if 2 * n - sum(zseq) == 2: + return zseq + index = seed.randint(0, n - 1) + zseq[index] = swap.pop() + + raise nx.NetworkXError( + f"Exceeded max ({tries}) attempts for a valid tree sequence." + ) + + +@py_random_state(3) +@nx._dispatchable(graphs=None, returns_graph=True) +def random_kernel_graph( + n, kernel_integral, kernel_root=None, seed=None, *, create_using=None +): + r"""Returns an random graph based on the specified kernel. + + The algorithm chooses each of the $[n(n-1)]/2$ possible edges with + probability specified by a kernel $\kappa(x,y)$ [1]_. The kernel + $\kappa(x,y)$ must be a symmetric (in $x,y$), non-negative, + bounded function. + + Parameters + ---------- + n : int + The number of nodes + kernel_integral : function + Function that returns the definite integral of the kernel $\kappa(x,y)$, + $F(y,a,b) := \int_a^b \kappa(x,y)dx$ + kernel_root: function (optional) + Function that returns the root $b$ of the equation $F(y,a,b) = r$. + If None, the root is found using :func:`scipy.optimize.brentq` + (this requires SciPy). + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + create_using : Graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + Multigraph and directed types are not supported and raise a ``NetworkXError``. + + Notes + ----- + The kernel is specified through its definite integral which must be + provided as one of the arguments. If the integral and root of the + kernel integral can be found in $O(1)$ time then this algorithm runs in + time $O(n+m)$ where m is the expected number of edges [2]_. + + The nodes are set to integers from $0$ to $n-1$. + + Examples + -------- + Generate an Erdős–Rényi random graph $G(n,c/n)$, with kernel + $\kappa(x,y)=c$ where $c$ is the mean expected degree. + + >>> def integral(u, w, z): + ... return c * (z - w) + >>> def root(u, w, r): + ... return r / c + w + >>> c = 1 + >>> graph = nx.random_kernel_graph(1000, integral, root) + + See Also + -------- + gnp_random_graph + expected_degree_graph + + References + ---------- + .. [1] Bollobás, Béla, Janson, S. and Riordan, O. + "The phase transition in inhomogeneous random graphs", + *Random Structures Algorithms*, 31, 3--122, 2007. + + .. [2] Hagberg A, Lemons N (2015), + "Fast Generation of Sparse Random Kernel Graphs". + PLoS ONE 10(9): e0135177, 2015. 
+       doi:10.1371/journal.pone.0135177
+    """
+    create_using = check_create_using(create_using, directed=False, multigraph=False)
+    if kernel_root is None:
+        import scipy as sp
+
+        def kernel_root(y, a, r):
+            def my_function(b):
+                return kernel_integral(y, a, b) - r
+
+            return sp.optimize.brentq(my_function, a, 1)
+
+    graph = nx.empty_graph(create_using=create_using)
+    graph.add_nodes_from(range(n))
+    (i, j) = (1, 1)
+    while i < n:
+        r = -math.log(1 - seed.random())  # (1-seed.random()) in (0, 1]
+        if kernel_integral(i / n, j / n, 1) <= r:
+            i, j = i + 1, i + 1
+        else:
+            j = math.ceil(n * kernel_root(i / n, j / n, r))
+            graph.add_edge(i - 1, j - 1)
+    return graph
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/small.py b/.venv/lib/python3.12/site-packages/networkx/generators/small.py
new file mode 100644
index 0000000..719cf49
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/small.py
@@ -0,0 +1,996 @@
+"""
+Various small and named graphs, together with some compact generators.
+
+"""
+
+__all__ = [
+    "LCF_graph",
+    "bull_graph",
+    "chvatal_graph",
+    "cubical_graph",
+    "desargues_graph",
+    "diamond_graph",
+    "dodecahedral_graph",
+    "frucht_graph",
+    "heawood_graph",
+    "hoffman_singleton_graph",
+    "house_graph",
+    "house_x_graph",
+    "icosahedral_graph",
+    "krackhardt_kite_graph",
+    "moebius_kantor_graph",
+    "octahedral_graph",
+    "pappus_graph",
+    "petersen_graph",
+    "sedgewick_maze_graph",
+    "tetrahedral_graph",
+    "truncated_cube_graph",
+    "truncated_tetrahedron_graph",
+    "tutte_graph",
+]
+
+from functools import wraps
+
+import networkx as nx
+from networkx.exception import NetworkXError
+from networkx.generators.classic import (
+    complete_graph,
+    cycle_graph,
+    empty_graph,
+    path_graph,
+)
+
+
+def _raise_on_directed(func):
+    """
+    A decorator which inspects the `create_using` argument and raises a
+    NetworkX exception when `create_using` is a DiGraph (class or instance) for
+    graph generators that do not support directed outputs.
+
+    `create_using` may be a keyword argument or the first positional argument.
+    """
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        create_using = args[0] if args else kwargs.get("create_using")
+        if create_using is not None:
+            G = nx.empty_graph(create_using=create_using)
+            if G.is_directed():
+                raise NetworkXError("Directed Graph not supported")
+        return func(*args, **kwargs)
+
+    return wrapper
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def LCF_graph(n, shift_list, repeats, create_using=None):
+    """
+    Return the cubic graph specified in LCF notation.
+
+    LCF (Lederberg-Coxeter-Frucht) notation [1]_ is a compressed
+    notation used in the generation of various cubic Hamiltonian
+    graphs of high symmetry. See, for example, `dodecahedral_graph`,
+    `desargues_graph`, `heawood_graph` and `pappus_graph`.
+
+    Nodes are drawn from ``range(n)``. Each node ``n_i`` is connected with
+    node ``(n_i + shift) % n``, where ``shift`` is given by cycling through
+    the input `shift_list` `repeats` times.
+
+    Parameters
+    ----------
+    n : int
+        The starting graph is the `n`-cycle with nodes ``0, ..., n-1``.
+        The null graph is returned if `n` < 1.
+
+    shift_list : list
+        A list of integer shifts mod `n`, ``[s1, s2, ..., sk]``
+
+    repeats : int
+        Integer specifying the number of times that shifts in `shift_list`
+        are successively applied to each current node in the n-cycle
+        to generate an edge between ``n_current`` and ``(n_current + shift) % n``.
+
+    Returns
+    -------
+    G : Graph
+        A graph instance created from the specified LCF notation.
+
+    Examples
+    --------
+    The utility graph $K_{3,3}$
+
+    >>> G = nx.LCF_graph(6, [3, -3], 3)
+    >>> G.edges()
+    EdgeView([(0, 1), (0, 5), (0, 3), (1, 2), (1, 4), (2, 3), (2, 5), (3, 4), (4, 5)])
+
+    The Heawood graph:
+
+    >>> G = nx.LCF_graph(14, [5, -5], 7)
+    >>> nx.is_isomorphic(G, nx.heawood_graph())
+    True
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/LCF_notation
+
+    """
+    if n <= 0:
+        return empty_graph(0, create_using)
+
+    # start with the n-cycle
+    G = cycle_graph(n, create_using)
+    if G.is_directed():
+        raise NetworkXError("Directed Graph not supported")
+    G.name = "LCF_graph"
+    nodes = sorted(G)
+
+    n_extra_edges = repeats * len(shift_list)
+    # edges are added n_extra_edges times
+    # (not all of these need be new)
+    if n_extra_edges < 1:
+        return G
+
+    for i in range(n_extra_edges):
+        shift = shift_list[i % len(shift_list)]  # cycle through shift_list
+        v1 = nodes[i % n]  # cycle repeatedly through nodes
+        v2 = nodes[(i + shift) % n]
+        G.add_edge(v1, v2)
+    return G
+
+
+# -------------------------------------------------------------------------------
+#   Various small and named graphs
+# -------------------------------------------------------------------------------
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def bull_graph(create_using=None):
+    """
+    Returns the Bull Graph
+
+    The Bull Graph has 5 nodes and 5 edges. It is a planar undirected
+    graph in the form of a triangle with two disjoint pendant edges [1]_.
+    The name comes from the triangle and pendant edges representing
+    respectively the body and legs of a bull.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        A bull graph with 5 nodes
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Bull_graph.
+
+    """
+    G = nx.from_dict_of_lists(
+        {0: [1, 2], 1: [0, 2, 3], 2: [0, 1, 4], 3: [1], 4: [2]},
+        create_using=create_using,
+    )
+    G.name = "Bull Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def chvatal_graph(create_using=None):
+    """
+    Returns the Chvátal Graph
+
+    The Chvátal Graph is an undirected graph with 12 nodes and 24 edges [1]_.
+    It has 370 distinct (directed) Hamiltonian cycles, giving a unique generalized
+    LCF notation of order 4, two of order 6, and 43 of order 1 [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        The Chvátal graph with 12 nodes and 24 edges
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Chv%C3%A1tal_graph
+    .. [2] https://mathworld.wolfram.com/ChvatalGraph.html
+
+    """
+    G = nx.from_dict_of_lists(
+        {
+            0: [1, 4, 6, 9],
+            1: [2, 5, 7],
+            2: [3, 6, 8],
+            3: [4, 7, 9],
+            4: [5, 8],
+            5: [10, 11],
+            6: [10, 11],
+            7: [8, 11],
+            8: [10],
+            9: [10, 11],
+        },
+        create_using=create_using,
+    )
+    G.name = "Chvatal Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def cubical_graph(create_using=None):
+    """
+    Returns the 3-regular Platonic Cubical Graph
+
+    The skeleton of the cube (the nodes and edges) forms a graph with 8
+    nodes and 12 edges. It is a special case of the hypercube graph.
+    It is one of 5 Platonic graphs, each a skeleton of its
+    Platonic solid [1]_.
+    Such graphs arise in parallel processing in computers.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        A cubical graph with 8 nodes and 12 edges
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Cube#Cubical_graph
+
+    """
+    G = nx.from_dict_of_lists(
+        {
+            0: [1, 3, 4],
+            1: [0, 2, 7],
+            2: [1, 3, 6],
+            3: [0, 2, 5],
+            4: [0, 5, 7],
+            5: [3, 4, 6],
+            6: [2, 5, 7],
+            7: [1, 4, 6],
+        },
+        create_using=create_using,
+    )
+    G.name = "Platonic Cubical Graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def desargues_graph(create_using=None):
+    """
+    Returns the Desargues Graph
+
+    The Desargues Graph is a non-planar, distance-transitive cubic graph
+    with 20 nodes and 30 edges [1]_.
+    It is a symmetric graph. It can be represented in LCF notation
+    as [5,-5,9,-9]^5 [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Desargues Graph with 20 nodes and 30 edges
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Desargues_graph
+    .. [2] https://mathworld.wolfram.com/DesarguesGraph.html
+    """
+    G = LCF_graph(20, [5, -5, 9, -9], 5, create_using)
+    G.name = "Desargues Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def diamond_graph(create_using=None):
+    """
+    Returns the Diamond graph
+
+    The Diamond Graph is a planar undirected graph with 4 nodes and 5 edges.
+    It is also sometimes known as the double triangle graph or kite graph [1]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Diamond Graph with 4 nodes and 5 edges
+
+    References
+    ----------
+    .. [1] https://mathworld.wolfram.com/DiamondGraph.html
+    """
+    G = nx.from_dict_of_lists(
+        {0: [1, 2], 1: [0, 2, 3], 2: [0, 1, 3], 3: [1, 2]}, create_using=create_using
+    )
+    G.name = "Diamond Graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def dodecahedral_graph(create_using=None):
+    """
+    Returns the Platonic Dodecahedral graph.
+
+    The dodecahedral graph has 20 nodes and 30 edges. The skeleton of the
+    dodecahedron forms a graph. It is one of 5 Platonic graphs [1]_.
+    It can be described in LCF notation as:
+    ``[10, 7, 4, -4, -7, 10, -4, 7, -7, 4]^2`` [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Dodecahedral Graph with 20 nodes and 30 edges
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Regular_dodecahedron#Dodecahedral_graph
+    .. [2] https://mathworld.wolfram.com/DodecahedralGraph.html
+
+    """
+    G = LCF_graph(20, [10, 7, 4, -4, -7, 10, -4, 7, -7, 4], 2, create_using)
+    G.name = "Dodecahedral Graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def frucht_graph(create_using=None):
+    """
+    Returns the Frucht Graph.
+ + The Frucht Graph is the smallest cubical graph whose + automorphism group consists only of the identity element [1]_. + It has 12 nodes and 18 edges and no nontrivial symmetries. + It is planar and Hamiltonian [2]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + Frucht Graph with 12 nodes and 18 edges + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Frucht_graph + .. [2] https://mathworld.wolfram.com/FruchtGraph.html + + """ + G = cycle_graph(7, create_using) + G.add_edges_from( + [ + [0, 7], + [1, 7], + [2, 8], + [3, 9], + [4, 9], + [5, 10], + [6, 10], + [7, 11], + [8, 11], + [8, 9], + [10, 11], + ] + ) + + G.name = "Frucht Graph" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def heawood_graph(create_using=None): + """ + Returns the Heawood Graph, a (3,6) cage. + + The Heawood Graph is an undirected graph with 14 nodes and 21 edges, + named after Percy John Heawood [1]_. + It is cubic symmetric, nonplanar, Hamiltonian, and can be represented + in LCF notation as ``[5,-5]^7`` [2]_. + It is the unique (3,6)-cage: the regular cubic graph of girth 6 with + minimal number of vertices [3]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + Heawood Graph with 14 nodes and 21 edges + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Heawood_graph + .. [2] https://mathworld.wolfram.com/HeawoodGraph.html + .. [3] https://www.win.tue.nl/~aeb/graphs/Heawood.html + + """ + G = LCF_graph(14, [5, -5], 7, create_using) + G.name = "Heawood Graph" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def hoffman_singleton_graph(): + """ + Returns the Hoffman-Singleton Graph. + + The Hoffman–Singleton graph is a symmetrical undirected graph + with 50 nodes and 175 edges. + All indices lie in ``Z % 5``: that is, the integers mod 5 [1]_. + It is the only regular graph of vertex degree 7, diameter 2, and girth 5. + It is the unique (7,5)-cage graph and Moore graph, and contains many + copies of the Petersen graph [2]_. + + Returns + ------- + G : networkx Graph + Hoffman–Singleton Graph with 50 nodes and 175 edges + + Notes + ----- + Constructed from pentagon and pentagram as follows: Take five pentagons $P_h$ + and five pentagrams $Q_i$ . Join vertex $j$ of $P_h$ to vertex $h·i+j$ of $Q_i$ [3]_. + + References + ---------- + .. [1] https://blogs.ams.org/visualinsight/2016/02/01/hoffman-singleton-graph/ + .. [2] https://mathworld.wolfram.com/Hoffman-SingletonGraph.html + .. 
[3] https://en.wikipedia.org/wiki/Hoffman%E2%80%93Singleton_graph + + """ + G = nx.Graph() + for i in range(5): + for j in range(5): + G.add_edge(("pentagon", i, j), ("pentagon", i, (j - 1) % 5)) + G.add_edge(("pentagon", i, j), ("pentagon", i, (j + 1) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j - 2) % 5)) + G.add_edge(("pentagram", i, j), ("pentagram", i, (j + 2) % 5)) + for k in range(5): + G.add_edge(("pentagon", i, j), ("pentagram", k, (i * k + j) % 5)) + G = nx.convert_node_labels_to_integers(G) + G.name = "Hoffman-Singleton Graph" + return G + + +@_raise_on_directed +@nx._dispatchable(graphs=None, returns_graph=True) +def house_graph(create_using=None): + """ + Returns the House graph (square with triangle on top) + + The house graph is a simple undirected graph with + 5 nodes and 6 edges [1]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + House graph in the form of a square with a triangle on top + + References + ---------- + .. [1] https://mathworld.wolfram.com/HouseGraph.html + """ + G = nx.from_dict_of_lists( + {0: [1, 2], 1: [0, 3], 2: [0, 3, 4], 3: [1, 2, 4], 4: [2, 3]}, + create_using=create_using, + ) + G.name = "House Graph" + return G + + +@_raise_on_directed +@nx._dispatchable(graphs=None, returns_graph=True) +def house_x_graph(create_using=None): + """ + Returns the House graph with a cross inside the house square. + + The House X-graph is the House graph plus the two edges connecting diagonally + opposite vertices of the square base. It is also one of the two graphs + obtained by removing two edges from the pentatope graph [1]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + House graph with diagonal vertices connected + + References + ---------- + .. [1] https://mathworld.wolfram.com/HouseGraph.html + """ + G = house_graph(create_using) + G.add_edges_from([(0, 3), (1, 2)]) + G.name = "House-with-X-inside Graph" + return G + + +@_raise_on_directed +@nx._dispatchable(graphs=None, returns_graph=True) +def icosahedral_graph(create_using=None): + """ + Returns the Platonic Icosahedral graph. + + The icosahedral graph has 12 nodes and 30 edges. It is a Platonic graph + whose nodes have the connectivity of the icosahedron. It is undirected, + regular and Hamiltonian [1]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + Icosahedral graph with 12 nodes and 30 edges. + + References + ---------- + .. [1] https://mathworld.wolfram.com/IcosahedralGraph.html + """ + G = nx.from_dict_of_lists( + { + 0: [1, 5, 7, 8, 11], + 1: [2, 5, 6, 8], + 2: [3, 6, 8, 9], + 3: [4, 6, 9, 10], + 4: [5, 6, 10, 11], + 5: [6, 11], + 7: [8, 9, 10, 11], + 8: [9], + 9: [10], + 10: [11], + }, + create_using=create_using, + ) + G.name = "Platonic Icosahedral Graph" + return G + + +@_raise_on_directed +@nx._dispatchable(graphs=None, returns_graph=True) +def krackhardt_kite_graph(create_using=None): + """ + Returns the Krackhardt Kite Social Network. + + A 10 actor social network introduced by David Krackhardt + to illustrate different centrality measures [1]_. 
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Krackhardt Kite graph with 10 nodes and 18 edges
+
+    Notes
+    -----
+    The traditional labeling is:
+    Andre=1, Beverley=2, Carol=3, Diane=4,
+    Ed=5, Fernando=6, Garth=7, Heather=8, Ike=9, Jane=10.
+
+    References
+    ----------
+    .. [1] Krackhardt, David. "Assessing the Political Landscape: Structure,
+       Cognition, and Power in Organizations". Administrative Science Quarterly.
+       35 (2): 342–369. doi:10.2307/2393394. JSTOR 2393394. June 1990.
+
+    """
+    G = nx.from_dict_of_lists(
+        {
+            0: [1, 2, 3, 5],
+            1: [0, 3, 4, 6],
+            2: [0, 3, 5],
+            3: [0, 1, 2, 4, 5, 6],
+            4: [1, 3, 6],
+            5: [0, 2, 3, 6, 7],
+            6: [1, 3, 4, 5, 7],
+            7: [5, 6, 8],
+            8: [7, 9],
+            9: [8],
+        },
+        create_using=create_using,
+    )
+    G.name = "Krackhardt Kite Social Network"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def moebius_kantor_graph(create_using=None):
+    """
+    Returns the Moebius-Kantor graph.
+
+    The Möbius-Kantor graph is the cubic symmetric graph on 16 nodes.
+    Its LCF notation is [5,-5]^8, and it is isomorphic to the generalized
+    Petersen graph $GP(8, 3)$ [1]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Moebius-Kantor graph
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/M%C3%B6bius%E2%80%93Kantor_graph
+
+    """
+    G = LCF_graph(16, [5, -5], 8, create_using)
+    G.name = "Moebius-Kantor Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def octahedral_graph(create_using=None):
+    """
+    Returns the Platonic Octahedral graph.
+
+    The octahedral graph is the 6-node 12-edge Platonic graph having the
+    connectivity of the octahedron [1]_. If three couples go to a party,
+    and each person shakes hands with every person except his or her partner,
+    then this graph describes the set of handshakes that take place;
+    for this reason it is also called the cocktail party graph [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Octahedral graph
+
+    References
+    ----------
+    .. [1] https://mathworld.wolfram.com/OctahedralGraph.html
+    .. [2] https://en.wikipedia.org/wiki/Tur%C3%A1n_graph#Special_cases
+
+    """
+    G = nx.from_dict_of_lists(
+        {0: [1, 2, 3, 4], 1: [2, 3, 5], 2: [4, 5], 3: [4, 5], 4: [5]},
+        create_using=create_using,
+    )
+    G.name = "Platonic Octahedral Graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def pappus_graph():
+    """
+    Returns the Pappus graph.
+
+    The Pappus graph is a cubic symmetric distance-regular graph with 18 nodes
+    and 27 edges. It is Hamiltonian and can be represented in LCF notation as
+    [5,7,-7,7,-7,-5]^3 [1]_.
+
+    Returns
+    -------
+    G : networkx Graph
+        Pappus graph
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Pappus_graph
+    """
+    G = LCF_graph(18, [5, 7, -7, 7, -7, -5], 3)
+    G.name = "Pappus Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def petersen_graph(create_using=None):
+    """
+    Returns the Petersen graph.
+
+    The Petersen graph is a cubic, undirected graph with 10 nodes and 15 edges [1]_.
+    Julius Petersen constructed the graph as the smallest counterexample
+    against the claim that a connected bridgeless cubic graph
+    has an edge colouring with three colours [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Petersen graph
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Petersen_graph
+    .. [2] https://www.win.tue.nl/~aeb/drg/graphs/Petersen.html
+    """
+    G = nx.from_dict_of_lists(
+        {
+            0: [1, 4, 5],
+            1: [0, 2, 6],
+            2: [1, 3, 7],
+            3: [2, 4, 8],
+            4: [3, 0, 9],
+            5: [0, 7, 8],
+            6: [1, 8, 9],
+            7: [2, 5, 9],
+            8: [3, 5, 6],
+            9: [4, 6, 7],
+        },
+        create_using=create_using,
+    )
+    G.name = "Petersen Graph"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def sedgewick_maze_graph(create_using=None):
+    """
+    Return a small maze with a cycle.
+
+    This is the maze used in Sedgewick, 3rd Edition, Part 5, Graph
+    Algorithms, Chapter 18, e.g. Figure 18.2 and following [1]_.
+    Nodes are numbered 0, ..., 7.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Small maze with a cycle
+
+    References
+    ----------
+    .. [1] Figure 18.2, Chapter 18, Graph Algorithms (3rd Ed), Sedgewick
+    """
+    G = empty_graph(0, create_using)
+    G.add_nodes_from(range(8))
+    G.add_edges_from([[0, 2], [0, 7], [0, 5]])
+    G.add_edges_from([[1, 7], [2, 6]])
+    G.add_edges_from([[3, 4], [3, 5]])
+    G.add_edges_from([[4, 5], [4, 7], [4, 6]])
+    G.name = "Sedgewick Maze"
+    return G
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def tetrahedral_graph(create_using=None):
+    """
+    Returns the 3-regular Platonic Tetrahedral graph.
+
+    The tetrahedral graph has 4 nodes and 6 edges. It is a
+    special case of the complete graph, $K_4$, and the wheel graph, $W_4$.
+    It is one of the 5 Platonic graphs [1]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Tetrahedral Graph
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Tetrahedron#Tetrahedral_graph
+
+    """
+    G = complete_graph(4, create_using)
+    G.name = "Platonic Tetrahedral Graph"
+    return G
+
+
+@_raise_on_directed
+@nx._dispatchable(graphs=None, returns_graph=True)
+def truncated_cube_graph(create_using=None):
+    """
+    Returns the skeleton of the truncated cube.
+
+    The truncated cube is an Archimedean solid with 14 regular
+    faces (6 octagonal and 8 triangular), 36 edges and 24 nodes [1]_.
+    The truncated cube is created by truncating (cutting off) the tips
+    of the cube one third of the way into each edge [2]_.
+
+    Parameters
+    ----------
+    create_using : NetworkX graph constructor, optional (default=nx.Graph)
+        Graph type to create. If graph instance, then cleared before populated.
+
+    Returns
+    -------
+    G : networkx Graph
+        Skeleton of the truncated cube
+
+    References
+    ----------
+    .. [1] https://en.wikipedia.org/wiki/Truncated_cube
+    ..
[2] https://www.coolmath.com/reference/polyhedra-truncated-cube + + """ + G = nx.from_dict_of_lists( + { + 0: [1, 2, 4], + 1: [11, 14], + 2: [3, 4], + 3: [6, 8], + 4: [5], + 5: [16, 18], + 6: [7, 8], + 7: [10, 12], + 8: [9], + 9: [17, 20], + 10: [11, 12], + 11: [14], + 12: [13], + 13: [21, 22], + 14: [15], + 15: [19, 23], + 16: [17, 18], + 17: [20], + 18: [19], + 19: [23], + 20: [21], + 21: [22], + 22: [23], + }, + create_using=create_using, + ) + G.name = "Truncated Cube Graph" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def truncated_tetrahedron_graph(create_using=None): + """ + Returns the skeleton of the truncated Platonic tetrahedron. + + The truncated tetrahedron is an Archimedean solid with 4 regular hexagonal faces, + 4 equilateral triangle faces, 12 nodes and 18 edges. It can be constructed by truncating + all 4 vertices of a regular tetrahedron at one third of the original edge length [1]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + Skeleton of the truncated tetrahedron + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Truncated_tetrahedron + + """ + G = path_graph(12, create_using) + G.add_edges_from([(0, 2), (0, 9), (1, 6), (3, 11), (4, 11), (5, 7), (8, 10)]) + G.name = "Truncated Tetrahedron Graph" + return G + + +@_raise_on_directed +@nx._dispatchable(graphs=None, returns_graph=True) +def tutte_graph(create_using=None): + """ + Returns the Tutte graph. + + The Tutte graph is a cubic polyhedral, non-Hamiltonian graph. It has + 46 nodes and 69 edges. + It is a counterexample to Tait's conjecture that every 3-regular polyhedron + has a Hamiltonian cycle. + It can be realized geometrically from a tetrahedron by multiply truncating + three of its vertices [1]_. + + Parameters + ---------- + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + Returns + ------- + G : networkx Graph + Tutte graph + + References + ---------- + .. [1] https://en.wikipedia.org/wiki/Tutte_graph + """ + G = nx.from_dict_of_lists( + { + 0: [1, 2, 3], + 1: [4, 26], + 2: [10, 11], + 3: [18, 19], + 4: [5, 33], + 5: [6, 29], + 6: [7, 27], + 7: [8, 14], + 8: [9, 38], + 9: [10, 37], + 10: [39], + 11: [12, 39], + 12: [13, 35], + 13: [14, 15], + 14: [34], + 15: [16, 22], + 16: [17, 44], + 17: [18, 43], + 18: [45], + 19: [20, 45], + 20: [21, 41], + 21: [22, 23], + 22: [40], + 23: [24, 27], + 24: [25, 32], + 25: [26, 31], + 26: [33], + 27: [28], + 28: [29, 32], + 29: [30], + 30: [31, 33], + 31: [32], + 34: [35, 38], + 35: [36], + 36: [37, 39], + 37: [38], + 40: [41, 44], + 41: [42], + 42: [43, 45], + 43: [44], + }, + create_using=create_using, + ) + G.name = "Tutte's Graph" + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/social.py b/.venv/lib/python3.12/site-packages/networkx/generators/social.py new file mode 100644 index 0000000..f41b2d8 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/social.py @@ -0,0 +1,554 @@ +""" +Famous social networks. +""" + +import networkx as nx + +__all__ = [ + "karate_club_graph", + "davis_southern_women_graph", + "florentine_families_graph", + "les_miserables_graph", +] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def karate_club_graph(): + """Returns Zachary's Karate Club graph. 
+ + Each node in the returned graph has a node attribute 'club' that + indicates the name of the club to which the member represented by that node + belongs, either 'Mr. Hi' or 'Officer'. Each edge has a weight based on the + number of contexts in which that edge's incident node members interacted. + + The dataset is derived from the 'Club After Split From Data' column of Table 3 in [1]_. + This was in turn derived from the 'Club After Fission' column of Table 1 in the + same paper. Note that the nodes are 0-indexed in NetworkX, but 1-indexed in the + paper (the 'Individual Number in Matrix C' column of Table 3 starts at 1). This + means, for example, that ``G.nodes[9]["club"]`` returns 'Officer', which + corresponds to row 10 of Table 3 in the paper. + + Examples + -------- + To get the name of the club to which a node belongs:: + + >>> G = nx.karate_club_graph() + >>> G.nodes[5]["club"] + 'Mr. Hi' + >>> G.nodes[9]["club"] + 'Officer' + + References + ---------- + .. [1] Zachary, Wayne W. + "An Information Flow Model for Conflict and Fission in Small Groups." + *Journal of Anthropological Research*, 33, 452--473, (1977). + """ + # Create the set of all members, and the members of each club. + all_members = set(range(34)) + club1 = {0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12, 13, 16, 17, 19, 21} + # club2 = all_members - club1 + + G = nx.Graph() + G.add_nodes_from(all_members) + G.name = "Zachary's Karate Club" + + zacharydat = """\ +0 4 5 3 3 3 3 2 2 0 2 3 2 3 0 0 0 2 0 2 0 2 0 0 0 0 0 0 0 0 0 2 0 0 +4 0 6 3 0 0 0 4 0 0 0 0 0 5 0 0 0 1 0 2 0 2 0 0 0 0 0 0 0 0 2 0 0 0 +5 6 0 3 0 0 0 4 5 1 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 2 2 0 0 0 3 0 +3 3 3 0 0 0 0 3 0 0 0 0 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +3 0 0 0 0 0 2 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +3 0 0 0 0 0 5 0 0 0 3 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +3 0 0 0 2 5 0 0 0 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +2 4 4 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +2 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 4 3 +0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 +2 0 0 0 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +1 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +3 5 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 2 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 4 +0 0 0 0 0 3 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 +2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 1 +2 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 0 4 0 2 0 0 5 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 3 0 0 0 2 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 5 2 0 0 0 0 0 0 7 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 0 0 0 2 +0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4 3 0 0 0 0 0 0 0 0 4 +0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 0 2 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 0 0 4 0 0 0 0 0 3 2 +0 2 0 0 0 0 0 0 3 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3 +2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 2 7 0 0 2 0 0 0 4 4 +0 0 2 0 0 0 0 0 3 0 0 0 0 0 3 3 0 0 1 0 3 0 2 5 0 0 0 0 0 4 3 4 0 5 +0 0 0 0 0 0 0 0 4 2 0 0 0 3 2 4 0 0 2 1 1 0 3 4 0 0 2 4 2 2 3 4 5 0""" + + for row, line in enumerate(zacharydat.split("\n")): + thisrow = [int(b) for b in line.split()] + for col, entry in enumerate(thisrow): + if entry >= 1: + G.add_edge(row, col, weight=entry) + + # Add the name of each member's club as a node attribute. + for v in G: + G.nodes[v]["club"] = "Mr. Hi" if v in club1 else "Officer" + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def davis_southern_women_graph(): + """Returns Davis Southern women social network. + + This is a bipartite graph. + + References + ---------- + .. [1] A. Davis, Gardner, B. B., Gardner, M. R., 1941. Deep South. + University of Chicago Press, Chicago, IL. + """ + G = nx.Graph() + # Top nodes + women = [ + "Evelyn Jefferson", + "Laura Mandeville", + "Theresa Anderson", + "Brenda Rogers", + "Charlotte McDowd", + "Frances Anderson", + "Eleanor Nye", + "Pearl Oglethorpe", + "Ruth DeSand", + "Verne Sanderson", + "Myra Liddel", + "Katherina Rogers", + "Sylvia Avondale", + "Nora Fayette", + "Helen Lloyd", + "Dorothy Murchison", + "Olivia Carleton", + "Flora Price", + ] + G.add_nodes_from(women, bipartite=0) + # Bottom nodes + events = [ + "E1", + "E2", + "E3", + "E4", + "E5", + "E6", + "E7", + "E8", + "E9", + "E10", + "E11", + "E12", + "E13", + "E14", + ] + G.add_nodes_from(events, bipartite=1) + + G.add_edges_from( + [ + ("Evelyn Jefferson", "E1"), + ("Evelyn Jefferson", "E2"), + ("Evelyn Jefferson", "E3"), + ("Evelyn Jefferson", "E4"), + ("Evelyn Jefferson", "E5"), + ("Evelyn Jefferson", "E6"), + ("Evelyn Jefferson", "E8"), + ("Evelyn Jefferson", "E9"), + ("Laura Mandeville", "E1"), + ("Laura Mandeville", "E2"), + ("Laura Mandeville", "E3"), + ("Laura Mandeville", "E5"), + ("Laura Mandeville", "E6"), + ("Laura Mandeville", "E7"), + ("Laura Mandeville", "E8"), + ("Theresa Anderson", "E2"), + ("Theresa Anderson", "E3"), + ("Theresa Anderson", "E4"), + ("Theresa Anderson", "E5"), + ("Theresa Anderson", "E6"), + ("Theresa Anderson", "E7"), + ("Theresa Anderson", "E8"), + ("Theresa Anderson", "E9"), + ("Brenda Rogers", "E1"), + ("Brenda Rogers", "E3"), + ("Brenda Rogers", "E4"), + ("Brenda Rogers", "E5"), + ("Brenda Rogers", "E6"), + ("Brenda Rogers", "E7"), + ("Brenda Rogers", "E8"), + ("Charlotte McDowd", "E3"), + ("Charlotte McDowd", "E4"), + ("Charlotte McDowd", "E5"), + ("Charlotte McDowd", "E7"), + ("Frances Anderson", "E3"), + ("Frances Anderson", "E5"), + ("Frances Anderson", "E6"), + ("Frances Anderson", "E8"), + ("Eleanor Nye", "E5"), + ("Eleanor Nye", "E6"), + ("Eleanor Nye", "E7"), + ("Eleanor Nye", "E8"), + ("Pearl Oglethorpe", "E6"), + ("Pearl Oglethorpe", "E8"), + ("Pearl Oglethorpe", "E9"), + ("Ruth DeSand", "E5"), + ("Ruth DeSand", "E7"), + ("Ruth DeSand", "E8"), + ("Ruth DeSand", "E9"), + ("Verne Sanderson", "E7"), + ("Verne Sanderson", "E8"), + ("Verne Sanderson", "E9"), + ("Verne Sanderson", "E12"), + ("Myra Liddel", "E8"), + ("Myra Liddel", "E9"), + ("Myra Liddel", "E10"), + ("Myra Liddel", "E12"), + ("Katherina Rogers", "E8"), + ("Katherina Rogers", "E9"), + ("Katherina Rogers", "E10"), + ("Katherina Rogers", "E12"), + ("Katherina Rogers", "E13"), + ("Katherina Rogers", "E14"), + ("Sylvia Avondale", "E7"), + ("Sylvia Avondale", "E8"), + ("Sylvia Avondale", "E9"), + ("Sylvia Avondale", "E10"), + ("Sylvia Avondale", "E12"), + ("Sylvia Avondale", 
"E13"), + ("Sylvia Avondale", "E14"), + ("Nora Fayette", "E6"), + ("Nora Fayette", "E7"), + ("Nora Fayette", "E9"), + ("Nora Fayette", "E10"), + ("Nora Fayette", "E11"), + ("Nora Fayette", "E12"), + ("Nora Fayette", "E13"), + ("Nora Fayette", "E14"), + ("Helen Lloyd", "E7"), + ("Helen Lloyd", "E8"), + ("Helen Lloyd", "E10"), + ("Helen Lloyd", "E11"), + ("Helen Lloyd", "E12"), + ("Dorothy Murchison", "E8"), + ("Dorothy Murchison", "E9"), + ("Olivia Carleton", "E9"), + ("Olivia Carleton", "E11"), + ("Flora Price", "E9"), + ("Flora Price", "E11"), + ] + ) + G.graph["top"] = women + G.graph["bottom"] = events + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def florentine_families_graph(): + """Returns Florentine families graph. + + References + ---------- + .. [1] Ronald L. Breiger and Philippa E. Pattison + Cumulated social roles: The duality of persons and their algebras,1 + Social Networks, Volume 8, Issue 3, September 1986, Pages 215-256 + """ + G = nx.Graph() + G.add_edge("Acciaiuoli", "Medici") + G.add_edge("Castellani", "Peruzzi") + G.add_edge("Castellani", "Strozzi") + G.add_edge("Castellani", "Barbadori") + G.add_edge("Medici", "Barbadori") + G.add_edge("Medici", "Ridolfi") + G.add_edge("Medici", "Tornabuoni") + G.add_edge("Medici", "Albizzi") + G.add_edge("Medici", "Salviati") + G.add_edge("Salviati", "Pazzi") + G.add_edge("Peruzzi", "Strozzi") + G.add_edge("Peruzzi", "Bischeri") + G.add_edge("Strozzi", "Ridolfi") + G.add_edge("Strozzi", "Bischeri") + G.add_edge("Ridolfi", "Tornabuoni") + G.add_edge("Tornabuoni", "Guadagni") + G.add_edge("Albizzi", "Ginori") + G.add_edge("Albizzi", "Guadagni") + G.add_edge("Bischeri", "Guadagni") + G.add_edge("Guadagni", "Lamberteschi") + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def les_miserables_graph(): + """Returns coappearance network of characters in the novel Les Miserables. + + References + ---------- + .. [1] D. E. Knuth, 1993. + The Stanford GraphBase: a platform for combinatorial computing, + pp. 74-87. New York: AcM Press. 
+ """ + G = nx.Graph() + G.add_edge("Napoleon", "Myriel", weight=1) + G.add_edge("MlleBaptistine", "Myriel", weight=8) + G.add_edge("MmeMagloire", "Myriel", weight=10) + G.add_edge("MmeMagloire", "MlleBaptistine", weight=6) + G.add_edge("CountessDeLo", "Myriel", weight=1) + G.add_edge("Geborand", "Myriel", weight=1) + G.add_edge("Champtercier", "Myriel", weight=1) + G.add_edge("Cravatte", "Myriel", weight=1) + G.add_edge("Count", "Myriel", weight=2) + G.add_edge("OldMan", "Myriel", weight=1) + G.add_edge("Valjean", "Labarre", weight=1) + G.add_edge("Valjean", "MmeMagloire", weight=3) + G.add_edge("Valjean", "MlleBaptistine", weight=3) + G.add_edge("Valjean", "Myriel", weight=5) + G.add_edge("Marguerite", "Valjean", weight=1) + G.add_edge("MmeDeR", "Valjean", weight=1) + G.add_edge("Isabeau", "Valjean", weight=1) + G.add_edge("Gervais", "Valjean", weight=1) + G.add_edge("Listolier", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Tholomyes", weight=4) + G.add_edge("Fameuil", "Listolier", weight=4) + G.add_edge("Blacheville", "Tholomyes", weight=4) + G.add_edge("Blacheville", "Listolier", weight=4) + G.add_edge("Blacheville", "Fameuil", weight=4) + G.add_edge("Favourite", "Tholomyes", weight=3) + G.add_edge("Favourite", "Listolier", weight=3) + G.add_edge("Favourite", "Fameuil", weight=3) + G.add_edge("Favourite", "Blacheville", weight=4) + G.add_edge("Dahlia", "Tholomyes", weight=3) + G.add_edge("Dahlia", "Listolier", weight=3) + G.add_edge("Dahlia", "Fameuil", weight=3) + G.add_edge("Dahlia", "Blacheville", weight=3) + G.add_edge("Dahlia", "Favourite", weight=5) + G.add_edge("Zephine", "Tholomyes", weight=3) + G.add_edge("Zephine", "Listolier", weight=3) + G.add_edge("Zephine", "Fameuil", weight=3) + G.add_edge("Zephine", "Blacheville", weight=3) + G.add_edge("Zephine", "Favourite", weight=4) + G.add_edge("Zephine", "Dahlia", weight=4) + G.add_edge("Fantine", "Tholomyes", weight=3) + G.add_edge("Fantine", "Listolier", weight=3) + G.add_edge("Fantine", "Fameuil", weight=3) + G.add_edge("Fantine", "Blacheville", weight=3) + G.add_edge("Fantine", "Favourite", weight=4) + G.add_edge("Fantine", "Dahlia", weight=4) + G.add_edge("Fantine", "Zephine", weight=4) + G.add_edge("Fantine", "Marguerite", weight=2) + G.add_edge("Fantine", "Valjean", weight=9) + G.add_edge("MmeThenardier", "Fantine", weight=2) + G.add_edge("MmeThenardier", "Valjean", weight=7) + G.add_edge("Thenardier", "MmeThenardier", weight=13) + G.add_edge("Thenardier", "Fantine", weight=1) + G.add_edge("Thenardier", "Valjean", weight=12) + G.add_edge("Cosette", "MmeThenardier", weight=4) + G.add_edge("Cosette", "Valjean", weight=31) + G.add_edge("Cosette", "Tholomyes", weight=1) + G.add_edge("Cosette", "Thenardier", weight=1) + G.add_edge("Javert", "Valjean", weight=17) + G.add_edge("Javert", "Fantine", weight=5) + G.add_edge("Javert", "Thenardier", weight=5) + G.add_edge("Javert", "MmeThenardier", weight=1) + G.add_edge("Javert", "Cosette", weight=1) + G.add_edge("Fauchelevent", "Valjean", weight=8) + G.add_edge("Fauchelevent", "Javert", weight=1) + G.add_edge("Bamatabois", "Fantine", weight=1) + G.add_edge("Bamatabois", "Javert", weight=1) + G.add_edge("Bamatabois", "Valjean", weight=2) + G.add_edge("Perpetue", "Fantine", weight=1) + G.add_edge("Simplice", "Perpetue", weight=2) + G.add_edge("Simplice", "Valjean", weight=3) + G.add_edge("Simplice", "Fantine", weight=2) + G.add_edge("Simplice", "Javert", weight=1) + G.add_edge("Scaufflaire", "Valjean", weight=1) + G.add_edge("Woman1", "Valjean", weight=2) + G.add_edge("Woman1", 
"Javert", weight=1) + G.add_edge("Judge", "Valjean", weight=3) + G.add_edge("Judge", "Bamatabois", weight=2) + G.add_edge("Champmathieu", "Valjean", weight=3) + G.add_edge("Champmathieu", "Judge", weight=3) + G.add_edge("Champmathieu", "Bamatabois", weight=2) + G.add_edge("Brevet", "Judge", weight=2) + G.add_edge("Brevet", "Champmathieu", weight=2) + G.add_edge("Brevet", "Valjean", weight=2) + G.add_edge("Brevet", "Bamatabois", weight=1) + G.add_edge("Chenildieu", "Judge", weight=2) + G.add_edge("Chenildieu", "Champmathieu", weight=2) + G.add_edge("Chenildieu", "Brevet", weight=2) + G.add_edge("Chenildieu", "Valjean", weight=2) + G.add_edge("Chenildieu", "Bamatabois", weight=1) + G.add_edge("Cochepaille", "Judge", weight=2) + G.add_edge("Cochepaille", "Champmathieu", weight=2) + G.add_edge("Cochepaille", "Brevet", weight=2) + G.add_edge("Cochepaille", "Chenildieu", weight=2) + G.add_edge("Cochepaille", "Valjean", weight=2) + G.add_edge("Cochepaille", "Bamatabois", weight=1) + G.add_edge("Pontmercy", "Thenardier", weight=1) + G.add_edge("Boulatruelle", "Thenardier", weight=1) + G.add_edge("Eponine", "MmeThenardier", weight=2) + G.add_edge("Eponine", "Thenardier", weight=3) + G.add_edge("Anzelma", "Eponine", weight=2) + G.add_edge("Anzelma", "Thenardier", weight=2) + G.add_edge("Anzelma", "MmeThenardier", weight=1) + G.add_edge("Woman2", "Valjean", weight=3) + G.add_edge("Woman2", "Cosette", weight=1) + G.add_edge("Woman2", "Javert", weight=1) + G.add_edge("MotherInnocent", "Fauchelevent", weight=3) + G.add_edge("MotherInnocent", "Valjean", weight=1) + G.add_edge("Gribier", "Fauchelevent", weight=2) + G.add_edge("MmeBurgon", "Jondrette", weight=1) + G.add_edge("Gavroche", "MmeBurgon", weight=2) + G.add_edge("Gavroche", "Thenardier", weight=1) + G.add_edge("Gavroche", "Javert", weight=1) + G.add_edge("Gavroche", "Valjean", weight=1) + G.add_edge("Gillenormand", "Cosette", weight=3) + G.add_edge("Gillenormand", "Valjean", weight=2) + G.add_edge("Magnon", "Gillenormand", weight=1) + G.add_edge("Magnon", "MmeThenardier", weight=1) + G.add_edge("MlleGillenormand", "Gillenormand", weight=9) + G.add_edge("MlleGillenormand", "Cosette", weight=2) + G.add_edge("MlleGillenormand", "Valjean", weight=2) + G.add_edge("MmePontmercy", "MlleGillenormand", weight=1) + G.add_edge("MmePontmercy", "Pontmercy", weight=1) + G.add_edge("MlleVaubois", "MlleGillenormand", weight=1) + G.add_edge("LtGillenormand", "MlleGillenormand", weight=2) + G.add_edge("LtGillenormand", "Gillenormand", weight=1) + G.add_edge("LtGillenormand", "Cosette", weight=1) + G.add_edge("Marius", "MlleGillenormand", weight=6) + G.add_edge("Marius", "Gillenormand", weight=12) + G.add_edge("Marius", "Pontmercy", weight=1) + G.add_edge("Marius", "LtGillenormand", weight=1) + G.add_edge("Marius", "Cosette", weight=21) + G.add_edge("Marius", "Valjean", weight=19) + G.add_edge("Marius", "Tholomyes", weight=1) + G.add_edge("Marius", "Thenardier", weight=2) + G.add_edge("Marius", "Eponine", weight=5) + G.add_edge("Marius", "Gavroche", weight=4) + G.add_edge("BaronessT", "Gillenormand", weight=1) + G.add_edge("BaronessT", "Marius", weight=1) + G.add_edge("Mabeuf", "Marius", weight=1) + G.add_edge("Mabeuf", "Eponine", weight=1) + G.add_edge("Mabeuf", "Gavroche", weight=1) + G.add_edge("Enjolras", "Marius", weight=7) + G.add_edge("Enjolras", "Gavroche", weight=7) + G.add_edge("Enjolras", "Javert", weight=6) + G.add_edge("Enjolras", "Mabeuf", weight=1) + G.add_edge("Enjolras", "Valjean", weight=4) + G.add_edge("Combeferre", "Enjolras", weight=15) + 
G.add_edge("Combeferre", "Marius", weight=5) + G.add_edge("Combeferre", "Gavroche", weight=6) + G.add_edge("Combeferre", "Mabeuf", weight=2) + G.add_edge("Prouvaire", "Gavroche", weight=1) + G.add_edge("Prouvaire", "Enjolras", weight=4) + G.add_edge("Prouvaire", "Combeferre", weight=2) + G.add_edge("Feuilly", "Gavroche", weight=2) + G.add_edge("Feuilly", "Enjolras", weight=6) + G.add_edge("Feuilly", "Prouvaire", weight=2) + G.add_edge("Feuilly", "Combeferre", weight=5) + G.add_edge("Feuilly", "Mabeuf", weight=1) + G.add_edge("Feuilly", "Marius", weight=1) + G.add_edge("Courfeyrac", "Marius", weight=9) + G.add_edge("Courfeyrac", "Enjolras", weight=17) + G.add_edge("Courfeyrac", "Combeferre", weight=13) + G.add_edge("Courfeyrac", "Gavroche", weight=7) + G.add_edge("Courfeyrac", "Mabeuf", weight=2) + G.add_edge("Courfeyrac", "Eponine", weight=1) + G.add_edge("Courfeyrac", "Feuilly", weight=6) + G.add_edge("Courfeyrac", "Prouvaire", weight=3) + G.add_edge("Bahorel", "Combeferre", weight=5) + G.add_edge("Bahorel", "Gavroche", weight=5) + G.add_edge("Bahorel", "Courfeyrac", weight=6) + G.add_edge("Bahorel", "Mabeuf", weight=2) + G.add_edge("Bahorel", "Enjolras", weight=4) + G.add_edge("Bahorel", "Feuilly", weight=3) + G.add_edge("Bahorel", "Prouvaire", weight=2) + G.add_edge("Bahorel", "Marius", weight=1) + G.add_edge("Bossuet", "Marius", weight=5) + G.add_edge("Bossuet", "Courfeyrac", weight=12) + G.add_edge("Bossuet", "Gavroche", weight=5) + G.add_edge("Bossuet", "Bahorel", weight=4) + G.add_edge("Bossuet", "Enjolras", weight=10) + G.add_edge("Bossuet", "Feuilly", weight=6) + G.add_edge("Bossuet", "Prouvaire", weight=2) + G.add_edge("Bossuet", "Combeferre", weight=9) + G.add_edge("Bossuet", "Mabeuf", weight=1) + G.add_edge("Bossuet", "Valjean", weight=1) + G.add_edge("Joly", "Bahorel", weight=5) + G.add_edge("Joly", "Bossuet", weight=7) + G.add_edge("Joly", "Gavroche", weight=3) + G.add_edge("Joly", "Courfeyrac", weight=5) + G.add_edge("Joly", "Enjolras", weight=5) + G.add_edge("Joly", "Feuilly", weight=5) + G.add_edge("Joly", "Prouvaire", weight=2) + G.add_edge("Joly", "Combeferre", weight=5) + G.add_edge("Joly", "Mabeuf", weight=1) + G.add_edge("Joly", "Marius", weight=2) + G.add_edge("Grantaire", "Bossuet", weight=3) + G.add_edge("Grantaire", "Enjolras", weight=3) + G.add_edge("Grantaire", "Combeferre", weight=1) + G.add_edge("Grantaire", "Courfeyrac", weight=2) + G.add_edge("Grantaire", "Joly", weight=2) + G.add_edge("Grantaire", "Gavroche", weight=1) + G.add_edge("Grantaire", "Bahorel", weight=1) + G.add_edge("Grantaire", "Feuilly", weight=1) + G.add_edge("Grantaire", "Prouvaire", weight=1) + G.add_edge("MotherPlutarch", "Mabeuf", weight=3) + G.add_edge("Gueulemer", "Thenardier", weight=5) + G.add_edge("Gueulemer", "Valjean", weight=1) + G.add_edge("Gueulemer", "MmeThenardier", weight=1) + G.add_edge("Gueulemer", "Javert", weight=1) + G.add_edge("Gueulemer", "Gavroche", weight=1) + G.add_edge("Gueulemer", "Eponine", weight=1) + G.add_edge("Babet", "Thenardier", weight=6) + G.add_edge("Babet", "Gueulemer", weight=6) + G.add_edge("Babet", "Valjean", weight=1) + G.add_edge("Babet", "MmeThenardier", weight=1) + G.add_edge("Babet", "Javert", weight=2) + G.add_edge("Babet", "Gavroche", weight=1) + G.add_edge("Babet", "Eponine", weight=1) + G.add_edge("Claquesous", "Thenardier", weight=4) + G.add_edge("Claquesous", "Babet", weight=4) + G.add_edge("Claquesous", "Gueulemer", weight=4) + G.add_edge("Claquesous", "Valjean", weight=1) + G.add_edge("Claquesous", "MmeThenardier", weight=1) + 
G.add_edge("Claquesous", "Javert", weight=1) + G.add_edge("Claquesous", "Eponine", weight=1) + G.add_edge("Claquesous", "Enjolras", weight=1) + G.add_edge("Montparnasse", "Javert", weight=1) + G.add_edge("Montparnasse", "Babet", weight=2) + G.add_edge("Montparnasse", "Gueulemer", weight=2) + G.add_edge("Montparnasse", "Claquesous", weight=2) + G.add_edge("Montparnasse", "Valjean", weight=1) + G.add_edge("Montparnasse", "Gavroche", weight=1) + G.add_edge("Montparnasse", "Eponine", weight=1) + G.add_edge("Montparnasse", "Thenardier", weight=1) + G.add_edge("Toussaint", "Cosette", weight=2) + G.add_edge("Toussaint", "Javert", weight=1) + G.add_edge("Toussaint", "Valjean", weight=1) + G.add_edge("Child1", "Gavroche", weight=2) + G.add_edge("Child2", "Gavroche", weight=2) + G.add_edge("Child2", "Child1", weight=3) + G.add_edge("Brujon", "Babet", weight=3) + G.add_edge("Brujon", "Gueulemer", weight=3) + G.add_edge("Brujon", "Thenardier", weight=3) + G.add_edge("Brujon", "Gavroche", weight=1) + G.add_edge("Brujon", "Eponine", weight=1) + G.add_edge("Brujon", "Claquesous", weight=1) + G.add_edge("Brujon", "Montparnasse", weight=1) + G.add_edge("MmeHucheloup", "Bossuet", weight=1) + G.add_edge("MmeHucheloup", "Joly", weight=1) + G.add_edge("MmeHucheloup", "Grantaire", weight=1) + G.add_edge("MmeHucheloup", "Bahorel", weight=1) + G.add_edge("MmeHucheloup", "Courfeyrac", weight=1) + G.add_edge("MmeHucheloup", "Gavroche", weight=1) + G.add_edge("MmeHucheloup", "Enjolras", weight=1) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/spectral_graph_forge.py b/.venv/lib/python3.12/site-packages/networkx/generators/spectral_graph_forge.py new file mode 100644 index 0000000..aa8c919 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/spectral_graph_forge.py @@ -0,0 +1,120 @@ +"""Generates graphs with a given eigenvector structure""" + +import networkx as nx +from networkx.utils import np_random_state + +__all__ = ["spectral_graph_forge"] + + +@np_random_state(3) +@nx._dispatchable(preserve_edge_attrs={"G": {"weight": 1}}, returns_graph=True) +def spectral_graph_forge(G, alpha, transformation="identity", seed=None): + """Returns a random simple graph with spectrum resembling that of `G` + + This algorithm, called Spectral Graph Forge (SGF), computes the + eigenvectors of a given graph adjacency matrix, filters them and + builds a random graph with a similar eigenstructure. + SGF has been proved to be particularly useful for synthesizing + realistic social networks and it can also be used to anonymize + graph sensitive data. + + Parameters + ---------- + G : Graph + alpha : float + Ratio representing the percentage of eigenvectors of G to consider, + values in [0,1]. + transformation : string, optional + Represents the intended matrix linear transformation, possible values + are 'identity' and 'modularity' + seed : integer, random_state, or None (default) + Indicator of numpy random number generation state. + See :ref:`Randomness`. + + Returns + ------- + H : Graph + A graph with a similar eigenvector structure of the input one. + + Raises + ------ + NetworkXError + If transformation has a value different from 'identity' or 'modularity' + + Notes + ----- + Spectral Graph Forge (SGF) generates a random simple graph resembling the + global properties of the given one. + It leverages the low-rank approximation of the associated adjacency matrix + driven by the *alpha* precision parameter. 
+    SGF preserves the number of nodes of the input graph and their ordering.
+    This way, the nodes of the output graph resemble the corresponding nodes
+    of the input graph, and attributes can be mapped across directly.
+
+    It operates on the graph adjacency matrix, which can optionally be
+    transformed into another symmetric real matrix (the current
+    transformation options are *identity* and *modularity*).
+    The *modularity* transformation, in the sense of Newman's modularity
+    matrix, focuses on community-structure-related properties of the graph.
+
+    SGF applies a low-rank approximation whose fixed rank is computed as the
+    fraction *alpha* of the input graph adjacency matrix dimension.
+    This step filters the input eigenvectors, similar to the low-pass
+    filtering common in telecommunications.
+
+    The filtered values (after truncation) are used as input to a Bernoulli
+    sampling for constructing a random adjacency matrix.
+
+    References
+    ----------
+    .. [1] L. Baldesi, C. T. Butts, A. Markopoulou, "Spectral Graph Forge:
+       Graph Generation Targeting Modularity", IEEE Infocom, '18.
+       https://arxiv.org/abs/1801.01715
+    .. [2] M. Newman, "Networks: an introduction", Oxford University Press,
+       2010
+
+    Examples
+    --------
+    >>> G = nx.karate_club_graph()
+    >>> H = nx.spectral_graph_forge(G, 0.3)
+
+    """
+    import numpy as np
+    import scipy as sp
+
+    available_transformations = ["identity", "modularity"]
+    alpha = np.clip(alpha, 0, 1)
+    A = nx.to_numpy_array(G)
+    n = A.shape[1]
+    level = round(n * alpha)
+
+    if transformation not in available_transformations:
+        msg = f"{transformation!r} is not a valid transformation. "
+        msg += f"Transformations: {available_transformations}"
+        raise nx.NetworkXError(msg)
+
+    K = np.ones((1, n)) @ A
+
+    B = A
+    if transformation == "modularity":
+        B -= K.T @ K / K.sum()
+
+    # Compute low-rank approximation of B
+    evals, evecs = np.linalg.eigh(B)
+    k = np.argsort(np.abs(evals))[::-1]  # indices of evals in descending order
+    evecs[:, k[np.arange(level, n)]] = 0  # set smallest eigenvectors to 0
+    B = evecs @ np.diag(evals) @ evecs.T
+
+    if transformation == "modularity":
+        B += K.T @ K / K.sum()
+
+    B = np.clip(B, 0, 1)
+    np.fill_diagonal(B, 0)
+
+    for i in range(n - 1):
+        B[i, i + 1 :] = sp.stats.bernoulli.rvs(B[i, i + 1 :], random_state=seed)
+        B[i + 1 :, i] = np.transpose(B[i, i + 1 :])
+
+    H = nx.from_numpy_array(B)
+
+    return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py b/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py
new file mode 100644
index 0000000..f53e231
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py
@@ -0,0 +1,54 @@
+"""Functions for generating stochastic graphs from a given weighted directed
+graph.
+
+"""
+
+import networkx as nx
+from networkx.classes import DiGraph, MultiDiGraph
+from networkx.utils import not_implemented_for
+
+__all__ = ["stochastic_graph"]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(
+    edge_attrs="weight", mutates_input={"not copy": 1}, returns_graph=True
+)
+def stochastic_graph(G, copy=True, weight="weight"):
+    """Returns a right-stochastic representation of directed graph `G`.
+
+    A right-stochastic graph is a weighted digraph in which, for each
+    node, the sum of the weights of all the out-edges of that node is
+    1. If the graph is already weighted (for example, via a 'weight'
+    edge attribute), the reweighting takes that into account.
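+    In other words, each edge weight ``w(u, v)`` is divided by the total
+    out-weight of ``u``, with a missing weight treated as 1 (see the loop
+    below).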
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py b/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py
new file mode 100644
index 0000000..f53e231
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/stochastic.py
@@ -0,0 +1,54 @@
+"""Functions for generating stochastic graphs from a given weighted directed
+graph.
+
+"""
+
+import networkx as nx
+from networkx.classes import DiGraph, MultiDiGraph
+from networkx.utils import not_implemented_for
+
+__all__ = ["stochastic_graph"]
+
+
+@not_implemented_for("undirected")
+@nx._dispatchable(
+    edge_attrs="weight", mutates_input={"not copy": 1}, returns_graph=True
+)
+def stochastic_graph(G, copy=True, weight="weight"):
+    """Returns a right-stochastic representation of directed graph `G`.
+
+    A right-stochastic graph is a weighted digraph in which for each
+    node, the sum of the weights of all the out-edges of that node is
+    1. If the graph is already weighted (for example, via a 'weight'
+    edge attribute), the reweighting takes that into account.
+
+    Parameters
+    ----------
+    G : directed graph
+        A :class:`~networkx.DiGraph` or :class:`~networkx.MultiDiGraph`.
+
+    copy : boolean, optional
+        If this is True, then this function returns a new graph with
+        the stochastic reweighting. Otherwise, the original graph is
+        modified in-place (and also returned, for convenience).
+
+    weight : edge attribute key (optional, default='weight')
+        Edge attribute key used for reading the existing weight and
+        setting the new weight. If no attribute with this key is found
+        for an edge, then the edge weight is assumed to be 1. If an edge
+        has a weight, it must be a positive number.
+
+    """
+    if copy:
+        G = MultiDiGraph(G) if G.is_multigraph() else DiGraph(G)
+    # There is a tradeoff here: the dictionary of node degrees may
+    # require a lot of memory, whereas making a call to `G.out_degree`
+    # inside the loop may be costly in computation time.
+    degree = dict(G.out_degree(weight=weight))
+    for u, v, d in G.edges(data=True):
+        if degree[u] == 0:
+            d[weight] = 0
+        else:
+            d[weight] = d.get(weight, 1) / degree[u]
+    nx._clear_cache(G)
+    return G
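A quick sketch of what the right-stochastic property means in practice (not part of the vendored file): after the call, the out-edge weights of every node with outgoing edges sum to 1.

    import networkx as nx

    G = nx.DiGraph()
    G.add_weighted_edges_from([(0, 1, 3.0), (0, 2, 1.0), (1, 2, 2.0)])
    S = nx.stochastic_graph(G)  # copy=True by default, so G is untouched

    # Each node's out-weights sum to 1 (sink nodes have no out-edges).
    for u in S:
        out = [d["weight"] for _, _, d in S.out_edges(u, data=True)]
        assert not out or abs(sum(out) - 1.0) < 1e-12

    print(S[0][1]["weight"], S[0][2]["weight"])  # 0.75 0.25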
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/sudoku.py b/.venv/lib/python3.12/site-packages/networkx/generators/sudoku.py
new file mode 100644
index 0000000..f288ed2
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/sudoku.py
@@ -0,0 +1,131 @@
+"""Generator for Sudoku graphs
+
+This module gives a generator for n-Sudoku graphs. It can be used to develop
+algorithms for solving or generating Sudoku puzzles.
+
+A completed Sudoku grid is a 9x9 array of integers between 1 and 9, with no
+number appearing twice in the same row, column, or 3x3 box.
+
++---------+---------+---------+
+| 8 6 4   | 3 7 1   | 2 5 9   |
+| 3 2 5   | 8 4 9   | 7 6 1   |
+| 9 7 1   | 2 6 5   | 8 4 3   |
++---------+---------+---------+
+| 4 3 6   | 1 9 2   | 5 8 7   |
+| 1 9 8   | 6 5 7   | 4 3 2   |
+| 2 5 7   | 4 8 3   | 9 1 6   |
++---------+---------+---------+
+| 6 8 9   | 7 3 4   | 1 2 5   |
+| 7 1 3   | 5 2 8   | 6 9 4   |
+| 5 4 2   | 9 1 6   | 3 7 8   |
++---------+---------+---------+
+
+The Sudoku graph is an undirected graph with 81 vertices, corresponding to
+the cells of a Sudoku grid. It is a regular graph of degree 20. Two distinct
+vertices are adjacent if and only if the corresponding cells belong to the
+same row, column, or box. A completed Sudoku grid corresponds to a vertex
+coloring of the Sudoku graph with nine colors.
+
+More generally, the n-Sudoku graph is a graph with n^4 vertices, corresponding
+to the cells of an n^2 by n^2 grid. Two distinct vertices are adjacent if and
+only if they belong to the same row, column, or n by n box.
+
+References
+----------
+.. [1] Herzberg, A. M., & Murty, M. R. (2007). Sudoku squares and chromatic
+   polynomials. Notices of the AMS, 54(6), 708-717.
+.. [2] Sander, Torsten (2009), "Sudoku graphs are integral",
+   Electronic Journal of Combinatorics, 16 (1): Note 25, 7pp, MR 2529816
+.. [3] Wikipedia contributors. "Glossary of Sudoku." Wikipedia, The Free
+   Encyclopedia, 3 Dec. 2019. Web. 22 Dec. 2019.
+"""
+
+import networkx as nx
+from networkx.exception import NetworkXError
+
+__all__ = ["sudoku_graph"]
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def sudoku_graph(n=3):
+    """Returns the n-Sudoku graph. The default value of n is 3.
+
+    The n-Sudoku graph is a graph with n^4 vertices, corresponding to the
+    cells of an n^2 by n^2 grid. Two distinct vertices are adjacent if and
+    only if they belong to the same row, column, or n-by-n box.
+
+    Parameters
+    ----------
+    n : integer
+        The order of the Sudoku graph, equal to the square root of the
+        number of rows. The default is 3.
+
+    Returns
+    -------
+    NetworkX graph
+        The n-Sudoku graph Sud(n).
+
+    Examples
+    --------
+    >>> G = nx.sudoku_graph()
+    >>> G.number_of_nodes()
+    81
+    >>> G.number_of_edges()
+    810
+    >>> sorted(G.neighbors(42))
+    [6, 15, 24, 33, 34, 35, 36, 37, 38, 39, 40, 41, 43, 44, 51, 52, 53, 60, 69, 78]
+    >>> G = nx.sudoku_graph(2)
+    >>> G.number_of_nodes()
+    16
+    >>> G.number_of_edges()
+    56
+
+    References
+    ----------
+    .. [1] Herzberg, A. M., & Murty, M. R. (2007). Sudoku squares and chromatic
+       polynomials. Notices of the AMS, 54(6), 708-717.
+    .. [2] Sander, Torsten (2009), "Sudoku graphs are integral",
+       Electronic Journal of Combinatorics, 16 (1): Note 25, 7pp, MR 2529816
+    .. [3] Wikipedia contributors. "Glossary of Sudoku." Wikipedia, The Free
+       Encyclopedia, 3 Dec. 2019. Web. 22 Dec. 2019.
+    """
+
+    if n < 0:
+        raise NetworkXError("The order must be greater than or equal to zero.")
+
+    n2 = n * n
+    n3 = n2 * n
+    n4 = n3 * n
+
+    # Construct an empty graph with n^4 nodes
+    G = nx.empty_graph(n4)
+
+    # A Sudoku graph of order 0 or 1 has no edges
+    if n < 2:
+        return G
+
+    # Add edges for cells in the same row
+    for row_no in range(n2):
+        row_start = row_no * n2
+        for j in range(1, n2):
+            for i in range(j):
+                G.add_edge(row_start + i, row_start + j)
+
+    # Add edges for cells in the same column
+    for col_no in range(n2):
+        for j in range(col_no, n4, n2):
+            for i in range(col_no, j, n2):
+                G.add_edge(i, j)
+
+    # Add edges for cells in the same box
+    for band_no in range(n):
+        for stack_no in range(n):
+            box_start = n3 * band_no + n * stack_no
+            for j in range(1, n2):
+                for i in range(j):
+                    u = box_start + (i % n) + n2 * (i // n)
+                    v = box_start + (j % n) + n2 * (j // n)
+                    G.add_edge(u, v)
+
+    return G
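A short sanity check of the counts documented above (not part of the vendored file): every cell has n^2 - 1 row mates, n^2 - 1 column mates, and (n - 1)^2 box mates not already counted by its row or column.

    import networkx as nx

    G = nx.sudoku_graph(3)  # 8 + 8 + 4 = 20 neighbours per cell
    assert G.number_of_nodes() == 81
    assert G.number_of_edges() == 81 * 20 // 2  # 810
    assert all(d == 20 for _, d in G.degree())

    H = nx.sudoku_graph(2)  # 3 + 3 + 1 = 7 neighbours per cell
    assert all(d == 7 for _, d in H.degree())
    assert H.number_of_edges() == 16 * 7 // 2  # 56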
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..05e7924
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-312.pyc
new file mode 100644
index 0000000..2c94470
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_atlas.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-312.pyc
new file mode 100644
index 0000000..cee6aa4
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_classic.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-312.pyc
new file mode 100644
index 0000000..4e9cdca
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_cographs.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_community.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_community.cpython-312.pyc
new file mode 100644
index 0000000..ceb985b
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_community.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_degree_seq.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_degree_seq.cpython-312.pyc
new file mode 100644
index 0000000..1db36fb
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_degree_seq.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_directed.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_directed.cpython-312.pyc
new file mode 100644
index 0000000..d45c9f7
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_directed.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-312.pyc
new file mode 100644
index 0000000..82da3b7
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_duplication.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_ego.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_ego.cpython-312.pyc
new file mode 100644
index 0000000..b09e6bf
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_ego.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_expanders.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_expanders.cpython-312.pyc
new file mode 100644
index 0000000..c263394
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_expanders.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_geometric.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_geometric.cpython-312.pyc
new file mode 100644
index 0000000..775fa71
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_geometric.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_harary_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_harary_graph.cpython-312.pyc
new file mode 100644
index 0000000..2502509
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_harary_graph.cpython-312.pyc differ
diff --git
a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-312.pyc new file mode 100644 index 0000000..a44a968 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_internet_as_graphs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-312.pyc new file mode 100644 index 0000000..baaaaef Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_intersection.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-312.pyc new file mode 100644 index 0000000..b97c9a9 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_interval_graph.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_joint_degree_seq.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_joint_degree_seq.cpython-312.pyc new file mode 100644 index 0000000..077b167 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_joint_degree_seq.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-312.pyc new file mode 100644 index 0000000..08b51f1 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_lattice.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_line.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_line.cpython-312.pyc new file mode 100644 index 0000000..691869b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_line.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_mycielski.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_mycielski.cpython-312.pyc new file mode 100644 index 0000000..841f6f8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_mycielski.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-312.pyc new file mode 100644 index 0000000..5c5773b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_nonisomorphic_trees.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_clustered.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_clustered.cpython-312.pyc new file mode 100644 index 0000000..7222381 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_clustered.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-312.pyc new file mode 100644 index 0000000..fd9cc34 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_random_graphs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-312.pyc new file mode 100644 index 0000000..6ae6d9a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_small.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_spectral_graph_forge.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_spectral_graph_forge.cpython-312.pyc new file mode 100644 index 0000000..477d0ca Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_spectral_graph_forge.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-312.pyc new file mode 100644 index 0000000..9f5ef34 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_stochastic.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_sudoku.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_sudoku.cpython-312.pyc new file mode 100644 index 0000000..d768835 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_sudoku.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-312.pyc new file mode 100644 index 0000000..84cec17 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_time_series.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_trees.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_trees.cpython-312.pyc new file mode 100644 index 0000000..4531b28 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_trees.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_triads.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_triads.cpython-312.pyc new file mode 100644 index 0000000..edef5f6 Binary files /dev/null and 
b/.venv/lib/python3.12/site-packages/networkx/generators/tests/__pycache__/test_triads.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_atlas.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_atlas.py new file mode 100644 index 0000000..add4741 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_atlas.py @@ -0,0 +1,75 @@ +from itertools import groupby + +import pytest + +import networkx as nx +from networkx import graph_atlas, graph_atlas_g +from networkx.generators.atlas import NUM_GRAPHS +from networkx.utils import edges_equal, nodes_equal, pairwise + + +class TestAtlasGraph: + """Unit tests for the :func:`~networkx.graph_atlas` function.""" + + def test_index_too_small(self): + with pytest.raises(ValueError): + graph_atlas(-1) + + def test_index_too_large(self): + with pytest.raises(ValueError): + graph_atlas(NUM_GRAPHS) + + def test_graph(self): + G = graph_atlas(6) + assert nodes_equal(G.nodes(), range(3)) + assert edges_equal(G.edges(), [(0, 1), (0, 2)]) + + +class TestAtlasGraphG: + """Unit tests for the :func:`~networkx.graph_atlas_g` function.""" + + @classmethod + def setup_class(cls): + cls.GAG = graph_atlas_g() + + def test_sizes(self): + G = self.GAG[0] + assert G.number_of_nodes() == 0 + assert G.number_of_edges() == 0 + + G = self.GAG[7] + assert G.number_of_nodes() == 3 + assert G.number_of_edges() == 3 + + def test_names(self): + for i, G in enumerate(self.GAG): + assert int(G.name[1:]) == i + + def test_nondecreasing_nodes(self): + # check for nondecreasing number of nodes + for n1, n2 in pairwise(map(len, self.GAG)): + assert n2 <= n1 + 1 + + def test_nondecreasing_edges(self): + # check for nondecreasing number of edges (for fixed number of + # nodes) + for n, group in groupby(self.GAG, key=nx.number_of_nodes): + for m1, m2 in pairwise(map(nx.number_of_edges, group)): + assert m2 <= m1 + 1 + + def test_nondecreasing_degree_sequence(self): + # Check for lexicographically nondecreasing degree sequences + # (for fixed number of nodes and edges). + # + # There are three exceptions to this rule in the order given in + # the "Atlas of Graphs" book, so we need to manually exclude + # those. 
+        exceptions = [("G55", "G56"), ("G1007", "G1008"), ("G1012", "G1013")]
+        for n, group in groupby(self.GAG, key=nx.number_of_nodes):
+            for m, group in groupby(group, key=nx.number_of_edges):
+                for G1, G2 in pairwise(group):
+                    if (G1.name, G2.name) in exceptions:
+                        continue
+                    d1 = sorted(d for v, d in G1.degree())
+                    d2 = sorted(d for v, d in G2.degree())
+                    assert d1 <= d2
diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_classic.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_classic.py
new file mode 100644
index 0000000..9ca4a86
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_classic.py
@@ -0,0 +1,639 @@
+"""
+====================
+Generators - Classic
+====================
+
+Unit tests for various classic graph generators in generators/classic.py
+"""
+
+import itertools
+import typing
+
+import pytest
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
+class TestGeneratorClassic:
+    def test_balanced_tree(self):
+        # balanced_tree(r, h) is a tree with (r**(h+1) - 1)/(r - 1) nodes
+        for r, h in [(2, 2), (3, 3), (6, 2)]:
+            t = nx.balanced_tree(r, h)
+            order = t.order()
+            assert order == (r ** (h + 1) - 1) / (r - 1)
+            assert nx.is_connected(t)
+            assert t.size() == order - 1
+            dh = nx.degree_histogram(t)
+            assert dh[0] == 0  # no nodes of degree 0
+            assert dh[1] == r**h  # nodes of degree 1 are leaves
+            assert dh[r] == 1  # root is degree r
+            assert dh[r + 1] == order - r**h - 1  # everyone else is degree r+1
+            assert len(dh) == r + 2
+
+    def test_balanced_tree_star(self):
+        # balanced_tree(r, 1) is the r-star
+        t = nx.balanced_tree(r=2, h=1)
+        assert nx.could_be_isomorphic(t, nx.star_graph(2))
+        t = nx.balanced_tree(r=5, h=1)
+        assert nx.could_be_isomorphic(t, nx.star_graph(5))
+        t = nx.balanced_tree(r=10, h=1)
+        assert nx.could_be_isomorphic(t, nx.star_graph(10))
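As a quick check of the node-count formula used in the first test above (not part of the vendored tests), sum the geometric series level by level:

    import networkx as nx

    r, h = 3, 3
    T = nx.balanced_tree(r, h)
    # 1 + 3 + 9 + 27 = 40 = (3**4 - 1) // (3 - 1)
    assert T.number_of_nodes() == (r ** (h + 1) - 1) // (r - 1)
    assert T.number_of_edges() == T.number_of_nodes() - 1  # it is a tree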
+    def test_balanced_tree_path(self):
+        """Tests that the balanced tree with branching factor one is the
+        path graph.
+
+        """
+        # A tree of height four has five levels.
+        T = nx.balanced_tree(1, 4)
+        P = nx.path_graph(5)
+        assert nx.could_be_isomorphic(T, P)
+
+    def test_full_rary_tree(self):
+        r = 2
+        n = 9
+        t = nx.full_rary_tree(r, n)
+        assert t.order() == n
+        assert nx.is_connected(t)
+        dh = nx.degree_histogram(t)
+        assert dh[0] == 0  # no nodes of degree 0
+        assert dh[1] == 5  # nodes of degree 1 are leaves
+        assert dh[r] == 1  # root is degree r
+        assert dh[r + 1] == 9 - 5 - 1  # everyone else is degree r+1
+        assert len(dh) == r + 2
+
+    def test_full_rary_tree_balanced(self):
+        t = nx.full_rary_tree(2, 15)
+        th = nx.balanced_tree(2, 3)
+        assert nx.could_be_isomorphic(t, th)
+
+    def test_full_rary_tree_path(self):
+        t = nx.full_rary_tree(1, 10)
+        assert nx.could_be_isomorphic(t, nx.path_graph(10))
+
+    def test_full_rary_tree_empty(self):
+        t = nx.full_rary_tree(0, 10)
+        assert nx.could_be_isomorphic(t, nx.empty_graph(10))
+        t = nx.full_rary_tree(3, 0)
+        assert nx.could_be_isomorphic(t, nx.empty_graph(0))
+
+    def test_full_rary_tree_3_20(self):
+        t = nx.full_rary_tree(3, 20)
+        assert t.order() == 20
+
+    def test_barbell_graph(self):
+        # number of nodes = 2*m1 + m2 (2 m1-complete graphs + m2-path + 2 edges)
+        # number of edges = 2 * number_of_edges(K_m1) + m2 + 1 = m1*(m1 - 1) + m2 + 1
+        m1 = 3
+        m2 = 5
+        b = nx.barbell_graph(m1, m2)
+        assert nx.number_of_nodes(b) == 2 * m1 + m2
+        assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
+
+        m1 = 4
+        m2 = 10
+        b = nx.barbell_graph(m1, m2)
+        assert nx.number_of_nodes(b) == 2 * m1 + m2
+        assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
+
+        m1 = 3
+        m2 = 20
+        b = nx.barbell_graph(m1, m2)
+        assert nx.number_of_nodes(b) == 2 * m1 + m2
+        assert nx.number_of_edges(b) == m1 * (m1 - 1) + m2 + 1
+
+        # Raise NetworkXError if m1 < 2
+        m1 = 1
+        m2 = 20
+        pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2)
+
+        # Raise NetworkXError if m2 < 0
+        m1 = 5
+        m2 = -2
+        pytest.raises(nx.NetworkXError, nx.barbell_graph, m1, m2)
+
+        # nx.barbell_graph(2, m) = nx.path_graph(m + 4)
+        m1 = 2
+        m2 = 5
+        b = nx.barbell_graph(m1, m2)
+        assert nx.could_be_isomorphic(b, nx.path_graph(m2 + 4))
+
+        m1 = 2
+        m2 = 10
+        b = nx.barbell_graph(m1, m2)
+        assert nx.could_be_isomorphic(b, nx.path_graph(m2 + 4))
+
+        m1 = 2
+        m2 = 20
+        b = nx.barbell_graph(m1, m2)
+        assert nx.could_be_isomorphic(b, nx.path_graph(m2 + 4))
+
+        pytest.raises(
+            nx.NetworkXError, nx.barbell_graph, m1, m2, create_using=nx.DiGraph()
+        )
+
+        mb = nx.barbell_graph(m1, m2, create_using=nx.MultiGraph())
+        assert edges_equal(mb.edges(), b.edges())
+
+    def test_binomial_tree(self):
+        graphs = (None, nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
+        for create_using in graphs:
+            for n in range(4):
+                b = nx.binomial_tree(n, create_using)
+                assert nx.number_of_nodes(b) == 2**n
+                assert nx.number_of_edges(b) == (2**n - 1)
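The barbell edge count asserted above can be sanity-checked directly (not part of the vendored tests): the two K_{m1} cliques contribute m1*(m1 - 1) edges in total, the path adds m2 - 1, and two more edges attach the path to the cliques.

    import networkx as nx

    m1, m2 = 4, 10
    B = nx.barbell_graph(m1, m2)
    assert B.number_of_nodes() == 2 * m1 + m2  # 18
    assert B.number_of_edges() == m1 * (m1 - 1) + m2 + 1  # 12 + 11 = 23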
+ g = nx.complete_graph("abcb") + assert nodes_equal(g.nodes(), ["a", "b", "c"]) + assert g.size() == 4 + + g = nx.complete_graph("abcb", create_using=nx.MultiGraph) + assert nodes_equal(g.nodes(), ["a", "b", "c"]) + assert g.size() == 6 + + def test_complete_digraph(self): + # complete_graph(m) is a connected graph with + # m nodes and m*(m+1)/2 edges + for m in [0, 1, 3, 5]: + g = nx.complete_graph(m, create_using=nx.DiGraph) + assert nx.number_of_nodes(g) == m + assert nx.number_of_edges(g) == m * (m - 1) + + g = nx.complete_graph("abc", create_using=nx.DiGraph) + assert len(g) == 3 + assert g.size() == 6 + assert g.is_directed() + + def test_circular_ladder_graph(self): + G = nx.circular_ladder_graph(5) + pytest.raises( + nx.NetworkXError, nx.circular_ladder_graph, 5, create_using=nx.DiGraph + ) + mG = nx.circular_ladder_graph(5, create_using=nx.MultiGraph) + assert edges_equal(mG.edges(), G.edges()) + + def test_circulant_graph(self): + # Ci_n(1) is the cycle graph for all n + Ci6_1 = nx.circulant_graph(6, [1]) + C6 = nx.cycle_graph(6) + assert edges_equal(Ci6_1.edges(), C6.edges()) + + # Ci_n(1, 2, ..., n div 2) is the complete graph for all n + Ci7 = nx.circulant_graph(7, [1, 2, 3]) + K7 = nx.complete_graph(7) + assert edges_equal(Ci7.edges(), K7.edges()) + + # Ci_6(1, 3) is K_3,3 i.e. the utility graph + Ci6_1_3 = nx.circulant_graph(6, [1, 3]) + K3_3 = nx.complete_bipartite_graph(3, 3) + assert nx.could_be_isomorphic(Ci6_1_3, K3_3) + + def test_cycle_graph(self): + G = nx.cycle_graph(4) + assert edges_equal(G.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)]) + mG = nx.cycle_graph(4, create_using=nx.MultiGraph) + assert edges_equal(mG.edges(), [(0, 1), (0, 3), (1, 2), (2, 3)]) + G = nx.cycle_graph(4, create_using=nx.DiGraph) + assert not G.has_edge(2, 1) + assert G.has_edge(1, 2) + assert G.is_directed() + + G = nx.cycle_graph("abc") + assert len(G) == 3 + assert G.size() == 3 + G = nx.cycle_graph("abcb") + assert len(G) == 3 + assert G.size() == 2 + g = nx.cycle_graph("abc", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 3 + assert g.is_directed() + g = nx.cycle_graph("abcb", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 4 + + def test_dorogovtsev_goltsev_mendes_graph(self): + G = nx.dorogovtsev_goltsev_mendes_graph(0) + assert edges_equal(G.edges(), [(0, 1)]) + assert nodes_equal(list(G), [0, 1]) + G = nx.dorogovtsev_goltsev_mendes_graph(1) + assert edges_equal(G.edges(), [(0, 1), (0, 2), (1, 2)]) + assert nx.average_clustering(G) == 1.0 + assert nx.average_shortest_path_length(G) == 1.0 + assert sorted(nx.triangles(G).values()) == [1, 1, 1] + assert nx.is_planar(G) + G = nx.dorogovtsev_goltsev_mendes_graph(2) + assert nx.number_of_nodes(G) == 6 + assert nx.number_of_edges(G) == 9 + assert nx.average_clustering(G) == 0.75 + assert nx.average_shortest_path_length(G) == 1.4 + assert nx.is_planar(G) + G = nx.dorogovtsev_goltsev_mendes_graph(10) + assert nx.number_of_nodes(G) == 29526 + assert nx.number_of_edges(G) == 59049 + assert G.degree(0) == 1024 + assert G.degree(1) == 1024 + assert G.degree(2) == 1024 + + with pytest.raises(nx.NetworkXError, match=r"n must be greater than"): + nx.dorogovtsev_goltsev_mendes_graph(-1) + with pytest.raises(nx.NetworkXError, match=r"directed graph not supported"): + nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError, match=r"multigraph not supported"): + nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.MultiGraph) + with pytest.raises(nx.NetworkXError): + 
nx.dorogovtsev_goltsev_mendes_graph(7, create_using=nx.MultiDiGraph) + + def test_create_using(self): + G = nx.empty_graph() + assert isinstance(G, nx.Graph) + pytest.raises(TypeError, nx.empty_graph, create_using=0.0) + pytest.raises(TypeError, nx.empty_graph, create_using="Graph") + + G = nx.empty_graph(create_using=nx.MultiGraph) + assert isinstance(G, nx.MultiGraph) + G = nx.empty_graph(create_using=nx.DiGraph) + assert isinstance(G, nx.DiGraph) + + G = nx.empty_graph(create_using=nx.DiGraph, default=nx.MultiGraph) + assert isinstance(G, nx.DiGraph) + G = nx.empty_graph(create_using=None, default=nx.MultiGraph) + assert isinstance(G, nx.MultiGraph) + G = nx.empty_graph(default=nx.MultiGraph) + assert isinstance(G, nx.MultiGraph) + + G = nx.path_graph(5) + H = nx.empty_graph(create_using=G) + assert not H.is_multigraph() + assert not H.is_directed() + assert len(H) == 0 + assert G is H + + H = nx.empty_graph(create_using=nx.MultiGraph()) + assert H.is_multigraph() + assert not H.is_directed() + assert G is not H + + # test for subclasses that also use typing.Protocol. See gh-6243 + class Mixin(typing.Protocol): + pass + + class MyGraph(Mixin, nx.DiGraph): + pass + + G = nx.empty_graph(create_using=MyGraph) + + def test_empty_graph(self): + G = nx.empty_graph() + assert nx.number_of_nodes(G) == 0 + G = nx.empty_graph(42) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + + G = nx.empty_graph("abc") + assert len(G) == 3 + assert G.size() == 0 + + # create empty digraph + G = nx.empty_graph(42, create_using=nx.DiGraph(name="duh")) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.DiGraph) + + # create empty multigraph + G = nx.empty_graph(42, create_using=nx.MultiGraph(name="duh")) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.MultiGraph) + + # create empty graph from another + pete = nx.petersen_graph() + G = nx.empty_graph(42, create_using=pete) + assert nx.number_of_nodes(G) == 42 + assert nx.number_of_edges(G) == 0 + assert isinstance(G, nx.Graph) + + def test_ladder_graph(self): + for i, G in [ + (0, nx.empty_graph(0)), + (1, nx.path_graph(2)), + (2, nx.hypercube_graph(2)), + (10, nx.grid_graph([2, 10])), + ]: + assert nx.could_be_isomorphic(nx.ladder_graph(i), G) + + pytest.raises(nx.NetworkXError, nx.ladder_graph, 2, create_using=nx.DiGraph) + + g = nx.ladder_graph(2) + mg = nx.ladder_graph(2, create_using=nx.MultiGraph) + assert edges_equal(mg.edges(), g.edges()) + + @pytest.mark.parametrize(("m", "n"), [(3, 5), (4, 10), (3, 20)]) + def test_lollipop_graph_right_sizes(self, m, n): + G = nx.lollipop_graph(m, n) + assert nx.number_of_nodes(G) == m + n + assert nx.number_of_edges(G) == m * (m - 1) / 2 + n + + @pytest.mark.parametrize(("m", "n"), [("ab", ""), ("abc", "defg")]) + def test_lollipop_graph_size_node_sequence(self, m, n): + G = nx.lollipop_graph(m, n) + assert nx.number_of_nodes(G) == len(m) + len(n) + assert nx.number_of_edges(G) == len(m) * (len(m) - 1) / 2 + len(n) + + def test_lollipop_graph_exceptions(self): + # Raise NetworkXError if m<2 + pytest.raises(nx.NetworkXError, nx.lollipop_graph, -1, 2) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, 1, 20) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, "", 20) + pytest.raises(nx.NetworkXError, nx.lollipop_graph, "a", 20) + + # Raise NetworkXError if n<0 + pytest.raises(nx.NetworkXError, nx.lollipop_graph, 5, -2) + + # raise NetworkXError if create_using is directed + with 
pytest.raises(nx.NetworkXError): + nx.lollipop_graph(2, 20, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError): + nx.lollipop_graph(2, 20, create_using=nx.MultiDiGraph) + + @pytest.mark.parametrize(("m", "n"), [(2, 0), (2, 5), (2, 10), ("ab", 20)]) + def test_lollipop_graph_same_as_path_when_m1_is_2(self, m, n): + G = nx.lollipop_graph(m, n) + assert nx.could_be_isomorphic(G, nx.path_graph(n + 2)) + + def test_lollipop_graph_for_multigraph(self): + G = nx.lollipop_graph(5, 20) + MG = nx.lollipop_graph(5, 20, create_using=nx.MultiGraph) + assert edges_equal(MG.edges(), G.edges()) + + @pytest.mark.parametrize( + ("m", "n"), + [(4, "abc"), ("abcd", 3), ([1, 2, 3, 4], "abc"), ("abcd", [1, 2, 3])], + ) + def test_lollipop_graph_mixing_input_types(self, m, n): + expected = nx.compose(nx.complete_graph(4), nx.path_graph(range(100, 103))) + expected.add_edge(0, 100) # Connect complete graph and path graph + assert nx.could_be_isomorphic(nx.lollipop_graph(m, n), expected) + + def test_lollipop_graph_non_builtin_ints(self): + np = pytest.importorskip("numpy") + G = nx.lollipop_graph(np.int32(4), np.int64(3)) + expected = nx.compose(nx.complete_graph(4), nx.path_graph(range(100, 103))) + expected.add_edge(0, 100) # Connect complete graph and path graph + assert nx.could_be_isomorphic(G, expected) + + def test_null_graph(self): + assert nx.number_of_nodes(nx.null_graph()) == 0 + + def test_path_graph(self): + p = nx.path_graph(0) + assert nx.could_be_isomorphic(p, nx.null_graph()) + + p = nx.path_graph(1) + assert nx.could_be_isomorphic(p, nx.empty_graph(1)) + + p = nx.path_graph(10) + assert nx.is_connected(p) + assert sorted(d for n, d in p.degree()) == [1, 1, 2, 2, 2, 2, 2, 2, 2, 2] + assert p.order() - 1 == p.size() + + dp = nx.path_graph(3, create_using=nx.DiGraph) + assert dp.has_edge(0, 1) + assert not dp.has_edge(1, 0) + + mp = nx.path_graph(10, create_using=nx.MultiGraph) + assert edges_equal(mp.edges(), p.edges()) + + G = nx.path_graph("abc") + assert len(G) == 3 + assert G.size() == 2 + G = nx.path_graph("abcb") + assert len(G) == 3 + assert G.size() == 2 + g = nx.path_graph("abc", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 2 + assert g.is_directed() + g = nx.path_graph("abcb", nx.DiGraph) + assert len(g) == 3 + assert g.size() == 3 + + G = nx.path_graph((1, 2, 3, 2, 4)) + assert G.has_edge(2, 4) + + def test_star_graph(self): + assert nx.could_be_isomorphic(nx.star_graph(""), nx.empty_graph(0)) + assert nx.could_be_isomorphic(nx.star_graph([]), nx.empty_graph(0)) + assert nx.could_be_isomorphic(nx.star_graph(0), nx.empty_graph(1)) + assert nx.could_be_isomorphic(nx.star_graph(1), nx.path_graph(2)) + assert nx.could_be_isomorphic(nx.star_graph(2), nx.path_graph(3)) + assert nx.could_be_isomorphic( + nx.star_graph(5), nx.complete_bipartite_graph(1, 5) + ) + + s = nx.star_graph(10) + assert sorted(d for n, d in s.degree()) == [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10] + + pytest.raises(nx.NetworkXError, nx.star_graph, 10, create_using=nx.DiGraph) + + ms = nx.star_graph(10, create_using=nx.MultiGraph) + assert edges_equal(ms.edges(), s.edges()) + + G = nx.star_graph("abc") + assert len(G) == 3 + assert G.size() == 2 + + G = nx.star_graph("abcb") + assert len(G) == 3 + assert G.size() == 2 + G = nx.star_graph("abcb", create_using=nx.MultiGraph) + assert len(G) == 3 + assert G.size() == 3 + + G = nx.star_graph("abcdefg") + assert len(G) == 7 + assert G.size() == 6 + + def test_non_int_integers_for_star_graph(self): + np = pytest.importorskip("numpy") + G = 
nx.star_graph(np.int32(3)) + assert len(G) == 4 + assert G.size() == 3 + + @pytest.mark.parametrize(("m", "n"), [(3, 0), (3, 5), (4, 10), (3, 20)]) + def test_tadpole_graph_right_sizes(self, m, n): + G = nx.tadpole_graph(m, n) + assert nx.number_of_nodes(G) == m + n + assert nx.number_of_edges(G) == m + n - (m == 2) + + @pytest.mark.parametrize(("m", "n"), [("ab", ""), ("ab", "c"), ("abc", "defg")]) + def test_tadpole_graph_size_node_sequences(self, m, n): + G = nx.tadpole_graph(m, n) + assert nx.number_of_nodes(G) == len(m) + len(n) + assert nx.number_of_edges(G) == len(m) + len(n) - (len(m) == 2) + + def test_tadpole_graph_exceptions(self): + # Raise NetworkXError if m<2 + pytest.raises(nx.NetworkXError, nx.tadpole_graph, -1, 3) + pytest.raises(nx.NetworkXError, nx.tadpole_graph, 0, 3) + pytest.raises(nx.NetworkXError, nx.tadpole_graph, 1, 3) + + # Raise NetworkXError if n<0 + pytest.raises(nx.NetworkXError, nx.tadpole_graph, 5, -2) + + # Raise NetworkXError for digraphs + with pytest.raises(nx.NetworkXError): + nx.tadpole_graph(2, 20, create_using=nx.DiGraph) + with pytest.raises(nx.NetworkXError): + nx.tadpole_graph(2, 20, create_using=nx.MultiDiGraph) + + @pytest.mark.parametrize(("m", "n"), [(2, 0), (2, 5), (2, 10), ("ab", 20)]) + def test_tadpole_graph_same_as_path_when_m_is_2(self, m, n): + G = nx.tadpole_graph(m, n) + assert nx.could_be_isomorphic(G, nx.path_graph(n + 2)) + + @pytest.mark.parametrize("m", [4, 7]) + def test_tadpole_graph_same_as_cycle_when_m2_is_0(self, m): + G = nx.tadpole_graph(m, 0) + assert nx.could_be_isomorphic(G, nx.cycle_graph(m)) + + def test_tadpole_graph_for_multigraph(self): + G = nx.tadpole_graph(5, 20) + MG = nx.tadpole_graph(5, 20, create_using=nx.MultiGraph) + assert edges_equal(MG.edges(), G.edges()) + + @pytest.mark.parametrize( + ("m", "n"), + [(4, "abc"), ("abcd", 3), ([1, 2, 3, 4], "abc"), ("abcd", [1, 2, 3])], + ) + def test_tadpole_graph_mixing_input_types(self, m, n): + expected = nx.compose(nx.cycle_graph(4), nx.path_graph(range(100, 103))) + expected.add_edge(0, 100) # Connect cycle and path + assert nx.could_be_isomorphic(nx.tadpole_graph(m, n), expected) + + def test_tadpole_graph_non_builtin_integers(self): + np = pytest.importorskip("numpy") + G = nx.tadpole_graph(np.int32(4), np.int64(3)) + expected = nx.compose(nx.cycle_graph(4), nx.path_graph(range(100, 103))) + expected.add_edge(0, 100) # Connect cycle and path + assert nx.could_be_isomorphic(G, expected) + + def test_trivial_graph(self): + assert nx.number_of_nodes(nx.trivial_graph()) == 1 + + def test_turan_graph(self): + assert nx.number_of_edges(nx.turan_graph(13, 4)) == 63 + assert nx.could_be_isomorphic( + nx.turan_graph(13, 4), nx.complete_multipartite_graph(3, 4, 3, 3) + ) + + def test_wheel_graph(self): + for n, G in [ + ("", nx.null_graph()), + (0, nx.null_graph()), + (1, nx.empty_graph(1)), + (2, nx.path_graph(2)), + (3, nx.complete_graph(3)), + (4, nx.complete_graph(4)), + ]: + g = nx.wheel_graph(n) + assert nx.could_be_isomorphic(g, G) + + g = nx.wheel_graph(10) + assert sorted(d for n, d in g.degree()) == [3, 3, 3, 3, 3, 3, 3, 3, 3, 9] + + pytest.raises(nx.NetworkXError, nx.wheel_graph, 10, create_using=nx.DiGraph) + + mg = nx.wheel_graph(10, create_using=nx.MultiGraph()) + assert edges_equal(mg.edges(), g.edges()) + + G = nx.wheel_graph("abc") + assert len(G) == 3 + assert G.size() == 3 + + G = nx.wheel_graph("abcb") + assert len(G) == 3 + assert G.size() == 4 + G = nx.wheel_graph("abcb", nx.MultiGraph) + assert len(G) == 3 + assert G.size() == 6 + + def 
test_non_int_integers_for_wheel_graph(self): + np = pytest.importorskip("numpy") + G = nx.wheel_graph(np.int32(3)) + assert len(G) == 3 + assert G.size() == 3 + + def test_complete_0_partite_graph(self): + """Tests that the complete 0-partite graph is the null graph.""" + G = nx.complete_multipartite_graph() + H = nx.null_graph() + assert nodes_equal(G, H) + assert edges_equal(G.edges(), H.edges()) + + def test_complete_1_partite_graph(self): + """Tests that the complete 1-partite graph is the empty graph.""" + G = nx.complete_multipartite_graph(3) + H = nx.empty_graph(3) + assert nodes_equal(G, H) + assert edges_equal(G.edges(), H.edges()) + + def test_complete_2_partite_graph(self): + """Tests that the complete 2-partite graph is the complete bipartite + graph. + + """ + G = nx.complete_multipartite_graph(2, 3) + H = nx.complete_bipartite_graph(2, 3) + assert nodes_equal(G, H) + assert edges_equal(G.edges(), H.edges()) + + def test_complete_multipartite_graph(self): + """Tests for generating the complete multipartite graph.""" + G = nx.complete_multipartite_graph(2, 3, 4) + blocks = [(0, 1), (2, 3, 4), (5, 6, 7, 8)] + # Within each block, no two vertices should be adjacent. + for block in blocks: + for u, v in itertools.combinations_with_replacement(block, 2): + assert v not in G[u] + assert G.nodes[u] == G.nodes[v] + # Across blocks, all vertices should be adjacent. + for block1, block2 in itertools.combinations(blocks, 2): + for u, v in itertools.product(block1, block2): + assert v in G[u] + assert G.nodes[u] != G.nodes[v] + with pytest.raises(nx.NetworkXError, match="Negative number of nodes"): + nx.complete_multipartite_graph(2, -3, 4) + + def test_kneser_graph(self): + # the petersen graph is a special case of the kneser graph when n=5 and k=2 + assert nx.could_be_isomorphic(nx.kneser_graph(5, 2), nx.petersen_graph()) + + # when k is 1, the kneser graph returns a complete graph with n vertices + for i in range(1, 7): + assert nx.could_be_isomorphic(nx.kneser_graph(i, 1), nx.complete_graph(i)) + + # the kneser graph of n and n-1 is the empty graph with n vertices + for j in range(3, 7): + assert nx.could_be_isomorphic(nx.kneser_graph(j, j - 1), nx.empty_graph(j)) + + # in general the number of edges of the kneser graph is equal to + # (n choose k) times (n-k choose k) divided by 2 + assert nx.number_of_edges(nx.kneser_graph(8, 3)) == 280 diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_cographs.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_cographs.py new file mode 100644 index 0000000..a71849b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_cographs.py @@ -0,0 +1,18 @@ +"""Unit tests for the :mod:`networkx.generators.cographs` module.""" + +import networkx as nx + + +def test_random_cograph(): + n = 3 + G = nx.random_cograph(n) + + assert len(G) == 2**n + + # Every connected subgraph of G has diameter <= 2 + if nx.is_connected(G): + assert nx.diameter(G) <= 2 + else: + components = nx.connected_components(G) + for component in components: + assert nx.diameter(G.subgraph(component)) <= 2 diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_community.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_community.py new file mode 100644 index 0000000..2fa107f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_community.py @@ -0,0 +1,362 @@ +import pytest + +import networkx as nx + + +def 
test_random_partition_graph(): + G = nx.random_partition_graph([3, 3, 3], 1, 0, seed=42) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 9 + + G = nx.random_partition_graph([3, 3, 3], 0, 1) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 27 + + G = nx.random_partition_graph([3, 3, 3], 1, 0, directed=True) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 18 + + G = nx.random_partition_graph([3, 3, 3], 0, 1, directed=True) + C = G.graph["partition"] + assert C == [{0, 1, 2}, {3, 4, 5}, {6, 7, 8}] + assert len(G) == 9 + assert len(list(G.edges())) == 54 + + G = nx.random_partition_graph([1, 2, 3, 4, 5], 0.5, 0.1) + C = G.graph["partition"] + assert C == [{0}, {1, 2}, {3, 4, 5}, {6, 7, 8, 9}, {10, 11, 12, 13, 14}] + assert len(G) == 15 + + rpg = nx.random_partition_graph + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 1.1, 0.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], -0.1, 0.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, 1.1) + pytest.raises(nx.NetworkXError, rpg, [1, 2, 3], 0.1, -0.1) + + +def test_planted_partition_graph(): + G = nx.planted_partition_graph(4, 3, 1, 0, seed=42) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 12 + + G = nx.planted_partition_graph(4, 3, 0, 1) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 54 + + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42) + C = G.graph["partition"] + assert len(C) == 10 + assert len(G) == 40 + + G = nx.planted_partition_graph(4, 3, 1, 0, directed=True) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 24 + + G = nx.planted_partition_graph(4, 3, 0, 1, directed=True) + C = G.graph["partition"] + assert len(C) == 4 + assert len(G) == 12 + assert len(list(G.edges())) == 108 + + G = nx.planted_partition_graph(10, 4, 0.5, 0.1, seed=42, directed=True) + C = G.graph["partition"] + assert len(C) == 10 + assert len(G) == 40 + + ppg = nx.planted_partition_graph + pytest.raises(nx.NetworkXError, ppg, 3, 3, 1.1, 0.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, -0.1, 0.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, 1.1) + pytest.raises(nx.NetworkXError, ppg, 3, 3, 0.1, -0.1) + + +def test_relaxed_caveman_graph(): + G = nx.relaxed_caveman_graph(4, 3, 0) + assert len(G) == 12 + G = nx.relaxed_caveman_graph(4, 3, 1) + assert len(G) == 12 + G = nx.relaxed_caveman_graph(4, 3, 0.5) + assert len(G) == 12 + G = nx.relaxed_caveman_graph(4, 3, 0.5, seed=42) + assert len(G) == 12 + + +def test_connected_caveman_graph(): + G = nx.connected_caveman_graph(4, 3) + assert len(G) == 12 + + G = nx.connected_caveman_graph(1, 5) + K5 = nx.complete_graph(5) + K5.remove_edge(3, 4) + assert nx.is_isomorphic(G, K5) + + # need at least 2 nodes in each clique + pytest.raises(nx.NetworkXError, nx.connected_caveman_graph, 4, 1) + + +def test_caveman_graph(): + G = nx.caveman_graph(4, 3) + assert len(G) == 12 + + G = nx.caveman_graph(5, 1) + E5 = nx.empty_graph(5) + assert nx.is_isomorphic(G, E5) + + G = nx.caveman_graph(1, 5) + K5 = nx.complete_graph(5) + assert nx.is_isomorphic(G, K5) + + +def test_gaussian_random_partition_graph(): + G = nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01) + assert len(G) == 100 + G = 
nx.gaussian_random_partition_graph(100, 10, 10, 0.3, 0.01, directed=True) + assert len(G) == 100 + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=False, seed=42 + ) + assert len(G) == 100 + assert not isinstance(G, nx.DiGraph) + G = nx.gaussian_random_partition_graph( + 100, 10, 10, 0.3, 0.01, directed=True, seed=42 + ) + assert len(G) == 100 + assert isinstance(G, nx.DiGraph) + pytest.raises( + nx.NetworkXError, nx.gaussian_random_partition_graph, 100, 101, 10, 1, 0 + ) + # Test when clusters are likely less than 1 + G = nx.gaussian_random_partition_graph(10, 0.5, 0.5, 0.5, 0.5, seed=1) + assert len(G) == 10 + + +def test_ring_of_cliques(): + for i in range(2, 20, 3): + for j in range(2, 20, 3): + G = nx.ring_of_cliques(i, j) + assert G.number_of_nodes() == i * j + if i != 2 or j != 1: + expected_num_edges = i * (((j * (j - 1)) // 2) + 1) + else: + # the edge that already exists cannot be duplicated + expected_num_edges = i * (((j * (j - 1)) // 2) + 1) - 1 + assert G.number_of_edges() == expected_num_edges + with pytest.raises( + nx.NetworkXError, match="A ring of cliques must have at least two cliques" + ): + nx.ring_of_cliques(1, 5) + with pytest.raises( + nx.NetworkXError, match="The cliques must have at least two nodes" + ): + nx.ring_of_cliques(3, 0) + + +def test_windmill_graph(): + for n in range(2, 20, 3): + for k in range(2, 20, 3): + G = nx.windmill_graph(n, k) + assert G.number_of_nodes() == (k - 1) * n + 1 + assert G.number_of_edges() == n * k * (k - 1) / 2 + assert G.degree(0) == G.number_of_nodes() - 1 + for i in range(1, G.number_of_nodes()): + assert G.degree(i) == k - 1 + with pytest.raises( + nx.NetworkXError, match="A windmill graph must have at least two cliques" + ): + nx.windmill_graph(1, 3) + with pytest.raises( + nx.NetworkXError, match="The cliques must have at least two nodes" + ): + nx.windmill_graph(3, 0) + + +def test_stochastic_block_model(): + sizes = [75, 75, 300] + probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + G = nx.stochastic_block_model(sizes, probs, seed=0) + C = G.graph["partition"] + assert len(C) == 3 + assert len(G) == 450 + assert G.size() == 22160 + + GG = nx.stochastic_block_model(sizes, probs, range(450), seed=0) + assert G.nodes == GG.nodes + + # Test Exceptions + sbm = nx.stochastic_block_model + badnodelist = list(range(400)) # not enough nodes to match sizes + badprobs1 = [[0.25, 0.05, 1.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + badprobs2 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] + probs_rect1 = [[0.25, 0.05, 0.02], [0.05, -0.35, 0.07]] + probs_rect2 = [[0.25, 0.05], [0.05, -0.35], [0.02, 0.07]] + asymprobs = [[0.25, 0.05, 0.01], [0.05, -0.35, 0.07], [0.02, 0.07, 0.40]] + pytest.raises(nx.NetworkXException, sbm, sizes, badprobs1) + pytest.raises(nx.NetworkXException, sbm, sizes, badprobs2) + pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect1, directed=True) + pytest.raises(nx.NetworkXException, sbm, sizes, probs_rect2, directed=True) + pytest.raises(nx.NetworkXException, sbm, sizes, asymprobs, directed=False) + pytest.raises(nx.NetworkXException, sbm, sizes, probs, badnodelist) + nodelist = [0] + list(range(449)) # repeated node name in nodelist + pytest.raises(nx.NetworkXException, sbm, sizes, probs, nodelist) + + # Extra keyword arguments test + GG = nx.stochastic_block_model(sizes, probs, seed=0, selfloops=True) + assert G.nodes == GG.nodes + GG = nx.stochastic_block_model(sizes, probs, selfloops=True, directed=True) + assert G.nodes == GG.nodes + GG 
= nx.stochastic_block_model(sizes, probs, seed=0, sparse=False) + assert G.nodes == GG.nodes + + +def test_generator(): + n = 250 + tau1 = 3 + tau2 = 1.5 + mu = 0.1 + G = nx.LFR_benchmark_graph( + n, tau1, tau2, mu, average_degree=5, min_community=20, seed=10 + ) + assert len(G) == 250 + C = {frozenset(G.nodes[v]["community"]) for v in G} + assert nx.community.is_partition(G.nodes(), C) + + +def test_invalid_tau1(): + with pytest.raises(nx.NetworkXError, match="tau2 must be greater than one"): + n = 100 + tau1 = 2 + tau2 = 1 + mu = 0.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_invalid_tau2(): + with pytest.raises(nx.NetworkXError, match="tau1 must be greater than one"): + n = 100 + tau1 = 1 + tau2 = 2 + mu = 0.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_mu_too_large(): + with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"): + n = 100 + tau1 = 2 + tau2 = 2 + mu = 1.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_mu_too_small(): + with pytest.raises(nx.NetworkXError, match="mu must be in the interval \\[0, 1\\]"): + n = 100 + tau1 = 2 + tau2 = 2 + mu = -1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2) + + +def test_both_degrees_none(): + with pytest.raises( + nx.NetworkXError, + match="Must assign exactly one of min_degree and average_degree", + ): + n = 100 + tau1 = 2 + tau2 = 2 + mu = 1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu) + + +def test_neither_degrees_none(): + with pytest.raises( + nx.NetworkXError, + match="Must assign exactly one of min_degree and average_degree", + ): + n = 100 + tau1 = 2 + tau2 = 2 + mu = 1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, average_degree=5) + + +def test_max_iters_exceeded(): + with pytest.raises( + nx.ExceededMaxIterations, + match="Could not assign communities; try increasing min_community", + ): + n = 10 + tau1 = 2 + tau2 = 2 + mu = 0.1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, max_iters=10, seed=1) + + +def test_max_deg_out_of_range(): + with pytest.raises( + nx.NetworkXError, match="max_degree must be in the interval \\(0, n\\]" + ): + n = 10 + tau1 = 2 + tau2 = 2 + mu = 0.1 + nx.LFR_benchmark_graph( + n, tau1, tau2, mu, max_degree=n + 1, max_iters=10, seed=1 + ) + + +def test_max_community(): + n = 250 + tau1 = 3 + tau2 = 1.5 + mu = 0.1 + G = nx.LFR_benchmark_graph( + n, + tau1, + tau2, + mu, + average_degree=5, + max_degree=100, + min_community=50, + max_community=200, + seed=10, + ) + assert len(G) == 250 + C = {frozenset(G.nodes[v]["community"]) for v in G} + assert nx.community.is_partition(G.nodes(), C) + + +def test_powerlaw_iterations_exceeded(): + with pytest.raises( + nx.ExceededMaxIterations, match="Could not create power law sequence" + ): + n = 100 + tau1 = 2 + tau2 = 2 + mu = 1 + nx.LFR_benchmark_graph(n, tau1, tau2, mu, min_degree=2, max_iters=0) + + +def test_no_scipy_zeta(): + zeta2 = 1.6449340668482264 + assert abs(zeta2 - nx.generators.community._hurwitz_zeta(2, 1, 0.0001)) < 0.01 + + +def test_generate_min_degree_itr(): + with pytest.raises( + nx.ExceededMaxIterations, match="Could not match average_degree" + ): + nx.generators.community._generate_min_degree(2, 2, 1, 0.01, 0) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_degree_seq.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_degree_seq.py new file mode 100644 index 0000000..c7a622f --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_degree_seq.py @@ -0,0 +1,237 @@ +import pytest + +import networkx as nx + + +class TestConfigurationModel: + """Unit tests for the :func:`~networkx.configuration_model` + function. + + """ + + def test_empty_degree_sequence(self): + """Tests that an empty degree sequence yields the null graph.""" + G = nx.configuration_model([]) + assert len(G) == 0 + + def test_degree_zero(self): + """Tests that a degree sequence of all zeros yields the empty + graph. + + """ + G = nx.configuration_model([0, 0, 0]) + assert len(G) == 3 + assert G.number_of_edges() == 0 + + def test_degree_sequence(self): + """Tests that the degree sequence of the generated graph matches + the input degree sequence. + + """ + deg_seq = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + G = nx.configuration_model(deg_seq, seed=12345678) + assert sorted((d for n, d in G.degree()), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] + assert sorted((d for n, d in G.degree(range(len(deg_seq)))), reverse=True) == [ + 5, + 3, + 3, + 3, + 3, + 2, + 2, + 2, + 1, + 1, + 1, + ] + + def test_random_seed(self): + """Tests that each call with the same random seed generates the + same graph. + + """ + deg_seq = [3] * 12 + G1 = nx.configuration_model(deg_seq, seed=1000) + G2 = nx.configuration_model(deg_seq, seed=1000) + assert nx.is_isomorphic(G1, G2) + G1 = nx.configuration_model(deg_seq, seed=10) + G2 = nx.configuration_model(deg_seq, seed=10) + assert nx.is_isomorphic(G1, G2) + + def test_directed_disallowed(self): + """Tests that attempting to create a configuration model graph + using a directed graph yields an exception. + + """ + with pytest.raises(nx.NetworkXNotImplemented): + nx.configuration_model([], create_using=nx.DiGraph()) + + def test_odd_degree_sum(self): + """Tests that a degree sequence whose sum is odd yields an + exception. 
+ + """ + with pytest.raises(nx.NetworkXError): + nx.configuration_model([1, 2]) + + +def test_directed_configuration_raise_unequal(): + with pytest.raises(nx.NetworkXError): + zin = [5, 3, 3, 3, 3, 2, 2, 2, 1, 1] + zout = [5, 3, 3, 3, 3, 2, 2, 2, 1, 2] + nx.directed_configuration_model(zin, zout) + + +def test_directed_configuration_model(): + G = nx.directed_configuration_model([], [], seed=0) + assert len(G) == 0 + + +def test_simple_directed_configuration_model(): + G = nx.directed_configuration_model([1, 1], [1, 1], seed=0) + assert len(G) == 2 + + +def test_expected_degree_graph_empty(): + # empty graph has empty degree sequence + deg_seq = [] + G = nx.expected_degree_graph(deg_seq) + assert dict(G.degree()) == {} + + +def test_expected_degree_graph(): + # test that fixed seed delivers the same graph + deg_seq = [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] + G1 = nx.expected_degree_graph(deg_seq, seed=1000) + assert len(G1) == 12 + + G2 = nx.expected_degree_graph(deg_seq, seed=1000) + assert nx.is_isomorphic(G1, G2) + + G1 = nx.expected_degree_graph(deg_seq, seed=10) + G2 = nx.expected_degree_graph(deg_seq, seed=10) + assert nx.is_isomorphic(G1, G2) + + +def test_expected_degree_graph_selfloops(): + deg_seq = [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] + G1 = nx.expected_degree_graph(deg_seq, seed=1000, selfloops=False) + G2 = nx.expected_degree_graph(deg_seq, seed=1000, selfloops=False) + assert nx.is_isomorphic(G1, G2) + assert len(G1) == 12 + + +def test_expected_degree_graph_skew(): + deg_seq = [10, 2, 2, 2, 2] + G1 = nx.expected_degree_graph(deg_seq, seed=1000) + G2 = nx.expected_degree_graph(deg_seq, seed=1000) + assert nx.is_isomorphic(G1, G2) + assert len(G1) == 5 + + +def test_havel_hakimi_construction(): + G = nx.havel_hakimi_graph([]) + assert len(G) == 0 + + z = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + z = ["A", 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + + z = [5, 4, 3, 3, 3, 2, 2, 2] + G = nx.havel_hakimi_graph(z) + G = nx.configuration_model(z) + z = [6, 5, 4, 4, 2, 1, 1, 1] + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z) + + z = [10, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2] + + G = nx.havel_hakimi_graph(z) + + pytest.raises(nx.NetworkXError, nx.havel_hakimi_graph, z, create_using=nx.DiGraph()) + + +def test_directed_havel_hakimi(): + # Test range of valid directed degree sequences + n, r = 100, 10 + p = 1.0 / r + for i in range(r): + G1 = nx.erdos_renyi_graph(n, p * (i + 1), None, True) + din1 = [d for n, d in G1.in_degree()] + dout1 = [d for n, d in G1.out_degree()] + G2 = nx.directed_havel_hakimi_graph(din1, dout1) + din2 = [d for n, d in G2.in_degree()] + dout2 = [d for n, d in G2.out_degree()] + assert sorted(din1) == sorted(din2) + assert sorted(dout1) == sorted(dout2) + + # Test non-graphical sequence + dout = [1000, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1] + din = [103, 102, 102, 102, 102, 102, 102, 102, 102, 102] + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) + # Test valid sequences + dout = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + din = [2, 2, 2, 2, 2, 2, 2, 2, 0, 2] + G2 = nx.directed_havel_hakimi_graph(din, dout) + dout2 = (d for n, d in G2.out_degree()) + din2 = (d for n, d in G2.in_degree()) + assert sorted(dout) == sorted(dout2) + assert sorted(din) == sorted(din2) + # Test unequal sums + din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2] + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) + # Test for negative values + 
din = [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, -2] + pytest.raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) + + +def test_degree_sequence_tree(): + z = [1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + G = nx.degree_sequence_tree(z) + assert len(G) == len(z) + assert len(list(G.edges())) == sum(z) / 2 + + pytest.raises( + nx.NetworkXError, nx.degree_sequence_tree, z, create_using=nx.DiGraph() + ) + + z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + pytest.raises(nx.NetworkXError, nx.degree_sequence_tree, z) + + +def test_random_degree_sequence_graph(): + d = [1, 2, 2, 3] + G = nx.random_degree_sequence_graph(d, seed=42) + assert d == sorted(d for n, d in G.degree()) + + +def test_random_degree_sequence_graph_raise(): + z = [1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] + pytest.raises(nx.NetworkXUnfeasible, nx.random_degree_sequence_graph, z) + + +def test_random_degree_sequence_large(): + G1 = nx.fast_gnp_random_graph(100, 0.1, seed=42) + d1 = [d for n, d in G1.degree()] + G2 = nx.random_degree_sequence_graph(d1, seed=42) + d2 = [d for n, d in G2.degree()] + assert sorted(d1) == sorted(d2) + + +def test_random_degree_sequence_iterator(): + G1 = nx.fast_gnp_random_graph(100, 0.1, seed=42) + d1 = (d for n, d in G1.degree()) + G2 = nx.random_degree_sequence_graph(d1, seed=42) + assert len(G2) > 0 diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_directed.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_directed.py new file mode 100644 index 0000000..489ca3c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_directed.py @@ -0,0 +1,182 @@ +"""Generators - Directed Graphs +---------------------------- +""" + +import pytest + +import networkx as nx +from networkx.classes import Graph, MultiDiGraph +from networkx.generators.directed import ( + _random_k_out_graph_numpy, + _random_k_out_graph_python, + gn_graph, + gnc_graph, + gnr_graph, + random_k_out_graph, + random_uniform_k_out_graph, + scale_free_graph, +) + +try: + import numpy as np + + has_numpy = True +except ImportError: + has_numpy = False + + +class TestGeneratorsDirected: + def test_smoke_test_random_graphs(self): + gn_graph(100) + gnr_graph(100, 0.5) + gnc_graph(100) + scale_free_graph(100) + + gn_graph(100, seed=42) + gnr_graph(100, 0.5, seed=42) + gnc_graph(100, seed=42) + scale_free_graph(100, seed=42) + + def test_create_using_keyword_arguments(self): + pytest.raises(nx.NetworkXError, gn_graph, 100, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnr_graph, 100, 0.5, create_using=Graph()) + pytest.raises(nx.NetworkXError, gnc_graph, 100, create_using=Graph()) + G = gn_graph(100, seed=1) + MG = gn_graph(100, create_using=MultiDiGraph(), seed=1) + assert sorted(G.edges()) == sorted(MG.edges()) + G = gnr_graph(100, 0.5, seed=1) + MG = gnr_graph(100, 0.5, create_using=MultiDiGraph(), seed=1) + assert sorted(G.edges()) == sorted(MG.edges()) + G = gnc_graph(100, seed=1) + MG = gnc_graph(100, create_using=MultiDiGraph(), seed=1) + assert sorted(G.edges()) == sorted(MG.edges()) + + G = scale_free_graph( + 100, + alpha=0.3, + beta=0.4, + gamma=0.3, + delta_in=0.3, + delta_out=0.1, + initial_graph=nx.cycle_graph(4, create_using=MultiDiGraph), + seed=1, + ) + pytest.raises(ValueError, scale_free_graph, 100, 0.5, 0.4, 0.3) + pytest.raises(ValueError, scale_free_graph, 100, alpha=-0.3) + pytest.raises(ValueError, scale_free_graph, 100, beta=-0.3) + pytest.raises(ValueError, scale_free_graph, 100, gamma=-0.3) + + def test_parameters(self): + G = nx.DiGraph() + 
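# Reference graph: a single isolated node, which each generator
+ # below should reproduce when asked for a one-node graph.
+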
G.add_node(0) + + def kernel(x): + return x + + assert nx.is_isomorphic(gn_graph(1), G) + assert nx.is_isomorphic(gn_graph(1, kernel=kernel), G) + assert nx.is_isomorphic(gnc_graph(1), G) + assert nx.is_isomorphic(gnr_graph(1, 0.5), G) + + +def test_scale_free_graph_negative_delta(): + with pytest.raises(ValueError, match="delta_in must be >= 0."): + scale_free_graph(10, delta_in=-1) + with pytest.raises(ValueError, match="delta_out must be >= 0."): + scale_free_graph(10, delta_out=-1) + + +def test_non_numeric_ordering(): + G = MultiDiGraph([("a", "b"), ("b", "c"), ("c", "a")]) + s = scale_free_graph(3, initial_graph=G) + assert len(s) == 3 + assert len(s.edges) == 3 + + +@pytest.mark.parametrize("ig", (nx.Graph(), nx.DiGraph([(0, 1)]))) +def test_scale_free_graph_initial_graph_kwarg(ig): + with pytest.raises(nx.NetworkXError): + scale_free_graph(100, initial_graph=ig) + + +class TestRandomKOutGraph: + """Unit tests for the + :func:`~networkx.generators.directed.random_k_out_graph` function. + + """ + + @pytest.mark.parametrize( + "f", (_random_k_out_graph_numpy, _random_k_out_graph_python) + ) + def test_regularity(self, f): + """Tests that the generated graph is `k`-out-regular.""" + if (f == _random_k_out_graph_numpy) and not has_numpy: + pytest.skip() + n = 10 + k = 3 + alpha = 1 + G = f(n, k, alpha) + assert all(d == k for v, d in G.out_degree()) + G = f(n, k, alpha, seed=42) + assert all(d == k for v, d in G.out_degree()) + + @pytest.mark.parametrize( + "f", (_random_k_out_graph_numpy, _random_k_out_graph_python) + ) + def test_no_self_loops(self, f): + """Tests for forbidding self-loops.""" + if (f == _random_k_out_graph_numpy) and not has_numpy: + pytest.skip() + n = 10 + k = 3 + alpha = 1 + G = f(n, k, alpha, self_loops=False) + assert nx.number_of_selfloops(G) == 0 + + def test_negative_alpha(self): + with pytest.raises(ValueError, match="alpha must be positive"): + random_k_out_graph(10, 3, -1) + + +class TestUniformRandomKOutGraph: + """Unit tests for the + :func:`~networkx.generators.directed.random_uniform_k_out_graph` + function. 
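+ Per the generator's documented model, every node independently
+ draws k out-neighbors uniformly at random, so the result is
+ k-out-regular by construction (a directed multigraph when
+ ``with_replacement=True``, the default).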
+ + """ + + def test_regularity(self): + """Tests that the generated graph is `k`-out-regular.""" + n = 10 + k = 3 + G = random_uniform_k_out_graph(n, k) + assert all(d == k for v, d in G.out_degree()) + G = random_uniform_k_out_graph(n, k, seed=42) + assert all(d == k for v, d in G.out_degree()) + + def test_no_self_loops(self): + """Tests for forbidding self-loops.""" + n = 10 + k = 3 + G = random_uniform_k_out_graph(n, k, self_loops=False) + assert nx.number_of_selfloops(G) == 0 + assert all(d == k for v, d in G.out_degree()) + + def test_with_replacement(self): + n = 10 + k = 3 + G = random_uniform_k_out_graph(n, k, with_replacement=True) + assert G.is_multigraph() + assert all(d == k for v, d in G.out_degree()) + n = 10 + k = 9 + G = random_uniform_k_out_graph(n, k, with_replacement=False, self_loops=False) + assert nx.number_of_selfloops(G) == 0 + assert all(d == k for v, d in G.out_degree()) + + def test_without_replacement(self): + n = 10 + k = 3 + G = random_uniform_k_out_graph(n, k, with_replacement=False) + assert not G.is_multigraph() + assert all(d == k for v, d in G.out_degree()) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_duplication.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_duplication.py new file mode 100644 index 0000000..9b6100b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_duplication.py @@ -0,0 +1,103 @@ +"""Unit tests for the :mod:`networkx.generators.duplication` module.""" + +import pytest + +import networkx as nx + + +class TestDuplicationDivergenceGraph: + """Unit tests for the + :func:`networkx.generators.duplication.duplication_divergence_graph` + function. + + """ + + def test_final_size(self): + G = nx.duplication_divergence_graph(3, p=1) + assert len(G) == 3 + G = nx.duplication_divergence_graph(3, p=1, seed=42) + assert len(G) == 3 + + def test_probability_too_large(self): + with pytest.raises(nx.NetworkXError): + nx.duplication_divergence_graph(3, p=2) + + def test_probability_too_small(self): + with pytest.raises(nx.NetworkXError): + nx.duplication_divergence_graph(3, p=-1) + + def test_non_extreme_probability_value(self): + G = nx.duplication_divergence_graph(6, p=0.3, seed=42) + assert len(G) == 6 + assert list(G.degree()) == [(0, 2), (1, 3), (2, 2), (3, 3), (4, 1), (5, 1)] + + def test_minimum_desired_nodes(self): + with pytest.raises( + nx.NetworkXError, match=".*n must be greater than or equal to 2" + ): + nx.duplication_divergence_graph(1, p=1) + + def test_create_using(self): + class DummyGraph(nx.Graph): + pass + + class DummyDiGraph(nx.DiGraph): + pass + + G = nx.duplication_divergence_graph(6, 0.3, seed=42, create_using=DummyGraph) + assert isinstance(G, DummyGraph) + with pytest.raises(nx.NetworkXError, match="create_using must not be directed"): + nx.duplication_divergence_graph(6, 0.3, seed=42, create_using=DummyDiGraph) + + +class TestPartialDuplicationGraph: + """Unit tests for the + :func:`networkx.generators.duplication.partial_duplication_graph` + function. 
+ + """ + + def test_final_size(self): + N = 10 + n = 5 + p = 0.5 + q = 0.5 + G = nx.partial_duplication_graph(N, n, p, q) + assert len(G) == N + G = nx.partial_duplication_graph(N, n, p, q, seed=42) + assert len(G) == N + + def test_initial_clique_size(self): + N = 10 + n = 10 + p = 0.5 + q = 0.5 + G = nx.partial_duplication_graph(N, n, p, q) + assert len(G) == n + + def test_invalid_initial_size(self): + with pytest.raises(nx.NetworkXError): + N = 5 + n = 10 + p = 0.5 + q = 0.5 + G = nx.partial_duplication_graph(N, n, p, q) + + def test_invalid_probabilities(self): + N = 1 + n = 1 + for p, q in [(0.5, 2), (0.5, -1), (2, 0.5), (-1, 0.5)]: + args = (N, n, p, q) + pytest.raises(nx.NetworkXError, nx.partial_duplication_graph, *args) + + def test_create_using(self): + class DummyGraph(nx.Graph): + pass + + class DummyDiGraph(nx.DiGraph): + pass + + G = nx.partial_duplication_graph(10, 5, 0.5, 0.5, create_using=DummyGraph) + assert isinstance(G, DummyGraph) + with pytest.raises(nx.NetworkXError, match="create_using must not be directed"): + nx.partial_duplication_graph(10, 5, 0.5, 0.5, create_using=DummyDiGraph) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_ego.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_ego.py new file mode 100644 index 0000000..f6fc779 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_ego.py @@ -0,0 +1,39 @@ +""" +ego graph +--------- +""" + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +class TestGeneratorEgo: + def test_ego(self): + G = nx.star_graph(3) + H = nx.ego_graph(G, 0) + assert nx.is_isomorphic(G, H) + G.add_edge(1, 11) + G.add_edge(2, 22) + G.add_edge(3, 33) + H = nx.ego_graph(G, 0) + assert nx.is_isomorphic(nx.star_graph(3), H) + G = nx.path_graph(3) + H = nx.ego_graph(G, 0) + assert edges_equal(H.edges(), [(0, 1)]) + H = nx.ego_graph(G, 0, undirected=True) + assert edges_equal(H.edges(), [(0, 1)]) + H = nx.ego_graph(G, 0, center=False) + assert edges_equal(H.edges(), []) + + def test_ego_distance(self): + G = nx.Graph() + G.add_edge(0, 1, weight=2, distance=1) + G.add_edge(1, 2, weight=2, distance=2) + G.add_edge(2, 3, weight=2, distance=1) + assert nodes_equal(nx.ego_graph(G, 0, radius=3).nodes(), [0, 1, 2, 3]) + eg = nx.ego_graph(G, 0, radius=3, distance="weight") + assert nodes_equal(eg.nodes(), [0, 1]) + eg = nx.ego_graph(G, 0, radius=3, distance="weight", undirected=True) + assert nodes_equal(eg.nodes(), [0, 1]) + eg = nx.ego_graph(G, 0, radius=3, distance="distance") + assert nodes_equal(eg.nodes(), [0, 1, 2]) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_expanders.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_expanders.py new file mode 100644 index 0000000..7584c28 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_expanders.py @@ -0,0 +1,171 @@ +"""Unit tests for the :mod:`networkx.generators.expanders` module.""" + +import pytest + +import networkx as nx + + +@pytest.mark.parametrize("n", (2, 3, 5, 6, 10)) +def test_margulis_gabber_galil_graph_properties(n): + g = nx.margulis_gabber_galil_graph(n) + assert g.number_of_nodes() == n * n + for node in g: + assert g.degree(node) == 8 + assert len(node) == 2 + for i in node: + assert int(i) == i + assert 0 <= i < n + + +@pytest.mark.parametrize("n", (2, 3, 5, 6, 10)) +def test_margulis_gabber_galil_graph_eigvals(n): + np = pytest.importorskip("numpy") + sp = pytest.importorskip("scipy") + 
+ g = nx.margulis_gabber_galil_graph(n) + # Eigenvalues are already sorted using the scipy eigvalsh, + # but the implementation in numpy does not guarantee order. + w = sorted(sp.linalg.eigvalsh(nx.adjacency_matrix(g).toarray())) + assert w[-2] < 5 * np.sqrt(2) + + +@pytest.mark.parametrize("p", (3, 5, 7, 11)) # Primes +def test_chordal_cycle_graph(p): + """Test for the :func:`networkx.chordal_cycle_graph` function.""" + G = nx.chordal_cycle_graph(p) + assert len(G) == p + # TODO The second largest eigenvalue should be smaller than a constant, + # independent of the number of nodes in the graph: + # + # eigs = sorted(sp.linalg.eigvalsh(nx.adjacency_matrix(G).toarray())) + # assert_less(eigs[-2], ...) + # + + +@pytest.mark.parametrize("p", (3, 5, 7, 11, 13)) # Primes +def test_paley_graph(p): + """Test for the :func:`networkx.paley_graph` function.""" + G = nx.paley_graph(p) + # G has p nodes + assert len(G) == p + # G is (p-1)/2-regular + in_degrees = {G.in_degree(node) for node in G.nodes} + out_degrees = {G.out_degree(node) for node in G.nodes} + assert len(in_degrees) == 1 and in_degrees.pop() == (p - 1) // 2 + assert len(out_degrees) == 1 and out_degrees.pop() == (p - 1) // 2 + + # If p = 1 mod 4, -1 is a square mod 4 and therefore the + # edge in the Paley graph are symmetric. + if p % 4 == 1: + for u, v in G.edges: + assert (v, u) in G.edges + + +@pytest.mark.parametrize("d, n", [(2, 7), (4, 10), (4, 16)]) +def test_maybe_regular_expander(d, n): + pytest.importorskip("numpy") + G = nx.maybe_regular_expander(n, d, seed=1729) + + assert len(G) == n, "Should have n nodes" + assert len(G.edges) == n * d / 2, "Should have n*d/2 edges" + assert nx.is_k_regular(G, d), "Should be d-regular" + + +def test_maybe_regular_expander_max_tries(): + pytest.importorskip("numpy") + d, n = 4, 10 + msg = "Too many iterations in maybe_regular_expander" + with pytest.raises(nx.NetworkXError, match=msg): + nx.maybe_regular_expander(n, d, max_tries=100, seed=6818) # See gh-8048 + + nx.maybe_regular_expander(n, d, max_tries=130, seed=6818) + + +@pytest.mark.parametrize("n", (3, 5, 6, 10)) +def test_is_regular_expander(n): + pytest.importorskip("numpy") + pytest.importorskip("scipy") + G = nx.complete_graph(n) + + assert nx.is_regular_expander(G), "Should be a regular expander" + + +@pytest.mark.parametrize("d, n", [(2, 7), (4, 10), (4, 16), (4, 2000)]) +def test_random_regular_expander(d, n): + pytest.importorskip("numpy") + pytest.importorskip("scipy") + G = nx.random_regular_expander_graph(n, d, seed=1729) + + assert len(G) == n, "Should have n nodes" + assert len(G.edges) == n * d / 2, "Should have n*d/2 edges" + assert nx.is_k_regular(G, d), "Should be d-regular" + assert nx.is_regular_expander(G), "Should be a regular expander" + + +def test_random_regular_expander_explicit_construction(): + pytest.importorskip("numpy") + pytest.importorskip("scipy") + G = nx.random_regular_expander_graph(d=4, n=5, seed=1729) + + assert len(G) == 5 and len(G.edges) == 10, "Should be a complete graph" + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph, nx.MultiDiGraph)) +def test_margulis_gabber_galil_graph_badinput(graph_type): + with pytest.raises( + nx.NetworkXError, match="`create_using` must be an undirected multigraph" + ): + nx.margulis_gabber_galil_graph(3, create_using=graph_type) + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph, nx.MultiDiGraph)) +def test_chordal_cycle_graph_badinput(graph_type): + with pytest.raises( + nx.NetworkXError, match="`create_using` must be an 
undirected multigraph" + ): + nx.chordal_cycle_graph(3, create_using=graph_type) + + +def test_paley_graph_badinput(): + with pytest.raises( + nx.NetworkXError, match="`create_using` cannot be a multigraph." + ): + nx.paley_graph(3, create_using=nx.MultiGraph) + + +def test_maybe_regular_expander_badinput(): + pytest.importorskip("numpy") + + with pytest.raises(nx.NetworkXError, match="n must be a positive integer"): + nx.maybe_regular_expander(n=-1, d=2) + + with pytest.raises(nx.NetworkXError, match="d must be greater than or equal to 2"): + nx.maybe_regular_expander(n=10, d=0) + + with pytest.raises(nx.NetworkXError, match="Need n-1>= d to have room"): + nx.maybe_regular_expander(n=5, d=6) + + +def test_is_regular_expander_badinput(): + pytest.importorskip("numpy") + pytest.importorskip("scipy") + + with pytest.raises(nx.NetworkXError, match="epsilon must be non negative"): + nx.is_regular_expander(nx.Graph(), epsilon=-1) + + +def test_random_regular_expander_badinput(): + pytest.importorskip("numpy") + pytest.importorskip("scipy") + + with pytest.raises(nx.NetworkXError, match="n must be a positive integer"): + nx.random_regular_expander_graph(n=-1, d=2) + + with pytest.raises(nx.NetworkXError, match="d must be greater than or equal to 2"): + nx.random_regular_expander_graph(n=10, d=0) + + with pytest.raises(nx.NetworkXError, match="Need n-1>= d to have room"): + nx.random_regular_expander_graph(n=5, d=6) + + with pytest.raises(nx.NetworkXError, match="epsilon must be non negative"): + nx.random_regular_expander_graph(n=4, d=2, epsilon=-1) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_geometric.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_geometric.py new file mode 100644 index 0000000..fc4ce66 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_geometric.py @@ -0,0 +1,488 @@ +import math +import random +from itertools import combinations + +import pytest + +import networkx as nx + + +def l1dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + +class TestRandomGeometricGraph: + """Unit tests for :func:`~networkx.random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.random_geometric_graph(50, 0.25) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. + else: + assert not math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. + G = nx.random_geometric_graph(50, 0.25, p=1) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert l1dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. 
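+ # (p=1 selects the Minkowski 1-norm, i.e. the Manhattan distance
+ # computed by l1dist above, so both branches use the same metric.)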
+ else: + assert not l1dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.random_geometric_graph(nodes, 0.25) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + # Nonadjacent vertices must be at greater distance. + else: + assert not math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_pos_name(self): + G = nx.random_geometric_graph(50, 0.25, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +class TestSoftRandomGeometricGraph: + """Unit tests for :func:`~networkx.soft_random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.soft_random_geometric_graph(50, 0.25, seed=42) + assert len(G) == 50 + G = nx.soft_random_geometric_graph(range(50), 0.25, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.soft_random_geometric_graph(50, 0.25) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + + # Use the L1 metric. + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.soft_random_geometric_graph(50, 0.25, p=1) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.soft_random_geometric_graph(nodes, 0.25) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p_dist_default(self): + """Tests default p_dict = 0.5 returns graph with edge count <= RGG with + same n, radius, dim and positions + """ + nodes = 50 + dim = 2 + pos = {v: [random.random() for i in range(dim)] for v in range(nodes)} + RGG = nx.random_geometric_graph(50, 0.25, pos=pos) + SRGG = nx.soft_random_geometric_graph(50, 0.25, pos=pos) + assert len(SRGG.edges()) <= len(RGG.edges()) + + def test_p_dist_zero(self): + """Tests if p_dict = 0 returns disconnected graph with 0 edges""" + + def p_dist(dist): + return 0 + + G = nx.soft_random_geometric_graph(50, 0.25, p_dist=p_dist) + assert len(G.edges) == 0 + + def test_pos_name(self): + G = nx.soft_random_geometric_graph(50, 0.25, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +def join(G, u, v, theta, alpha, metric): + """Returns ``True`` if and only if the nodes whose attributes are + ``du`` and ``dv`` should be joined, according to the threshold + condition for geographical threshold graphs. + + ``G`` is an undirected NetworkX graph, and ``u`` and ``v`` are nodes + in that graph. The nodes must have node attributes ``'pos'`` and + ``'weight'``. + + ``metric`` is a distance metric. 
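+ The condition evaluated below is
+ ``(weight(u) + weight(v)) * metric(pos(u), pos(v)) ** alpha >= theta``.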
+ """ + du, dv = G.nodes[u], G.nodes[v] + u_pos, v_pos = du["pos"], dv["pos"] + u_weight, v_weight = du["weight"], dv["weight"] + return (u_weight + v_weight) * metric(u_pos, v_pos) ** alpha >= theta + + +class TestGeographicalThresholdGraph: + """Unit tests for :func:`~networkx.geographical_threshold_graph`""" + + def test_number_of_nodes(self): + G = nx.geographical_threshold_graph(50, 100, seed=42) + assert len(G) == 50 + G = nx.geographical_threshold_graph(range(50), 100, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if their + distances meet the given threshold. + """ + # Use the Euclidean metric and alpha = -2 + # the default according to the documentation. + G = nx.geographical_threshold_graph(50, 10) + for u, v in combinations(G, 2): + # Adjacent vertices must exceed the threshold. + if v in G[u]: + assert join(G, u, v, 10, -2, math.dist) + # Nonadjacent vertices must not exceed the threshold. + else: + assert not join(G, u, v, 10, -2, math.dist) + + def test_metric(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. + G = nx.geographical_threshold_graph(50, 10, metric=l1dist) + for u, v in combinations(G, 2): + # Adjacent vertices must exceed the threshold. + if v in G[u]: + assert join(G, u, v, 10, -2, l1dist) + # Nonadjacent vertices must not exceed the threshold. + else: + assert not join(G, u, v, 10, -2, l1dist) + + def test_p_dist_zero(self): + """Tests if p_dict = 0 returns disconnected graph with 0 edges""" + + def p_dist(dist): + return 0 + + G = nx.geographical_threshold_graph(50, 1, p_dist=p_dist) + assert len(G.edges) == 0 + + def test_pos_weight_name(self): + gtg = nx.geographical_threshold_graph + G = gtg(50, 100, seed=42, pos_name="coords", weight_name="wt") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + assert all(d["wt"] > 0 for n, d in G.nodes.items()) + + +class TestWaxmanGraph: + """Unit tests for the :func:`~networkx.waxman_graph` function.""" + + def test_number_of_nodes_1(self): + G = nx.waxman_graph(50, 0.5, 0.1, seed=42) + assert len(G) == 50 + G = nx.waxman_graph(range(50), 0.5, 0.1, seed=42) + assert len(G) == 50 + + def test_number_of_nodes_2(self): + G = nx.waxman_graph(50, 0.5, 0.1, L=1) + assert len(G) == 50 + G = nx.waxman_graph(range(50), 0.5, 0.1, L=1) + assert len(G) == 50 + + def test_metric(self): + """Tests for providing an alternate distance metric to the generator.""" + # Use the L1 metric. 
+ G = nx.waxman_graph(50, 0.5, 0.1, metric=l1dist) + assert len(G) == 50 + + def test_pos_name(self): + G = nx.waxman_graph(50, 0.5, 0.1, seed=42, pos_name="coords") + assert all(len(d["coords"]) == 2 for n, d in G.nodes.items()) + + +class TestNavigableSmallWorldGraph: + def test_navigable_small_world(self): + G = nx.navigable_small_world_graph(5, p=1, q=0, seed=42) + gg = nx.grid_2d_graph(5, 5).to_directed() + assert nx.is_isomorphic(G, gg) + + G = nx.navigable_small_world_graph(5, p=1, q=0, dim=3) + gg = nx.grid_graph([5, 5, 5]).to_directed() + assert nx.is_isomorphic(G, gg) + + G = nx.navigable_small_world_graph(5, p=1, q=0, dim=1) + gg = nx.grid_graph([5]).to_directed() + assert nx.is_isomorphic(G, gg) + + def test_invalid_diameter_value(self): + with pytest.raises(nx.NetworkXException, match=".*p must be >= 1"): + nx.navigable_small_world_graph(5, p=0, q=0, dim=1) + + def test_invalid_long_range_connections_value(self): + with pytest.raises(nx.NetworkXException, match=".*q must be >= 0"): + nx.navigable_small_world_graph(5, p=1, q=-1, dim=1) + + def test_invalid_exponent_for_decaying_probability_value(self): + with pytest.raises(nx.NetworkXException, match=".*r must be >= 0"): + nx.navigable_small_world_graph(5, p=1, q=0, r=-1, dim=1) + + def test_r_between_0_and_1(self): + """Smoke test for radius in range [0, 1]""" + # q=0 means no long-range connections + G = nx.navigable_small_world_graph(3, p=1, q=0, r=0.5, dim=2, seed=42) + expected = nx.grid_2d_graph(3, 3, create_using=nx.DiGraph) + assert nx.utils.graphs_equal(G, expected) + + @pytest.mark.parametrize("seed", range(2478, 2578, 10)) + def test_r_general_scaling(self, seed): + """The probability of adding a long-range edge scales with `1 / dist**r`, + so a navigable_small_world graph created with r < 1 should generally + result in more edges than a navigable_small_world graph with r >= 1 + (for 0 < q << n). + + N.B. this is probabilistic, so this test may not hold for all seeds.""" + G1 = nx.navigable_small_world_graph(7, q=3, r=0.5, seed=seed) + G2 = nx.navigable_small_world_graph(7, q=3, r=1, seed=seed) + G3 = nx.navigable_small_world_graph(7, q=3, r=2, seed=seed) + assert G1.number_of_edges() > G2.number_of_edges() + assert G2.number_of_edges() > G3.number_of_edges() + + +class TestThresholdedRandomGeometricGraph: + """Unit tests for :func:`~networkx.thresholded_random_geometric_graph`""" + + def test_number_of_nodes(self): + G = nx.thresholded_random_geometric_graph(50, 0.2, 0.1, seed=42) + assert len(G) == 50 + G = nx.thresholded_random_geometric_graph(range(50), 0.2, 0.1, seed=42) + assert len(G) == 50 + + def test_distances(self): + """Tests that pairs of vertices adjacent if and only if they are + within the prescribed radius. + """ + # Use the Euclidean metric, the default according to the + # documentation. + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, seed=42) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_p(self): + """Tests for providing an alternate distance metric to the generator.""" + + # Use the L1 metric. + def dist(x, y): + return sum(abs(a - b) for a, b in zip(x, y)) + + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, p=1, seed=42) + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. 
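+ # Adjacency in a thresholded RGG also requires the two node weights
+ # to sum to at least theta (0.1 here), so closeness is necessary but
+ # not sufficient; hence only the forward implication is checked.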
+ if v in G[u]: + assert dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_node_names(self): + """Tests using values other than sequential numbers as node IDs.""" + import string + + nodes = list(string.ascii_lowercase) + G = nx.thresholded_random_geometric_graph(nodes, 0.25, 0.1, seed=42) + assert len(G) == len(nodes) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert math.dist(G.nodes[u]["pos"], G.nodes[v]["pos"]) <= 0.25 + + def test_theta(self): + """Tests that pairs of vertices adjacent if and only if their sum + weights exceeds the threshold parameter theta. + """ + G = nx.thresholded_random_geometric_graph(50, 0.25, 0.1, seed=42) + + for u, v in combinations(G, 2): + # Adjacent vertices must be within the given distance. + if v in G[u]: + assert (G.nodes[u]["weight"] + G.nodes[v]["weight"]) >= 0.1 + + def test_pos_name(self): + trgg = nx.thresholded_random_geometric_graph + G = trgg(50, 0.25, 0.1, seed=42, pos_name="p", weight_name="wt") + assert all(len(d["p"]) == 2 for n, d in G.nodes.items()) + assert all(d["wt"] > 0 for n, d in G.nodes.items()) + + +def test_geometric_edges_pos_attribute(): + G = nx.Graph() + G.add_nodes_from( + [ + (0, {"position": (0, 0)}), + (1, {"position": (0, 1)}), + (2, {"position": (1, 0)}), + ] + ) + expected_edges = [(0, 1), (0, 2)] + assert expected_edges == nx.geometric_edges(G, radius=1, pos_name="position") + + +def test_geometric_edges_raises_no_pos(): + G = nx.path_graph(3) + msg = "all nodes. must have a '" + with pytest.raises(nx.NetworkXError, match=msg): + nx.geometric_edges(G, radius=1) + + +def test_number_of_nodes_S1(): + G = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=2.7, mean_degree=10, seed=42 + ) + assert len(G) == 100 + + +def test_set_attributes_S1(): + G = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=2.7, mean_degree=10, seed=42 + ) + kappas = nx.get_node_attributes(G, "kappa") + assert len(kappas) == 100 + thetas = nx.get_node_attributes(G, "theta") + assert len(thetas) == 100 + radii = nx.get_node_attributes(G, "radius") + assert len(radii) == 100 + + +def test_mean_kappas_mean_degree_S1(): + G = nx.geometric_soft_configuration_graph( + beta=2.5, n=50, gamma=2.7, mean_degree=10, seed=8023 + ) + + kappas = nx.get_node_attributes(G, "kappa") + mean_kappas = sum(kappas.values()) / len(kappas) + assert math.fabs(mean_kappas - 10) < 0.5 + + degrees = dict(G.degree()) + mean_degree = sum(degrees.values()) / len(degrees) + assert math.fabs(mean_degree - 10) < 1 + + +def test_dict_kappas_S1(): + kappas = dict.fromkeys(range(1000), 10) + G = nx.geometric_soft_configuration_graph(beta=1, kappas=kappas) + assert len(G) == 1000 + kappas = nx.get_node_attributes(G, "kappa") + assert all(kappa == 10 for kappa in kappas.values()) + + +def test_beta_clustering_S1(): + G1 = nx.geometric_soft_configuration_graph( + beta=1.5, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + G2 = nx.geometric_soft_configuration_graph( + beta=3.0, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + assert nx.average_clustering(G1) < nx.average_clustering(G2) + + +def test_wrong_parameters_S1(): + with pytest.raises( + nx.NetworkXError, + match="Please provide either kappas, or all 3 of: n, gamma and mean_degree.", + ): + G = nx.geometric_soft_configuration_graph( + beta=1.5, gamma=3.5, mean_degree=10, seed=42 + ) + + with pytest.raises( + nx.NetworkXError, + match="When kappas is input, n, gamma and mean_degree must not be.", + ): + kappas = 
dict.fromkeys(range(1000), 10) + G = nx.geometric_soft_configuration_graph( + beta=1.5, kappas=kappas, gamma=2.3, seed=42 + ) + + with pytest.raises( + nx.NetworkXError, + match="Please provide either kappas, or all 3 of: n, gamma and mean_degree.", + ): + G = nx.geometric_soft_configuration_graph(beta=1.5, seed=42) + + +def test_negative_beta_S1(): + with pytest.raises( + nx.NetworkXError, match="The parameter beta cannot be smaller or equal to 0." + ): + G = nx.geometric_soft_configuration_graph( + beta=-1, n=100, gamma=2.3, mean_degree=10, seed=42 + ) + + +def test_non_zero_clustering_beta_lower_one_S1(): + G = nx.geometric_soft_configuration_graph( + beta=0.5, n=100, gamma=3.5, mean_degree=10, seed=42 + ) + assert nx.average_clustering(G) > 0 + + +def test_mean_degree_influence_on_connectivity_S1(): + low_mean_degree = 2 + high_mean_degree = 20 + G_low = nx.geometric_soft_configuration_graph( + beta=1.2, n=100, gamma=2.7, mean_degree=low_mean_degree, seed=42 + ) + G_high = nx.geometric_soft_configuration_graph( + beta=1.2, n=100, gamma=2.7, mean_degree=high_mean_degree, seed=42 + ) + assert nx.number_connected_components(G_low) > nx.number_connected_components( + G_high + ) + + +def test_compare_mean_kappas_different_gammas_S1(): + G1 = nx.geometric_soft_configuration_graph( + beta=1.5, n=20, gamma=2.7, mean_degree=5, seed=42 + ) + G2 = nx.geometric_soft_configuration_graph( + beta=1.5, n=20, gamma=3.5, mean_degree=5, seed=42 + ) + kappas1 = nx.get_node_attributes(G1, "kappa") + mean_kappas1 = sum(kappas1.values()) / len(kappas1) + kappas2 = nx.get_node_attributes(G2, "kappa") + mean_kappas2 = sum(kappas2.values()) / len(kappas2) + assert math.fabs(mean_kappas1 - mean_kappas2) < 1 diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_harary_graph.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_harary_graph.py new file mode 100644 index 0000000..8a0142d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_harary_graph.py @@ -0,0 +1,133 @@ +"""Unit tests for the :mod:`networkx.generators.harary_graph` module.""" + +import pytest + +import networkx as nx +from networkx.algorithms.isomorphism.isomorph import is_isomorphic +from networkx.generators.harary_graph import hkn_harary_graph, hnm_harary_graph + + +class TestHararyGraph: + """ + Suppose n nodes, m >= n-1 edges, d = 2m // n, r = 2m % n + """ + + def test_hnm_harary_graph(self): + # When d is even and r = 0, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,d/2+1))) + for n, m in [(5, 5), (6, 12), (7, 14)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) + assert is_isomorphic(G1, G2) + + # When d is even and r > 0, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,d/2+1))) + # with r edges added arbitrarily + for n, m in [(5, 7), (6, 13), (7, 16)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + G2 = nx.circulant_graph(n, list(range(1, d // 2 + 1))) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # When d is odd and n is even and r = 0, the hnm_harary_graph(n,m) + # is the circulant_graph(n, list(range(1,(d+1)/2) plus [n//2]) + for n, m in [(6, 9), (8, 12), (10, 15)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert is_isomorphic(G1, G2) + + # When d is odd and n is even and r > 0, the hnm_harary_graph(n,m) + # is the circulant_graph(n, 
list(range(1,(d+1)/2) plus [n//2]) + # with r edges added arbitrarily + for n, m in [(6, 10), (8, 13), (10, 17)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # When d is odd and n is odd, the hnm_harary_graph(n,m) is + # the circulant_graph(n, list(range(1,(d+1)/2)) + # with m - n*(d-1)/2 edges added arbitrarily + for n, m in [(5, 4), (7, 12), (9, 14)]: + G1 = hnm_harary_graph(n, m) + d = 2 * m // n + L = list(range(1, (d + 1) // 2)) + G2 = nx.circulant_graph(n, L) + assert set(G2.edges) < set(G1.edges) + assert G1.number_of_edges() == m + + # Raise NetworkXError if n<1 + n = 0 + m = 0 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + # Raise NetworkXError if m < n-1 + n = 6 + m = 4 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + # Raise NetworkXError if m > n(n-1)/2 + n = 6 + m = 16 + pytest.raises(nx.NetworkXError, hnm_harary_graph, n, m) + + """ + Suppose connectivity k, number of nodes n + """ + + def test_hkn_harary_graph(self): + # When k == 1, the hkn_harary_graph(k,n) is + # the path_graph(n) + for k, n in [(1, 6), (1, 7)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.path_graph(n) + assert is_isomorphic(G1, G2) + + # When k is even, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,k/2+1))) + for k, n in [(2, 6), (2, 7), (4, 6), (4, 7)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.circulant_graph(n, list(range(1, k // 2 + 1))) + assert is_isomorphic(G1, G2) + + # When k is odd and n is even, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,(k+1)/2)) plus [n/2]) + for k, n in [(3, 6), (5, 8), (7, 10)]: + G1 = hkn_harary_graph(k, n) + L = list(range(1, (k + 1) // 2)) + L.append(n // 2) + G2 = nx.circulant_graph(n, L) + assert is_isomorphic(G1, G2) + + # When k is odd and n is odd, the hkn_harary_graph(k,n) is + # the circulant_graph(n, list(range(1,(k+1)/2))) with + # n//2+1 edges added between node i and node i+n//2+1 + for k, n in [(3, 5), (5, 9), (7, 11)]: + G1 = hkn_harary_graph(k, n) + G2 = nx.circulant_graph(n, list(range(1, (k + 1) // 2))) + eSet1 = set(G1.edges) + eSet2 = set(G2.edges) + eSet3 = set() + half = n // 2 + for i in range(half + 1): + # add half+1 edges between i and i+half + eSet3.add((i, (i + half) % n)) + assert eSet1 == eSet2 | eSet3 + + # Raise NetworkXError if k<1 + k = 0 + n = 0 + pytest.raises(nx.NetworkXError, hkn_harary_graph, k, n) + + # Raise NetworkXError if ndegree_count[1]*degree_count[4] + joint_degrees_3 = { + 1: {4: 2}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 2, 2: 2, 3: 1}, + } + assert not is_valid_joint_degree(joint_degrees_3) + + # test condition 5 + # joint_degrees_5[1][1] not even + joint_degrees_5 = {1: {1: 9}} + assert not is_valid_joint_degree(joint_degrees_5) + + +def test_joint_degree_graph(ntimes=10): + for _ in range(ntimes): + seed = int(time.time()) + + n, m, p = 20, 10, 1 + # generate random graph with model powerlaw_cluster and calculate + # its joint degree + g = powerlaw_cluster_graph(n, m, p, seed=seed) + joint_degrees_g = degree_mixing_dict(g, normalized=False) + + # generate simple undirected graph with given joint degree + # joint_degrees_g + G = joint_degree_graph(joint_degrees_g) + joint_degrees_G = degree_mixing_dict(G, normalized=False) + + # assert that the given joint degree is equal to the generated + # graph's joint degree + assert joint_degrees_g == joint_degrees_G + + +def 
test_is_valid_directed_joint_degree(): + in_degrees = [0, 1, 1, 2] + out_degrees = [1, 1, 1, 1] + nkk = {1: {1: 2, 2: 2}} + assert is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, values are not integers. + nkk = {1: {1: 1.5, 2: 2.5}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, number of edges between 1-2 are insufficient. + nkk = {1: {1: 2, 2: 1}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, in/out degree sequences have different number of nodes. + out_degrees = [1, 1, 1] + nkk = {1: {1: 2, 2: 2}} + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + # not realizable, degree sequences have fewer than required nodes. + in_degrees = [0, 1, 2] + assert not is_valid_directed_joint_degree(in_degrees, out_degrees, nkk) + + +def test_directed_joint_degree_graph(n=15, m=100, ntimes=1000): + for _ in range(ntimes): + # generate gnm random graph and calculate its joint degree. + g = gnm_random_graph(n, m, None, directed=True) + + # in-degree sequence of g as a list of integers. + in_degrees = list(dict(g.in_degree()).values()) + # out-degree sequence of g as a list of integers. + out_degrees = list(dict(g.out_degree()).values()) + nkk = degree_mixing_dict(g) + + # generate simple directed graph with given degree sequence and joint + # degree matrix. + G = directed_joint_degree_graph(in_degrees, out_degrees, nkk) + + # assert degree sequence correctness. + assert in_degrees == list(dict(G.in_degree()).values()) + assert out_degrees == list(dict(G.out_degree()).values()) + # assert joint degree matrix correctness. + assert nkk == degree_mixing_dict(G) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_lattice.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_lattice.py new file mode 100644 index 0000000..5012324 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_lattice.py @@ -0,0 +1,246 @@ +"""Unit tests for the :mod:`networkx.generators.lattice` module.""" + +from itertools import product + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +class TestGrid2DGraph: + """Unit tests for :func:`networkx.generators.lattice.grid_2d_graph`""" + + def test_number_of_vertices(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + assert len(G) == m * n + + def test_degree_distribution(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + expected_histogram = [0, 0, 4, 2 * (m + n) - 8, (m - 2) * (n - 2)] + assert nx.degree_histogram(G) == expected_histogram + + def test_directed(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + H = nx.grid_2d_graph(m, n, create_using=nx.DiGraph()) + assert H.succ == G.adj + assert H.pred == G.adj + + def test_multigraph(self): + m, n = 5, 6 + G = nx.grid_2d_graph(m, n) + H = nx.grid_2d_graph(m, n, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.grid_2d_graph(0, 0, periodic=True) + assert dict(G.degree()) == {} + + for m, n, H in [ + (2, 2, nx.cycle_graph(4)), + (1, 7, nx.cycle_graph(7)), + (7, 1, nx.cycle_graph(7)), + (2, 5, nx.circular_ladder_graph(5)), + (5, 2, nx.circular_ladder_graph(5)), + (2, 4, nx.cubical_graph()), + (4, 2, nx.cubical_graph()), + ]: + G = nx.grid_2d_graph(m, n, periodic=True) + assert nx.could_be_isomorphic(G, H) + + def test_periodic_iterable(self): + m, n = 3, 7 + for a, b in product([0, 1], [0, 1]): + G = 
nx.grid_2d_graph(m, n, periodic=(a, b)) + assert G.number_of_nodes() == m * n + assert G.number_of_edges() == (m + a - 1) * n + (n + b - 1) * m + + def test_periodic_directed(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.DiGraph()) + assert H.succ == G.adj + assert H.pred == G.adj + + def test_periodic_multigraph(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(4, 2, periodic=True, create_using=nx.MultiGraph()) + assert list(G.edges()) == list(H.edges()) + + def test_exceptions(self): + pytest.raises(nx.NetworkXError, nx.grid_2d_graph, -3, 2) + pytest.raises(nx.NetworkXError, nx.grid_2d_graph, 3, -2) + pytest.raises(TypeError, nx.grid_2d_graph, 3.3, 2) + pytest.raises(TypeError, nx.grid_2d_graph, 3, 2.2) + + def test_node_input(self): + G = nx.grid_2d_graph(4, 2, periodic=True) + H = nx.grid_2d_graph(range(4), range(2), periodic=True) + assert nx.is_isomorphic(H, G) + H = nx.grid_2d_graph("abcd", "ef", periodic=True) + assert nx.is_isomorphic(H, G) + G = nx.grid_2d_graph(5, 6) + H = nx.grid_2d_graph(range(5), range(6)) + assert edges_equal(H, G) + + +class TestGridGraph: + """Unit tests for :func:`networkx.generators.lattice.grid_graph`""" + + def test_grid_graph(self): + """grid_graph([n,m]) is a connected simple graph with the + following properties: + number_of_nodes = n*m + degree_histogram = [0,0,4,2*(n+m)-8,(n-2)*(m-2)] + """ + for n, m in [(3, 5), (5, 3), (4, 5), (5, 4)]: + dim = [n, m] + g = nx.grid_graph(dim) + assert len(g) == n * m + assert nx.degree_histogram(g) == [ + 0, + 0, + 4, + 2 * (n + m) - 8, + (n - 2) * (m - 2), + ] + + for n, m in [(1, 5), (5, 1)]: + dim = [n, m] + g = nx.grid_graph(dim) + assert len(g) == n * m + assert nx.is_isomorphic(g, nx.path_graph(5)) + + # mg = nx.grid_graph([n,m], create_using=MultiGraph()) + # assert_equal(mg.edges(), g.edges()) + + def test_node_input(self): + G = nx.grid_graph([range(7, 9), range(3, 6)]) + assert len(G) == 2 * 3 + assert nx.is_isomorphic(G, nx.grid_graph([2, 3])) + + def test_periodic_iterable(self): + m, n, k = 3, 7, 5 + for a, b, c in product([0, 1], [0, 1], [0, 1]): + G = nx.grid_graph([m, n, k], periodic=(a, b, c)) + num_e = (m + a - 1) * n * k + (n + b - 1) * m * k + (k + c - 1) * m * n + assert G.number_of_nodes() == m * n * k + assert G.number_of_edges() == num_e + + +class TestHypercubeGraph: + """Unit tests for :func:`networkx.generators.lattice.hypercube_graph`""" + + def test_special_cases(self): + for n, H in [ + (0, nx.null_graph()), + (1, nx.path_graph(2)), + (2, nx.cycle_graph(4)), + (3, nx.cubical_graph()), + ]: + G = nx.hypercube_graph(n) + assert nx.could_be_isomorphic(G, H) + + def test_degree_distribution(self): + for n in range(1, 10): + G = nx.hypercube_graph(n) + expected_histogram = [0] * n + [2**n] + assert nx.degree_histogram(G) == expected_histogram + + +class TestTriangularLatticeGraph: + "Tests for :func:`networkx.generators.lattice.triangular_lattice_graph`" + + def test_lattice_points(self): + """Tests that the graph is really a triangular lattice.""" + for m, n in [(2, 3), (2, 2), (2, 1), (3, 3), (3, 2), (3, 4)]: + G = nx.triangular_lattice_graph(m, n) + N = (n + 1) // 2 + assert len(G) == (m + 1) * (1 + N) - (n % 2) * ((m + 1) // 2) + for i, j in G.nodes(): + nbrs = G[(i, j)] + if i < N: + assert (i + 1, j) in nbrs + if j < m: + assert (i, j + 1) in nbrs + if j < m and (i > 0 or j % 2) and (i < N or (j + 1) % 2): + assert (i + 1, j + 1) in nbrs or (i - 1, j + 1) in nbrs + + def test_directed(self): + 
"""Tests for creating a directed triangular lattice.""" + G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) + H = nx.triangular_lattice_graph(3, 4, create_using=nx.DiGraph()) + assert H.is_directed() + for u, v in H.edges(): + assert v[1] >= u[1] + if v[1] == u[1]: + assert v[0] > u[0] + + def test_multigraph(self): + """Tests for creating a triangular lattice multigraph.""" + G = nx.triangular_lattice_graph(3, 4, create_using=nx.Graph()) + H = nx.triangular_lattice_graph(3, 4, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.triangular_lattice_graph(4, 6, periodic=True) + assert len(G) == 12 + assert G.size() == 36 + # all degrees are 6 + assert len([n for n, d in G.degree() if d != 6]) == 0 + G = nx.triangular_lattice_graph(5, 7, periodic=True) + TLG = nx.triangular_lattice_graph + pytest.raises(nx.NetworkXError, TLG, 2, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 4, 4, periodic=True) + pytest.raises(nx.NetworkXError, TLG, 2, 6, periodic=True) + + +class TestHexagonalLatticeGraph: + "Tests for :func:`networkx.generators.lattice.hexagonal_lattice_graph`" + + def test_lattice_points(self): + """Tests that the graph is really a hexagonal lattice.""" + for m, n in [(4, 5), (4, 4), (4, 3), (3, 2), (3, 3), (3, 5)]: + G = nx.hexagonal_lattice_graph(m, n) + assert len(G) == 2 * (m + 1) * (n + 1) - 2 + C_6 = nx.cycle_graph(6) + hexagons = [ + [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)], + [(0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4)], + [(1, 1), (1, 2), (1, 3), (2, 1), (2, 2), (2, 3)], + [(2, 0), (2, 1), (2, 2), (3, 0), (3, 1), (3, 2)], + [(2, 2), (2, 3), (2, 4), (3, 2), (3, 3), (3, 4)], + ] + for hexagon in hexagons: + assert nx.is_isomorphic(G.subgraph(hexagon), C_6) + + def test_directed(self): + """Tests for creating a directed hexagonal lattice.""" + G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) + H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.DiGraph()) + assert H.is_directed() + pos = nx.get_node_attributes(H, "pos") + for u, v in H.edges(): + assert pos[v][1] >= pos[u][1] + if pos[v][1] == pos[u][1]: + assert pos[v][0] > pos[u][0] + + def test_multigraph(self): + """Tests for creating a hexagonal lattice multigraph.""" + G = nx.hexagonal_lattice_graph(3, 5, create_using=nx.Graph()) + H = nx.hexagonal_lattice_graph(3, 5, create_using=nx.MultiGraph()) + assert list(H.edges()) == list(G.edges()) + + def test_periodic(self): + G = nx.hexagonal_lattice_graph(4, 6, periodic=True) + assert len(G) == 48 + assert G.size() == 72 + # all degrees are 3 + assert len([n for n, d in G.degree() if d != 3]) == 0 + G = nx.hexagonal_lattice_graph(5, 8, periodic=True) + HLG = nx.hexagonal_lattice_graph + pytest.raises(nx.NetworkXError, HLG, 2, 7, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 1, 4, periodic=True) + pytest.raises(nx.NetworkXError, HLG, 2, 1, periodic=True) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_line.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_line.py new file mode 100644 index 0000000..7f5454e --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_line.py @@ -0,0 +1,309 @@ +import pytest + +import networkx as nx +from networkx.generators import line +from networkx.utils import edges_equal + + +class TestGeneratorLine: + def test_star(self): + G = nx.star_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, nx.complete_graph(5)) + + def test_path(self): + G = 
nx.path_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, nx.path_graph(4)) + + def test_cycle(self): + G = nx.cycle_graph(5) + L = nx.line_graph(G) + assert nx.is_isomorphic(L, G) + + def test_digraph1(self): + G = nx.DiGraph([(0, 1), (0, 2), (0, 3)]) + L = nx.line_graph(G) + # no edge graph, but with nodes + assert L.adj == {(0, 1): {}, (0, 2): {}, (0, 3): {}} + + def test_multigraph1(self): + G = nx.MultiGraph([(0, 1), (0, 1), (1, 0), (0, 2), (2, 0), (0, 3)]) + L = nx.line_graph(G) + # no edge graph, but with nodes + assert edges_equal( + L.edges(), + [ + ((0, 3, 0), (0, 1, 0)), + ((0, 3, 0), (0, 2, 0)), + ((0, 3, 0), (0, 2, 1)), + ((0, 3, 0), (0, 1, 1)), + ((0, 3, 0), (0, 1, 2)), + ((0, 1, 0), (0, 1, 1)), + ((0, 1, 0), (0, 2, 0)), + ((0, 1, 0), (0, 1, 2)), + ((0, 1, 0), (0, 2, 1)), + ((0, 1, 1), (0, 1, 2)), + ((0, 1, 1), (0, 2, 0)), + ((0, 1, 1), (0, 2, 1)), + ((0, 1, 2), (0, 2, 0)), + ((0, 1, 2), (0, 2, 1)), + ((0, 2, 0), (0, 2, 1)), + ], + ) + + def test_multigraph2(self): + G = nx.MultiGraph([(1, 2), (2, 1)]) + L = nx.line_graph(G) + assert edges_equal(L.edges(), [((1, 2, 0), (1, 2, 1))]) + + def test_multidigraph1(self): + G = nx.MultiDiGraph([(1, 2), (2, 1)]) + L = nx.line_graph(G) + assert edges_equal(L.edges(), [((1, 2, 0), (2, 1, 0)), ((2, 1, 0), (1, 2, 0))]) + + def test_multidigraph2(self): + G = nx.MultiDiGraph([(0, 1), (0, 1), (0, 1), (1, 2)]) + L = nx.line_graph(G) + assert edges_equal( + L.edges(), + [((0, 1, 0), (1, 2, 0)), ((0, 1, 1), (1, 2, 0)), ((0, 1, 2), (1, 2, 0))], + ) + + def test_digraph2(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + def test_create1(self): + G = nx.DiGraph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G, create_using=nx.Graph()) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + def test_create2(self): + G = nx.Graph([(0, 1), (1, 2), (2, 3)]) + L = nx.line_graph(G, create_using=nx.DiGraph()) + assert edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) + + +class TestGeneratorInverseLine: + def test_example(self): + G = nx.Graph() + G_edges = [ + [1, 2], + [1, 3], + [1, 4], + [1, 5], + [2, 3], + [2, 5], + [2, 6], + [2, 7], + [3, 4], + [3, 5], + [6, 7], + [6, 8], + [7, 8], + ] + G.add_edges_from(G_edges) + H = nx.inverse_line_graph(G) + solution = nx.Graph() + solution_edges = [ + ("a", "b"), + ("a", "c"), + ("a", "d"), + ("a", "e"), + ("c", "d"), + ("e", "f"), + ("e", "g"), + ("f", "g"), + ] + solution.add_edges_from(solution_edges) + assert nx.is_isomorphic(H, solution) + + def test_example_2(self): + G = nx.Graph() + G_edges = [[1, 2], [1, 3], [2, 3], [3, 4], [3, 5], [4, 5]] + G.add_edges_from(G_edges) + H = nx.inverse_line_graph(G) + solution = nx.Graph() + solution_edges = [("a", "c"), ("b", "c"), ("c", "d"), ("d", "e"), ("d", "f")] + solution.add_edges_from(solution_edges) + assert nx.is_isomorphic(H, solution) + + def test_pair(self): + G = nx.path_graph(2) + H = nx.inverse_line_graph(G) + solution = nx.path_graph(3) + assert nx.is_isomorphic(H, solution) + + def test_line(self): + G = nx.path_graph(5) + solution = nx.path_graph(6) + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, solution) + + def test_triangle_graph(self): + G = nx.complete_graph(3) + H = nx.inverse_line_graph(G) + alternative_solution = nx.Graph() + alternative_solution.add_edges_from([[0, 1], [0, 2], [0, 3]]) + # there are two alternative inverse line graphs for this case + # so long as we get one of them the test 
should pass + assert nx.is_isomorphic(H, G) or nx.is_isomorphic(H, alternative_solution) + + def test_cycle(self): + G = nx.cycle_graph(5) + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, G) + + def test_empty(self): + G = nx.Graph() + H = nx.inverse_line_graph(G) + assert nx.is_isomorphic(H, nx.complete_graph(1)) + + def test_K1(self): + G = nx.complete_graph(1) + H = nx.inverse_line_graph(G) + solution = nx.path_graph(2) + assert nx.is_isomorphic(H, solution) + + def test_edgeless_graph(self): + G = nx.empty_graph(5) + with pytest.raises(nx.NetworkXError, match="edgeless graph"): + nx.inverse_line_graph(G) + + def test_selfloops_error(self): + G = nx.cycle_graph(4) + G.add_edge(0, 0) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + def test_non_line_graphs(self): + # Tests several known non-line graphs for impossibility + # Adapted from L.W.Beineke, "Characterizations of derived graphs" + + # claw graph + claw = nx.star_graph(3) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, claw) + + # wheel graph with 6 nodes + wheel = nx.wheel_graph(6) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, wheel) + + # K5 with one edge remove + K5m = nx.complete_graph(5) + K5m.remove_edge(0, 1) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, K5m) + + # graph without any odd triangles (contains claw as induced subgraph) + G = nx.compose(nx.path_graph(2), nx.complete_bipartite_graph(2, 3)) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + ## Variations on a diamond graph + + # Diamond + 2 edges (+ "roof") + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + G.add_edge(4, 5) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # Diamond + 2 connected edges + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # Diamond + K3 + one edge (+ 2*K3) + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 1), (4, 2), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + G.add_edges_from([(5, 1), (5, 2)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + # 4 triangles + G = nx.diamond_graph() + G.add_edges_from([(4, 0), (4, 1), (5, 2), (5, 3)]) + pytest.raises(nx.NetworkXError, nx.inverse_line_graph, G) + + def test_wrong_graph_type(self): + G = nx.DiGraph() + G_edges = [[0, 1], [0, 2], [0, 3]] + G.add_edges_from(G_edges) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + + G = nx.MultiGraph() + G_edges = [[0, 1], [0, 2], [0, 3]] + G.add_edges_from(G_edges) + pytest.raises(nx.NetworkXNotImplemented, nx.inverse_line_graph, G) + + def test_line_inverse_line_complete(self): + G = nx.complete_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_path(self): + G = nx.path_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_hypercube(self): + G = nx.hypercube_graph(5) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_cycle(self): + G = nx.cycle_graph(10) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_star(self): + G = nx.star_graph(20) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_multipartite(self): + G = 
nx.complete_multipartite_graph(3, 4, 5) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_inverse_line_dgm(self): + G = nx.dorogovtsev_goltsev_mendes_graph(4) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + def test_line_different_node_types(self): + G = nx.path_graph([1, 2, 3, "a", "b", "c"]) + H = nx.line_graph(G) + J = nx.inverse_line_graph(H) + assert nx.is_isomorphic(G, J) + + +class TestGeneratorPrivateFunctions: + def test_triangles_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._triangles, G, (4, 0)) + pytest.raises(nx.NetworkXError, line._triangles, G, (0, 3)) + + def test_odd_triangles_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 4)) + pytest.raises(nx.NetworkXError, line._odd_triangle, G, (0, 1, 3)) + + def test_select_starting_cell_error(self): + G = nx.diamond_graph() + pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (4, 0)) + pytest.raises(nx.NetworkXError, line._select_starting_cell, G, (0, 3)) + + def test_diamond_graph(self): + G = nx.diamond_graph() + for edge in G.edges: + cell = line._select_starting_cell(G, starting_edge=edge) + # Starting cell should always be one of the two triangles + assert len(cell) == 3 + assert all(v in G[u] for u in cell for v in cell if u != v) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_mycielski.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_mycielski.py new file mode 100644 index 0000000..eb12b14 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_mycielski.py @@ -0,0 +1,30 @@ +"""Unit tests for the :mod:`networkx.generators.mycielski` module.""" + +import pytest + +import networkx as nx + + +class TestMycielski: + def test_construction(self): + G = nx.path_graph(2) + M = nx.mycielskian(G) + assert nx.is_isomorphic(M, nx.cycle_graph(5)) + + def test_size(self): + G = nx.path_graph(2) + M = nx.mycielskian(G, 2) + assert len(M) == 11 + assert M.size() == 20 + + def test_mycielski_graph_generator(self): + G = nx.mycielski_graph(1) + assert nx.is_isomorphic(G, nx.empty_graph(1)) + G = nx.mycielski_graph(2) + assert nx.is_isomorphic(G, nx.path_graph(2)) + G = nx.mycielski_graph(3) + assert nx.is_isomorphic(G, nx.cycle_graph(5)) + G = nx.mycielski_graph(4) + assert nx.is_isomorphic(G, nx.mycielskian(nx.cycle_graph(5))) + with pytest.raises(nx.NetworkXError, match="must satisfy n >= 1"): + nx.mycielski_graph(0) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_nonisomorphic_trees.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_nonisomorphic_trees.py new file mode 100644 index 0000000..8bfd12f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_nonisomorphic_trees.py @@ -0,0 +1,56 @@ +""" +Unit tests for WROM algorithm generator in generators/nonisomorphic_trees.py +""" + +import pytest + +import networkx as nx +from networkx.utils import edges_equal + + +class TestGeneratorNonIsomorphicTrees: + def test_tree_structure(self): + # test for tree structure for nx.nonisomorphic_trees() + def f(x): + return list(nx.nonisomorphic_trees(x)) + + for i in f(6): + assert nx.is_tree(i) + for i in f(8): + assert nx.is_tree(i) + + def test_nonisomorphism(self): + # test for nonisomorphism of trees for nx.nonisomorphic_trees() + def f(x): + return list(nx.nonisomorphic_trees(x)) + + trees = 
f(6) + for i in range(len(trees)): + for j in range(i + 1, len(trees)): + assert not nx.is_isomorphic(trees[i], trees[j]) + trees = f(8) + for i in range(len(trees)): + for j in range(i + 1, len(trees)): + assert not nx.is_isomorphic(trees[i], trees[j]) + + def test_number_of_nonisomorphic_trees(self): + # http://oeis.org/A000055 + assert nx.number_of_nonisomorphic_trees(2) == 1 + assert nx.number_of_nonisomorphic_trees(3) == 1 + assert nx.number_of_nonisomorphic_trees(4) == 2 + assert nx.number_of_nonisomorphic_trees(5) == 3 + assert nx.number_of_nonisomorphic_trees(6) == 6 + assert nx.number_of_nonisomorphic_trees(7) == 11 + assert nx.number_of_nonisomorphic_trees(8) == 23 + assert nx.number_of_nonisomorphic_trees(9) == 47 + assert nx.number_of_nonisomorphic_trees(10) == 106 + assert nx.number_of_nonisomorphic_trees(20) == 823065 + assert nx.number_of_nonisomorphic_trees(30) == 14830871802 + + def test_nonisomorphic_trees(self): + def f(x): + return list(nx.nonisomorphic_trees(x)) + + assert edges_equal(f(3)[0].edges(), [(0, 1), (0, 2)]) + assert edges_equal(f(4)[0].edges(), [(0, 1), (0, 3), (1, 2)]) + assert edges_equal(f(4)[1].edges(), [(0, 1), (0, 2), (0, 3)]) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_clustered.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_clustered.py new file mode 100644 index 0000000..8506652 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_clustered.py @@ -0,0 +1,33 @@ +import pytest + +import networkx as nx + + +class TestRandomClusteredGraph: + def test_custom_joint_degree_sequence(self): + node = [1, 1, 1, 2, 1, 2, 0, 0] + tri = [0, 0, 0, 0, 0, 1, 1, 1] + joint_degree_sequence = zip(node, tri) + G = nx.random_clustered_graph(joint_degree_sequence) + assert G.number_of_nodes() == 8 + assert G.number_of_edges() == 7 + + def test_tuple_joint_degree_sequence(self): + G = nx.random_clustered_graph([(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)]) + assert G.number_of_nodes() == 6 + assert G.number_of_edges() == 10 + + def test_invalid_joint_degree_sequence_type(self): + with pytest.raises(nx.NetworkXError, match="Invalid degree sequence"): + nx.random_clustered_graph([[1, 1], [2, 1], [0, 1]]) + + def test_invalid_joint_degree_sequence_value(self): + with pytest.raises(nx.NetworkXError, match="Invalid degree sequence"): + nx.random_clustered_graph([[1, 1], [1, 2], [0, 1]]) + + def test_directed_graph_raises_error(self): + with pytest.raises(nx.NetworkXError, match="Directed Graph not supported"): + nx.random_clustered_graph( + [(1, 2), (2, 1), (1, 1), (1, 1), (1, 1), (2, 0)], + create_using=nx.DiGraph, + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_graphs.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_graphs.py new file mode 100644 index 0000000..3262e54 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_random_graphs.py @@ -0,0 +1,478 @@ +"""Unit tests for the :mod:`networkx.generators.random_graphs` module.""" + +import pytest + +import networkx as nx + +_gnp_generators = [ + nx.gnp_random_graph, + nx.fast_gnp_random_graph, + nx.binomial_graph, + nx.erdos_renyi_graph, +] + + +@pytest.mark.parametrize("generator", _gnp_generators) +@pytest.mark.parametrize("directed", (True, False)) +def test_gnp_generators_negative_edge_probability(generator, directed): + """If the edge probability `p` is <=0, the resulting graph should have no edges.""" + 
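# Any non-positive p should behave like p == 0; the value -1.1 below is just + # an arbitrary representative of that range. +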
G = generator(10, -1.1, directed=directed) + assert len(G) == 10 + assert G.number_of_edges() == 0 + assert G.is_directed() == directed + + +@pytest.mark.parametrize("generator", _gnp_generators) +@pytest.mark.parametrize( + ("directed", "expected_num_edges"), + [(False, 45), (True, 90)], +) +def test_gnp_generators_greater_than_1_edge_probability( + generator, directed, expected_num_edges +): + """If the edge probability `p` is >=1, the resulting graph should be complete.""" + G = generator(10, 1.1, directed=directed) + assert len(G) == 10 + assert G.number_of_edges() == expected_num_edges + assert G.is_directed() == directed + + +@pytest.mark.parametrize("generator", _gnp_generators) +@pytest.mark.parametrize("directed", (True, False)) +def test_gnp_generators_basic(generator, directed): + """If the edge probability `p` is >0 and <1, test only the basic properties.""" + G = generator(10, 0.1, directed=directed) + assert len(G) == 10 + assert G.is_directed() == directed + + +@pytest.mark.parametrize("generator", _gnp_generators) +def test_gnp_generators_for_p_close_to_1(generator): + """If the edge probability `p` is close to 1, the resulting graph should have all edges.""" + runs = 100 + edges = sum( + generator(10, 0.99999, directed=True).number_of_edges() for _ in range(runs) + ) + assert abs(edges / float(runs) - 90) <= runs * 2.0 / 100 + + +@pytest.mark.parametrize("generator", _gnp_generators) +@pytest.mark.parametrize("p", (0.2, 0.8)) +@pytest.mark.parametrize("directed", (True, False)) +def test_gnp_generators_edge_probability(generator, p, directed): + """Test that gnp generators generate edges according to their probability `p`.""" + runs = 5000 + n = 5 + edge_counts = [[0] * n for _ in range(n)] + for i in range(runs): + G = generator(n, p, directed=directed) + for v, w in G.edges: + edge_counts[v][w] += 1 + if not directed: + edge_counts[w][v] += 1 + for v in range(n): + for w in range(n): + if v == w: + # There should be no loops + assert edge_counts[v][w] == 0 + else: + # Each edge should have been generated with probability close to p + assert abs(edge_counts[v][w] / float(runs) - p) <= 0.03 + + +@pytest.mark.parametrize( + "generator", [nx.gnp_random_graph, nx.binomial_graph, nx.erdos_renyi_graph] +) +@pytest.mark.parametrize( + ("seed", "directed", "expected_num_edges"), + [(42, False, 1219), (42, True, 2454), (314, False, 1247), (314, True, 2476)], +) +def test_gnp_random_graph_aliases(generator, seed, directed, expected_num_edges): + """Test that aliases give the same result with the same seed.""" + G = generator(100, 0.25, seed=seed, directed=directed) + assert len(G) == 100 + assert G.number_of_edges() == expected_num_edges + assert G.is_directed() == directed + + +class TestGeneratorsRandom: + def test_random_graph(self): + seed = 42 + G = nx.gnm_random_graph(100, 20, seed) + G = nx.gnm_random_graph(100, 20, seed, directed=True) + G = nx.dense_gnm_random_graph(100, 20, seed) + + G = nx.barabasi_albert_graph(100, 1, seed) + G = nx.barabasi_albert_graph(100, 3, seed) + assert G.number_of_edges() == (97 * 3) + + G = nx.barabasi_albert_graph(100, 3, seed, nx.complete_graph(5)) + assert G.number_of_edges() == (10 + 95 * 3) + + G = nx.extended_barabasi_albert_graph(100, 1, 0, 0, seed) + assert G.number_of_edges() == 99 + G = nx.extended_barabasi_albert_graph(100, 3, 0, 0, seed) + assert G.number_of_edges() == 97 * 3 + G = nx.extended_barabasi_albert_graph(100, 1, 0, 0.5, seed) + assert G.number_of_edges() == 99 + G = nx.extended_barabasi_albert_graph(100, 2, 0.5, 0,
seed) + assert G.number_of_edges() > 100 * 3 + assert G.number_of_edges() < 100 * 4 + + G = nx.extended_barabasi_albert_graph(100, 2, 0.3, 0.3, seed) + assert G.number_of_edges() > 100 * 2 + assert G.number_of_edges() < 100 * 4 + + G = nx.powerlaw_cluster_graph(100, 1, 1.0, seed) + G = nx.powerlaw_cluster_graph(100, 3, 0.0, seed) + assert G.number_of_edges() == (97 * 3) + + G = nx.random_regular_graph(10, 20, seed) + + pytest.raises(nx.NetworkXError, nx.random_regular_graph, 3, 21) + pytest.raises(nx.NetworkXError, nx.random_regular_graph, 33, 21) + + constructor = [(10, 20, 0.8), (20, 40, 0.8)] + G = nx.random_shell_graph(constructor, seed) + + def is_caterpillar(g): + """ + A tree is a caterpillar iff all nodes of degree >=3 are surrounded + by at most two nodes of degree two or greater. + ref: http://mathworld.wolfram.com/CaterpillarGraph.html + """ + deg_over_3 = [n for n in g if g.degree(n) >= 3] + for n in deg_over_3: + nbh_deg_over_2 = [nbh for nbh in g.neighbors(n) if g.degree(nbh) >= 2] + if not len(nbh_deg_over_2) <= 2: + return False + return True + + def is_lobster(g): + """ + A tree is a lobster if it has the property that the removal of leaf + nodes leaves a caterpillar graph (Gallian 2007) + ref: http://mathworld.wolfram.com/LobsterGraph.html + """ + non_leafs = [n for n in g if g.degree(n) > 1] + return is_caterpillar(g.subgraph(non_leafs)) + + G = nx.random_lobster(10, 0.1, 0.5, seed) + assert max(G.degree(n) for n in G.nodes()) > 3 + assert is_lobster(G) + pytest.raises(nx.NetworkXError, nx.random_lobster, 10, 0.1, 1, seed) + pytest.raises(nx.NetworkXError, nx.random_lobster, 10, 1, 1, seed) + pytest.raises(nx.NetworkXError, nx.random_lobster, 10, 1, 0.5, seed) + + # docstring says this should be a caterpillar + G = nx.random_lobster(10, 0.1, 0.0, seed) + assert is_caterpillar(G) + + # a seed for which a single try suffices is difficult to find + seq = nx.random_powerlaw_tree_sequence(10, 3, seed=14, tries=1) + G = nx.random_powerlaw_tree(10, 3, seed=14, tries=1) + + def test_dual_barabasi_albert(self, m1=1, m2=4, p=0.5): + """ + Tests that the generated dual BA random graphs behave consistently. + + Tests that the exceptions are raised as expected.
+ + The graph generation is repeated several times to avoid lucky shots + + """ + seeds = [42, 314, 2718] + initial_graph = nx.complete_graph(10) + + for seed in seeds: + # This should be BA with m = m1 + BA1 = nx.barabasi_albert_graph(100, m1, seed) + DBA1 = nx.dual_barabasi_albert_graph(100, m1, m2, 1, seed) + assert BA1.edges() == DBA1.edges() + + # This should be BA with m = m2 + BA2 = nx.barabasi_albert_graph(100, m2, seed) + DBA2 = nx.dual_barabasi_albert_graph(100, m1, m2, 0, seed) + assert BA2.edges() == DBA2.edges() + + BA3 = nx.barabasi_albert_graph(100, m1, seed) + DBA3 = nx.dual_barabasi_albert_graph(100, m1, m1, p, seed) + # We can't compare edges here since randomness is "consumed" when drawing + # between m1 and m2 + assert BA3.size() == DBA3.size() + + DBA = nx.dual_barabasi_albert_graph(100, m1, m2, p, seed, initial_graph) + BA1 = nx.barabasi_albert_graph(100, m1, seed, initial_graph) + BA2 = nx.barabasi_albert_graph(100, m2, seed, initial_graph) + assert ( + min(BA1.size(), BA2.size()) <= DBA.size() <= max(BA1.size(), BA2.size()) + ) + + # Testing exceptions + dbag = nx.dual_barabasi_albert_graph + pytest.raises(nx.NetworkXError, dbag, m1, m1, m2, 0) + pytest.raises(nx.NetworkXError, dbag, m2, m1, m2, 0) + pytest.raises(nx.NetworkXError, dbag, 100, m1, m2, -0.5) + pytest.raises(nx.NetworkXError, dbag, 100, m1, m2, 1.5) + initial = nx.complete_graph(max(m1, m2) - 1) + pytest.raises(nx.NetworkXError, dbag, 100, m1, m2, p, initial_graph=initial) + + def test_extended_barabasi_albert(self, m=2): + """ + Tests that the generated extended BA random graphs behave consistently. + + Tests that the exceptions are raised as expected. + + The graph generation is repeated several times to avoid lucky shots + + """ + seeds = [42, 314, 2718] + + for seed in seeds: + BA_model = nx.barabasi_albert_graph(100, m, seed) + BA_model_edges = BA_model.number_of_edges() + + # This behaves just like BA, the number of edges must be the same + G1 = nx.extended_barabasi_albert_graph(100, m, 0, 0, seed) + assert G1.size() == BA_model_edges + + # More than twice as many edges should have been added + G1 = nx.extended_barabasi_albert_graph(100, m, 0.8, 0, seed) + assert G1.size() > BA_model_edges * 2 + + # Only edge rewiring, so the number of edges stays the same + G2 = nx.extended_barabasi_albert_graph(100, m, 0, 0.8, seed) + assert G2.size() == BA_model_edges + + # Mixed scenario: fewer edges than G1 and more edges than G2 + G3 = nx.extended_barabasi_albert_graph(100, m, 0.3, 0.3, seed) + assert G3.size() > G2.size() + assert G3.size() < G1.size() + + # Testing exceptions + ebag = nx.extended_barabasi_albert_graph + pytest.raises(nx.NetworkXError, ebag, m, m, 0, 0) + pytest.raises(nx.NetworkXError, ebag, 1, 0.5, 0, 0) + pytest.raises(nx.NetworkXError, ebag, 100, 2, 0.5, 0.5) + + def test_random_zero_regular_graph(self): + """Tests that a 0-regular graph has the correct number of nodes and + edges.
+ + """ + seed = 42 + G = nx.random_regular_graph(0, 10, seed) + assert len(G) == 10 + assert G.number_of_edges() == 0 + + def test_gnm(self): + G = nx.gnm_random_graph(10, 3) + assert len(G) == 10 + assert G.number_of_edges() == 3 + + G = nx.gnm_random_graph(10, 3, seed=42) + assert len(G) == 10 + assert G.number_of_edges() == 3 + + G = nx.gnm_random_graph(10, 100) + assert len(G) == 10 + assert G.number_of_edges() == 45 + + G = nx.gnm_random_graph(10, 100, directed=True) + assert len(G) == 10 + assert G.number_of_edges() == 90 + + G = nx.gnm_random_graph(10, -1.1) + assert len(G) == 10 + assert G.number_of_edges() == 0 + + def test_watts_strogatz_big_k(self): + # Test to make sure than n <= k + pytest.raises(nx.NetworkXError, nx.watts_strogatz_graph, 10, 11, 0.25) + pytest.raises(nx.NetworkXError, nx.newman_watts_strogatz_graph, 10, 11, 0.25) + + # could create an infinite loop, now doesn't + # infinite loop used to occur when a node has degree n-1 and needs to rewire + nx.watts_strogatz_graph(10, 9, 0.25, seed=0) + nx.newman_watts_strogatz_graph(10, 9, 0.5, seed=0) + + # Test k==n scenario + nx.watts_strogatz_graph(10, 10, 0.25, seed=0) + nx.newman_watts_strogatz_graph(10, 10, 0.25, seed=0) + + def test_random_kernel_graph(self): + def integral(u, w, z): + return c * (z - w) + + def root(u, w, r): + return r / c + w + + c = 1 + graph = nx.random_kernel_graph(1000, integral, root) + graph = nx.random_kernel_graph(1000, integral, root, seed=42) + assert len(graph) == 1000 + + +@pytest.mark.parametrize( + ("k", "expected_num_nodes", "expected_num_edges"), + [ + (2, 10, 10), + (4, 10, 20), + ], +) +def test_watts_strogatz(k, expected_num_nodes, expected_num_edges): + G = nx.watts_strogatz_graph(10, k, 0.25, seed=42) + assert len(G) == expected_num_nodes + assert G.number_of_edges() == expected_num_edges + + +def test_newman_watts_strogatz_zero_probability(): + G = nx.newman_watts_strogatz_graph(10, 2, 0.0, seed=42) + assert len(G) == 10 + assert G.number_of_edges() == 10 + + +def test_newman_watts_strogatz_nonzero_probability(): + G = nx.newman_watts_strogatz_graph(10, 4, 0.25, seed=42) + assert len(G) == 10 + assert G.number_of_edges() >= 20 + + +def test_connected_watts_strogatz(): + G = nx.connected_watts_strogatz_graph(10, 2, 0.1, tries=10, seed=42) + assert len(G) == 10 + assert G.number_of_edges() == 10 + + +def test_connected_watts_strogatz_zero_tries(): + with pytest.raises(nx.NetworkXError, match="Maximum number of tries exceeded"): + nx.connected_watts_strogatz_graph(10, 2, 0.1, tries=0) + + +@pytest.mark.parametrize( + "generator, kwargs", + [ + (nx.fast_gnp_random_graph, {"n": 20, "p": 0.2, "directed": False}), + (nx.fast_gnp_random_graph, {"n": 20, "p": 0.2, "directed": True}), + (nx.gnp_random_graph, {"n": 20, "p": 0.2, "directed": False}), + (nx.gnp_random_graph, {"n": 20, "p": 0.2, "directed": True}), + (nx.dense_gnm_random_graph, {"n": 30, "m": 4}), + (nx.gnm_random_graph, {"n": 30, "m": 4, "directed": False}), + (nx.gnm_random_graph, {"n": 30, "m": 4, "directed": True}), + (nx.newman_watts_strogatz_graph, {"n": 50, "k": 5, "p": 0.1}), + (nx.watts_strogatz_graph, {"n": 50, "k": 5, "p": 0.1}), + (nx.connected_watts_strogatz_graph, {"n": 50, "k": 5, "p": 0.1}), + (nx.random_regular_graph, {"d": 5, "n": 20}), + (nx.barabasi_albert_graph, {"n": 40, "m": 3}), + (nx.dual_barabasi_albert_graph, {"n": 40, "m1": 3, "m2": 2, "p": 0.1}), + (nx.extended_barabasi_albert_graph, {"n": 40, "m": 3, "p": 0.1, "q": 0.2}), + (nx.powerlaw_cluster_graph, {"n": 40, "m": 3, "p": 0.1}), + 
(nx.random_lobster, {"n": 40, "p1": 0.1, "p2": 0.2}), + (nx.random_shell_graph, {"constructor": [(10, 20, 0.8), (20, 40, 0.8)]}), + (nx.random_powerlaw_tree, {"n": 10, "seed": 14, "tries": 1}), + ( + nx.random_kernel_graph, + { + "n": 10, + "kernel_integral": lambda u, w, z: z - w, + "kernel_root": lambda u, w, r: r + w, + }, + ), + ], +) +@pytest.mark.parametrize("create_using_instance", [False, True]) +def test_create_using(generator, kwargs, create_using_instance): + class DummyGraph(nx.Graph): + pass + + class DummyDiGraph(nx.DiGraph): + pass + + create_using_type = DummyDiGraph if kwargs.get("directed") else DummyGraph + create_using = create_using_type() if create_using_instance else create_using_type + graph = generator(**kwargs, create_using=create_using) + assert isinstance(graph, create_using_type) + + +@pytest.mark.parametrize("directed", [True, False]) +@pytest.mark.parametrize("fn", (nx.fast_gnp_random_graph, nx.gnp_random_graph)) +def test_gnp_fns_disallow_multigraph(fn, directed): + with pytest.raises(nx.NetworkXError, match="must not be a multi-graph"): + fn(20, 0.2, create_using=nx.MultiGraph) + + +@pytest.mark.parametrize("fn", (nx.gnm_random_graph, nx.dense_gnm_random_graph)) +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_gnm_fns_disallow_directed_and_multigraph(fn, graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + fn(10, 20, create_using=graphtype) + + +@pytest.mark.parametrize( + "fn", + ( + nx.newman_watts_strogatz_graph, + nx.watts_strogatz_graph, + nx.connected_watts_strogatz_graph, + ), +) +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_watts_strogatz_disallow_directed_and_multigraph(fn, graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + fn(10, 2, 0.2, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_random_regular_graph_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.random_regular_graph(2, 10, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_barabasi_albert_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.barabasi_albert_graph(10, 3, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_dual_barabasi_albert_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.dual_barabasi_albert_graph(10, 2, 1, 0.4, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_extended_barabasi_albert_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.extended_barabasi_albert_graph(10, 2, 0.2, 0.3, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_powerlaw_cluster_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.powerlaw_cluster_graph(10, 5, 0.2, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_random_lobster_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + 
nx.random_lobster(10, 0.1, 0.1, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_random_shell_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.random_shell_graph([(10, 20, 2), (10, 20, 5)], create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_random_powerlaw_tree_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.random_powerlaw_tree(10, create_using=graphtype) + + +@pytest.mark.parametrize("graphtype", (nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)) +def test_random_kernel_disallow_directed_and_multigraph(graphtype): + with pytest.raises(nx.NetworkXError, match="must not be"): + nx.random_kernel_graph( + 10, lambda y, a, b: a + b, lambda u, w, r: r + w, create_using=graphtype + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_small.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_small.py new file mode 100644 index 0000000..bd65be4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_small.py @@ -0,0 +1,202 @@ +import pytest + +import networkx as nx + +null = nx.null_graph() + + +class TestGeneratorsSmall: + def test__LCF_graph(self): + # If n <= 0, the null graph is returned + G = nx.LCF_graph(-10, [1, 2], 100) + assert nx.could_be_isomorphic(G, null) + G = nx.LCF_graph(0, [1, 2], 3) + assert nx.could_be_isomorphic(G, null) + G = nx.LCF_graph(0, [1, 2], 10) + assert nx.could_be_isomorphic(G, null) + + # Test that LCF(n, [], c) == cycle_graph(n) for any repeat count c + for a, b, c in [(5, [], 0), (10, [], 0), (5, [], 1), (10, [], 10)]: + G = nx.LCF_graph(a, b, c) + assert nx.could_be_isomorphic(G, nx.cycle_graph(a)) + + # Generate the utility graph K_{3,3} + G = nx.LCF_graph(6, [3, -3], 3) + utility_graph = nx.complete_bipartite_graph(3, 3) + assert nx.could_be_isomorphic(G, utility_graph) + + with pytest.raises(nx.NetworkXError, match="Directed Graph not supported"): + G = nx.LCF_graph(6, [3, -3], 3, create_using=nx.DiGraph) + + def test_properties_named_small_graphs(self): + G = nx.bull_graph() + assert sorted(G) == list(range(5)) + assert G.number_of_edges() == 5 + assert sorted(d for n, d in G.degree()) == [1, 1, 2, 3, 3] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 2 + + G = nx.chvatal_graph() + assert sorted(G) == list(range(12)) + assert G.number_of_edges() == 24 + assert [d for n, d in G.degree()] == 12 * [4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.cubical_graph() + assert sorted(G) == list(range(8)) + assert G.number_of_edges() == 12 + assert [d for n, d in G.degree()] == 8 * [3] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 3 + + G = nx.desargues_graph() + assert sorted(G) == list(range(20)) + assert G.number_of_edges() == 30 + assert [d for n, d in G.degree()] == 20 * [3] + + G = nx.diamond_graph() + assert sorted(G) == list(range(4)) + assert sorted(d for n, d in G.degree()) == [2, 2, 3, 3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 1 + + G = nx.dodecahedral_graph() + assert sorted(G) == list(range(20)) + assert G.number_of_edges() == 30 + assert [d for n, d in G.degree()] == 20 * [3] + assert nx.diameter(G) == 5 + assert nx.radius(G) == 5 + + G = nx.frucht_graph() + assert sorted(G) == list(range(12)) + assert G.number_of_edges() == 18 + assert [d for n, d in G.degree()] == 12 * [3] + assert
nx.diameter(G) == 4 + assert nx.radius(G) == 3 + + G = nx.heawood_graph() + assert sorted(G) == list(range(14)) + assert G.number_of_edges() == 21 + assert [d for n, d in G.degree()] == 14 * [3] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 3 + + G = nx.hoffman_singleton_graph() + assert sorted(G) == list(range(50)) + assert G.number_of_edges() == 175 + assert [d for n, d in G.degree()] == 50 * [7] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.house_graph() + assert sorted(G) == list(range(5)) + assert G.number_of_edges() == 6 + assert sorted(d for n, d in G.degree()) == [2, 2, 2, 3, 3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.house_x_graph() + assert sorted(G) == list(range(5)) + assert G.number_of_edges() == 8 + assert sorted(d for n, d in G.degree()) == [2, 3, 3, 4, 4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 1 + + G = nx.icosahedral_graph() + assert sorted(G) == list(range(12)) + assert G.number_of_edges() == 30 + assert [d for n, d in G.degree()] == [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5] + assert nx.diameter(G) == 3 + assert nx.radius(G) == 3 + + G = nx.krackhardt_kite_graph() + assert sorted(G) == list(range(10)) + assert G.number_of_edges() == 18 + assert sorted(d for n, d in G.degree()) == [1, 2, 3, 3, 3, 4, 4, 5, 5, 6] + + G = nx.moebius_kantor_graph() + assert sorted(G) == list(range(16)) + assert G.number_of_edges() == 24 + assert [d for n, d in G.degree()] == 16 * [3] + assert nx.diameter(G) == 4 + + G = nx.octahedral_graph() + assert sorted(G) == list(range(6)) + assert G.number_of_edges() == 12 + assert [d for n, d in G.degree()] == 6 * [4] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.pappus_graph() + assert sorted(G) == list(range(18)) + assert G.number_of_edges() == 27 + assert [d for n, d in G.degree()] == 18 * [3] + assert nx.diameter(G) == 4 + + G = nx.petersen_graph() + assert sorted(G) == list(range(10)) + assert G.number_of_edges() == 15 + assert [d for n, d in G.degree()] == 10 * [3] + assert nx.diameter(G) == 2 + assert nx.radius(G) == 2 + + G = nx.sedgewick_maze_graph() + assert sorted(G) == list(range(8)) + assert G.number_of_edges() == 10 + assert sorted(d for n, d in G.degree()) == [1, 2, 2, 2, 3, 3, 3, 4] + + G = nx.tetrahedral_graph() + assert sorted(G) == list(range(4)) + assert G.number_of_edges() == 6 + assert [d for n, d in G.degree()] == [3, 3, 3, 3] + assert nx.diameter(G) == 1 + assert nx.radius(G) == 1 + + G = nx.truncated_cube_graph() + assert sorted(G) == list(range(24)) + assert G.number_of_edges() == 36 + assert [d for n, d in G.degree()] == 24 * [3] + + G = nx.truncated_tetrahedron_graph() + assert sorted(G) == list(range(12)) + assert G.number_of_edges() == 18 + assert [d for n, d in G.degree()] == 12 * [3] + + G = nx.tutte_graph() + assert sorted(G) == list(range(46)) + assert G.number_of_edges() == 69 + assert [d for n, d in G.degree()] == 46 * [3] + + # Test create_using with directed or multigraphs on small graphs + pytest.raises(nx.NetworkXError, nx.tutte_graph, create_using=nx.DiGraph) + MG = nx.tutte_graph(create_using=nx.MultiGraph) + assert sorted(MG.edges()) == sorted(G.edges()) + + +@pytest.mark.parametrize( + "fn", + ( + nx.bull_graph, + nx.chvatal_graph, + nx.cubical_graph, + nx.diamond_graph, + nx.house_graph, + nx.house_x_graph, + nx.icosahedral_graph, + nx.krackhardt_kite_graph, + nx.octahedral_graph, + nx.petersen_graph, + nx.truncated_cube_graph, + nx.tutte_graph, + ), +) +@pytest.mark.parametrize( + "create_using", (nx.DiGraph, 
nx.MultiDiGraph, nx.DiGraph([(0, 1)])) +) +def tests_raises_with_directed_create_using(fn, create_using): + with pytest.raises(nx.NetworkXError, match="Directed Graph not supported"): + fn(create_using=create_using) + # All these functions have `create_using` as the first positional argument too + with pytest.raises(nx.NetworkXError, match="Directed Graph not supported"): + fn(create_using) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_spectral_graph_forge.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_spectral_graph_forge.py new file mode 100644 index 0000000..b554bfd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_spectral_graph_forge.py @@ -0,0 +1,49 @@ +import pytest + +pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +from networkx import is_isomorphic +from networkx.exception import NetworkXError +from networkx.generators import karate_club_graph +from networkx.generators.spectral_graph_forge import spectral_graph_forge +from networkx.utils import nodes_equal + + +def test_spectral_graph_forge(): + G = karate_club_graph() + + seed = 54321 + + # common cases: check that the node set is preserved and that the + # identity and modularity transformations differ + H = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) + assert nodes_equal(G, H) + + I = spectral_graph_forge(G, 0.1, transformation="identity", seed=seed) + assert nodes_equal(G, I) + assert is_isomorphic(I, H) + + I = spectral_graph_forge(G, 0.1, transformation="modularity", seed=seed) + assert nodes_equal(G, I) + + assert not is_isomorphic(I, H) + + # with all the eigenvectors, output graph is identical to the input one + H = spectral_graph_forge(G, 1, transformation="modularity", seed=seed) + assert nodes_equal(G, H) + assert is_isomorphic(G, H) + + # invalid alpha input value, it is silently clipped to the interval [0, 1] + H = spectral_graph_forge(G, -1, transformation="identity", seed=seed) + assert nodes_equal(G, H) + + H = spectral_graph_forge(G, 10, transformation="identity", seed=seed) + assert nodes_equal(G, H) + assert is_isomorphic(G, H) + + # invalid transformation mode, checking the error raising + pytest.raises( + NetworkXError, spectral_graph_forge, G, 0.1, transformation="unknown", seed=seed + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_stochastic.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_stochastic.py new file mode 100644 index 0000000..0404d9d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_stochastic.py @@ -0,0 +1,72 @@ +"""Unit tests for the :mod:`networkx.generators.stochastic` module.""" + +import pytest + +import networkx as nx + + +class TestStochasticGraph: + """Unit tests for the :func:`~networkx.stochastic_graph` function.""" + + def test_default_weights(self): + G = nx.DiGraph() + G.add_edge(0, 1) + G.add_edge(0, 2) + S = nx.stochastic_graph(G) + assert nx.is_isomorphic(G, S) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_in_place(self): + """Tests for an in-place reweighting of the edges of the graph.""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=1) + G.add_edge(0, 2, weight=1) + nx.stochastic_graph(G, copy=False) + assert sorted(G.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_arbitrary_weights(self): + G = nx.DiGraph() + G.add_edge(0, 1, weight=1) + G.add_edge(0, 2, weight=1)
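+ # Both out-edges of node 0 have weight 1, so normalizing by the total + # out-weight (2) should give each edge weight 1/2.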
+ S = nx.stochastic_graph(G) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0.5}), + (0, 2, {"weight": 0.5}), + ] + + def test_multidigraph(self): + G = nx.MultiDiGraph() + G.add_edges_from([(0, 1), (0, 1), (0, 2), (0, 2)]) + S = nx.stochastic_graph(G) + d = {"weight": 0.25} + assert sorted(S.edges(data=True)) == [ + (0, 1, d), + (0, 1, d), + (0, 2, d), + (0, 2, d), + ] + + def test_zero_weights(self): + """Smoke test: ensure ZeroDivisionError is not raised.""" + G = nx.DiGraph() + G.add_edge(0, 1, weight=0) + G.add_edge(0, 2, weight=0) + S = nx.stochastic_graph(G) + assert sorted(S.edges(data=True)) == [ + (0, 1, {"weight": 0}), + (0, 2, {"weight": 0}), + ] + + def test_graph_disallowed(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.Graph()) + + def test_multigraph_disallowed(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.stochastic_graph(nx.MultiGraph()) diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_sudoku.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_sudoku.py new file mode 100644 index 0000000..7c3560a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_sudoku.py @@ -0,0 +1,92 @@ +"""Unit tests for the :mod:`networkx.generators.sudoku_graph` module.""" + +import pytest + +import networkx as nx + + +def test_sudoku_negative(): + """Raise an error when generating a Sudoku graph of order -1.""" + pytest.raises(nx.NetworkXError, nx.sudoku_graph, n=-1) + + +@pytest.mark.parametrize("n", [0, 1, 2, 3, 4]) +def test_sudoku_generator(n): + """Generate Sudoku graphs of various sizes and verify their properties.""" + G = nx.sudoku_graph(n) + expected_nodes = n**4 + expected_degree = (n - 1) * (3 * n + 1) + expected_edges = expected_nodes * expected_degree // 2 + assert not G.is_directed() + assert not G.is_multigraph() + assert G.number_of_nodes() == expected_nodes + assert G.number_of_edges() == expected_edges + assert all(d == expected_degree for _, d in G.degree) + + if n == 2: + assert sorted(G.neighbors(6)) == [2, 3, 4, 5, 7, 10, 14] + elif n == 3: + assert sorted(G.neighbors(42)) == [ + 6, + 15, + 24, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 43, + 44, + 51, + 52, + 53, + 60, + 69, + 78, + ] + elif n == 4: + assert sorted(G.neighbors(0)) == [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 32, + 33, + 34, + 35, + 48, + 49, + 50, + 51, + 64, + 80, + 96, + 112, + 128, + 144, + 160, + 176, + 192, + 208, + 224, + 240, + ] diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_time_series.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_time_series.py new file mode 100644 index 0000000..5d0cc90 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_time_series.py @@ -0,0 +1,64 @@ +"""Unit tests for the :mod:`networkx.generators.time_series` module.""" + +import itertools + +import networkx as nx + + +def test_visibility_graph__empty_series__empty_graph(): + null_graph = nx.visibility_graph([]) # move along nothing to see here + assert nx.is_empty(null_graph) + + +def test_visibility_graph__single_value_ts__single_node_graph(): + node_graph = nx.visibility_graph([10]) # So Lonely + assert node_graph.number_of_nodes() == 1 + assert node_graph.number_of_edges() == 0 + + +def test_visibility_graph__two_values_ts__single_edge_graph(): + edge_graph = nx.visibility_graph([10, 20]) # Two of Us + assert 
list(edge_graph.edges) == [(0, 1)] + + +def test_visibility_graph__convex_series__complete_graph(): + series = [i**2 for i in range(10)] # no obstructions + expected_series_length = len(series) + + actual_graph = nx.visibility_graph(series) + + assert actual_graph.number_of_nodes() == expected_series_length + assert actual_graph.number_of_edges() == 45 + assert nx.is_isomorphic(actual_graph, nx.complete_graph(expected_series_length)) + + +def test_visibility_graph__concave_series__path_graph(): + series = [-(i**2) for i in range(10)] # Slip Slidin' Away + expected_node_count = len(series) + + actual_graph = nx.visibility_graph(series) + + assert actual_graph.number_of_nodes() == expected_node_count + assert actual_graph.number_of_edges() == expected_node_count - 1 + assert nx.is_isomorphic(actual_graph, nx.path_graph(expected_node_count)) + + +def test_visibility_graph__flat_series__path_graph(): + series = [0] * 10 # living in 1D flatland + expected_node_count = len(series) + + actual_graph = nx.visibility_graph(series) + + assert actual_graph.number_of_nodes() == expected_node_count + assert actual_graph.number_of_edges() == expected_node_count - 1 + assert nx.is_isomorphic(actual_graph, nx.path_graph(expected_node_count)) + + +def test_visibility_graph_cyclic_series(): + series = list(itertools.islice(itertools.cycle((2, 1, 3)), 17)) # It's so bumpy! + expected_node_count = len(series) + + actual_graph = nx.visibility_graph(series) + + assert actual_graph.number_of_nodes() == expected_node_count + assert actual_graph.number_of_edges() == 25 diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_trees.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_trees.py new file mode 100644 index 0000000..7932436 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_trees.py @@ -0,0 +1,195 @@ +import random + +import pytest + +import networkx as nx +from networkx.utils import arbitrary_element, graphs_equal + + +@pytest.mark.parametrize("prefix_tree_fn", (nx.prefix_tree, nx.prefix_tree_recursive)) +def test_basic_prefix_tree(prefix_tree_fn): + # This example is from the Wikipedia article "Trie" + # <https://en.wikipedia.org/wiki/Trie>. + strings = ["a", "to", "tea", "ted", "ten", "i", "in", "inn"] + T = prefix_tree_fn(strings) + root, NIL = 0, -1 + + def source_label(v): + return T.nodes[v]["source"] + + # First, we check that the tree has the expected + # structure. Recall that each node that corresponds to one of + # the input strings has an edge to the NIL node. + # + # Consider the three children at level 1 in the trie. + a, i, t = sorted(T[root], key=source_label) + # Check the 'a' branch. + assert len(T[a]) == 1 + nil = arbitrary_element(T[a]) + assert len(T[nil]) == 0 + # Check the 'i' branch. + assert len(T[i]) == 2 + nil, in_ = sorted(T[i], key=source_label) + assert len(T[nil]) == 0 + assert len(T[in_]) == 2 + nil, inn = sorted(T[in_], key=source_label) + assert len(T[nil]) == 0 + assert len(T[inn]) == 1 + nil = arbitrary_element(T[inn]) + assert len(T[nil]) == 0 + # Check the 't' branch.
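+ # Expected shape of this branch: 't' has children 'te' and 'to'; 'to' ends + # an input string, so it leads to NIL, while 'te' branches into 'tea', + # 'ted' and 'ten', each of which also leads to NIL.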
+ te, to = sorted(T[t], key=source_label) + assert len(T[to]) == 1 + nil = arbitrary_element(T[to]) + assert len(T[nil]) == 0 + tea, ted, ten = sorted(T[te], key=source_label) + assert len(T[tea]) == 1 + assert len(T[ted]) == 1 + assert len(T[ten]) == 1 + nil = arbitrary_element(T[tea]) + assert len(T[nil]) == 0 + nil = arbitrary_element(T[ted]) + assert len(T[nil]) == 0 + nil = arbitrary_element(T[ten]) + assert len(T[nil]) == 0 + + # Next, we check that the "sources" of each of the nodes is the + # rightmost letter in the string corresponding to the path to + # that node. + assert source_label(root) is None + assert source_label(a) == "a" + assert source_label(i) == "i" + assert source_label(t) == "t" + assert source_label(in_) == "n" + assert source_label(inn) == "n" + assert source_label(to) == "o" + assert source_label(te) == "e" + assert source_label(tea) == "a" + assert source_label(ted) == "d" + assert source_label(ten) == "n" + assert source_label(NIL) == "NIL" + + +@pytest.mark.parametrize( + "strings", + ( + ["a", "to", "tea", "ted", "ten", "i", "in", "inn"], + ["ab", "abs", "ad"], + ["ab", "abs", "ad", ""], + ["distant", "disparaging", "distant", "diamond", "ruby"], + ), +) +def test_implementations_consistent(strings): + """Ensure results are consistent between prefix_tree implementations.""" + assert graphs_equal(nx.prefix_tree(strings), nx.prefix_tree_recursive(strings)) + + +def test_random_labeled_rooted_tree(): + for i in range(1, 10): + t1 = nx.random_labeled_rooted_tree(i, seed=42) + t2 = nx.random_labeled_rooted_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" in t1.graph + assert "roots" not in t1.graph + + +def test_random_labeled_tree_n_zero(): + """Tests if n = 0 then the NetworkXPointlessConcept exception is raised.""" + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_labeled_tree(0, seed=1234) + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_labeled_rooted_tree(0, seed=1234) + + +def test_random_labeled_rooted_forest(): + for i in range(1, 10): + t1 = nx.random_labeled_rooted_forest(i, seed=42) + t2 = nx.random_labeled_rooted_forest(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + for c in nx.connected_components(t1): + assert nx.is_tree(t1.subgraph(c)) + assert "root" not in t1.graph + assert "roots" in t1.graph + + +def test_random_labeled_rooted_forest_n_zero(): + """Tests generation of empty labeled forests.""" + F = nx.random_labeled_rooted_forest(0, seed=1234) + assert len(F) == 0 + assert len(F.graph["roots"]) == 0 + + +def test_random_unlabeled_rooted_tree(): + for i in range(1, 10): + t1 = nx.random_unlabeled_rooted_tree(i, seed=42) + t2 = nx.random_unlabeled_rooted_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" in t1.graph + assert "roots" not in t1.graph + t = nx.random_unlabeled_rooted_tree(15, number_of_trees=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_rooted_tree(15, number_of_trees=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + assert nx.is_tree(t[i]) + assert "root" in t[i].graph + assert "roots" not in t[i].graph + + +def test_random_unlabeled_tree_n_zero(): + """Tests if n = 0 then the NetworkXPointlessConcept exception is raised.""" + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_unlabeled_tree(0, seed=1234) + with pytest.raises(nx.NetworkXPointlessConcept): + T = nx.random_unlabeled_rooted_tree(0, seed=1234) + + +def 
test_random_unlabeled_rooted_forest(): + with pytest.raises(ValueError): + nx.random_unlabeled_rooted_forest(10, q=0, seed=42) + for i in range(1, 10): + for q in range(1, i + 1): + t1 = nx.random_unlabeled_rooted_forest(i, q=q, seed=42) + t2 = nx.random_unlabeled_rooted_forest(i, q=q, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + for c in nx.connected_components(t1): + assert nx.is_tree(t1.subgraph(c)) + assert len(c) <= q + assert "root" not in t1.graph + assert "roots" in t1.graph + t = nx.random_unlabeled_rooted_forest(15, number_of_forests=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_rooted_forest(15, number_of_forests=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + for c in nx.connected_components(t[i]): + assert nx.is_tree(t[i].subgraph(c)) + assert "root" not in t[i].graph + assert "roots" in t[i].graph + + +def test_random_unlabeled_forest_n_zero(): + """Tests generation of empty unlabeled forests.""" + F = nx.random_unlabeled_rooted_forest(0, seed=1234) + assert len(F) == 0 + assert len(F.graph["roots"]) == 0 + + +def test_random_unlabeled_tree(): + for i in range(1, 10): + t1 = nx.random_unlabeled_tree(i, seed=42) + t2 = nx.random_unlabeled_tree(i, seed=42) + assert nx.utils.misc.graphs_equal(t1, t2) + assert nx.is_tree(t1) + assert "root" not in t1.graph + assert "roots" not in t1.graph + t = nx.random_unlabeled_tree(10, number_of_trees=10, seed=43) + random.seed(43) + s = nx.random_unlabeled_tree(10, number_of_trees=10, seed=random) + for i in range(10): + assert nx.utils.misc.graphs_equal(t[i], s[i]) + assert nx.is_tree(t[i]) + assert "root" not in t[i].graph + assert "roots" not in t[i].graph diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_triads.py b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_triads.py new file mode 100644 index 0000000..463844b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/tests/test_triads.py @@ -0,0 +1,15 @@ +"""Unit tests for the :mod:`networkx.generators.triads` module.""" + +import pytest + +from networkx import triad_graph + + +def test_triad_graph(): + G = triad_graph("030T") + assert [tuple(e) for e in ("ab", "ac", "cb")] == sorted(G.edges()) + + +def test_invalid_name(): + with pytest.raises(ValueError): + triad_graph("bogus") diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/time_series.py b/.venv/lib/python3.12/site-packages/networkx/generators/time_series.py new file mode 100644 index 0000000..592d773 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/time_series.py @@ -0,0 +1,74 @@ +""" +Time Series Graphs +""" + +import itertools + +import networkx as nx + +__all__ = ["visibility_graph"] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def visibility_graph(series): + """ + Return a Visibility Graph of an input Time Series. + + A visibility graph converts a time series into a graph. The constructed graph + uses integer nodes to indicate which event in the series the node represents. + Edges are formed as follows: consider a bar plot of the series and view that + as a side view of a landscape with a node at the top of each bar. An edge + means that the nodes can be connected by a straight "line-of-sight" without + being obscured by any bars between the nodes. + + The resulting graph inherits several properties of the series in its structure. 
+ Thus, periodic series convert into regular graphs, random series convert + into random graphs, and fractal series convert into scale-free networks [1]_. + + Parameters + ---------- + series : Sequence[Number] + A Time Series sequence (iterable and sliceable) of numeric values + representing the series values. + + Returns + ------- + NetworkX Graph + The Visibility Graph of the input series + + Examples + -------- + >>> series_list = [range(10), [2, 1, 3, 2, 1, 3, 2, 1, 3, 2, 1, 3]] + >>> for s in series_list: + ... g = nx.visibility_graph(s) + ... print(g) + Graph with 10 nodes and 9 edges + Graph with 12 nodes and 18 edges + + References + ---------- + .. [1] Lacasa, Lucas, Bartolo Luque, Fernando Ballesteros, Jordi Luque, and Juan Carlos Nuno. + "From time series to complex networks: The visibility graph." Proceedings of the + National Academy of Sciences 105, no. 13 (2008): 4972-4975. + https://www.pnas.org/doi/10.1073/pnas.0709247105 + """ + + # Sequential values are always connected + G = nx.path_graph(len(series)) + nx.set_node_attributes(G, dict(enumerate(series)), "value") + + # Check all pairs of nodes in the series + for (n1, t1), (n2, t2) in itertools.combinations(enumerate(series), 2): + # check whether any intermediate value obstructs the line of sight: + # a bar at (n, t) blocks the segment from (n1, t1) to (n2, t2) + # exactly when t >= slope * n + offset + slope = (t2 - t1) / (n2 - n1) + offset = t2 - slope * n2 + + obstructed = any( + t >= slope * n + offset + for n, t in enumerate(series[n1 + 1 : n2], start=n1 + 1) + ) + + if not obstructed: + G.add_edge(n1, n2) + + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/trees.py b/.venv/lib/python3.12/site-packages/networkx/generators/trees.py new file mode 100644 index 0000000..a8b24fa --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/trees.py @@ -0,0 +1,1070 @@ +"""Functions for generating trees. + +The functions sampling trees at random in this module come +in two variants: labeled and unlabeled. The labeled variants +sample from every possible tree with the given number of nodes +uniformly at random. The unlabeled variants sample from every +possible *isomorphism class* of trees with the given number +of nodes uniformly at random. + +To understand the difference, consider the following example. +There are two isomorphism classes of trees with four nodes. +One is that of the path graph, the other is that of the +star graph. The unlabeled variant will return a path graph or +a star graph with probability 1/2. + +The labeled variant will return the path graph +with probability 3/4 and the star graph with probability 1/4, +because there are more labeled variants of the path graph +than of the star graph. More precisely, the path graph has +an automorphism group of order 2, whereas the star graph has +an automorphism group of order 6, so the path graph has three +times as many labeled variants as the star graph, and thus +three times as many chances to be drawn. + +Additionally, some functions in this module can sample rooted +trees and forests uniformly at random. A rooted tree is a tree +with a designated root node. A rooted forest is a disjoint union +of rooted trees.
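+ +For example, the difference can be checked empirically. A rough sketch +(the exact star count depends on which seeds are drawn):: + + import networkx as nx + + # On four nodes, a maximum degree of 3 identifies the star graph; the + # labeled sampler should return it in roughly a quarter of the draws. + samples = [nx.random_labeled_tree(4, seed=s) for s in range(400)] + stars = sum(1 for t in samples if max(d for _, d in t.degree()) == 3)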
+""" + +from collections import Counter, defaultdict +from math import comb, factorial + +import networkx as nx +from networkx.utils import py_random_state + +__all__ = [ + "prefix_tree", + "prefix_tree_recursive", + "random_labeled_tree", + "random_labeled_rooted_tree", + "random_labeled_rooted_forest", + "random_unlabeled_tree", + "random_unlabeled_rooted_tree", + "random_unlabeled_rooted_forest", +] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def prefix_tree(paths): + """Creates a directed prefix tree from a list of paths. + + Usually the paths are described as strings or lists of integers. + + A "prefix tree" represents the prefix structure of the strings. + Each node represents a prefix of some string. The root represents + the empty prefix with children for the single letter prefixes which + in turn have children for each double letter prefix starting with + the single letter corresponding to the parent node, and so on. + + More generally the prefixes do not need to be strings. A prefix refers + to the start of a sequence. The root has children for each one element + prefix and they have children for each two element prefix that starts + with the one element sequence of the parent, and so on. + + Note that this implementation uses integer nodes with an attribute. + Each node has an attribute "source" whose value is the original element + of the path to which this node corresponds. For example, suppose `paths` + consists of one path: "can". Then the nodes `[1, 2, 3]` which represent + this path have "source" values "c", "a" and "n". + + All the descendants of a node have a common prefix in the sequence/path + associated with that node. From the returned tree, the prefix for each + node can be constructed by traversing the tree up to the root and + accumulating the "source" values along the way. + + The root node is always `0` and has "source" attribute `None`. + The root is the only node with in-degree zero. + The nil node is always `-1` and has "source" attribute `"NIL"`. + The nil node is the only node with out-degree zero. + + + Parameters + ---------- + paths: iterable of paths + An iterable of paths which are themselves sequences. + Matching prefixes among these sequences are identified with + nodes of the prefix tree. One leaf of the tree is associated + with each path. (Identical paths are associated with the same + leaf of the tree.) + + + Returns + ------- + tree: DiGraph + A directed graph representing an arborescence consisting of the + prefix tree generated by `paths`. Nodes are directed "downward", + from parent to child. A special "synthetic" root node is added + to be the parent of the first node in each path. A special + "synthetic" leaf node, the "nil" node `-1`, is added to be the child + of all nodes representing the last element in a path. (The + addition of this nil node technically makes this not an + arborescence but a directed acyclic graph; removing the nil node + makes it an arborescence.) + + + Notes + ----- + The prefix tree is also known as a *trie*. 
+ + + Examples + -------- + Create a prefix tree from a list of strings with common prefixes:: + + >>> paths = ["ab", "abs", "ad"] + >>> T = nx.prefix_tree(paths) + >>> list(T.edges) + [(0, 1), (1, 2), (1, 4), (2, -1), (2, 3), (3, -1), (4, -1)] + + The leaf nodes can be obtained as predecessors of the nil node:: + + >>> root, NIL = 0, -1 + >>> list(T.predecessors(NIL)) + [2, 3, 4] + + To recover the original paths that generated the prefix tree, + traverse up the tree from the node `-1` to the node `0`:: + + >>> recovered = [] + >>> for v in T.predecessors(NIL): + ... prefix = "" + ... while v != root: + ... prefix = str(T.nodes[v]["source"]) + prefix + ... v = next(T.predecessors(v)) # only one predecessor + ... recovered.append(prefix) + >>> sorted(recovered) + ['ab', 'abs', 'ad'] + """ + + def get_children(parent, paths): + children = defaultdict(list) + # Populate dictionary with key(s) as the child/children of the root and + # value(s) as the remaining paths of the corresponding child/children + for path in paths: + # If path is empty, we add an edge to the NIL node. + if not path: + tree.add_edge(parent, NIL) + continue + child, *rest = path + # `child` may exist as the head of more than one path in `paths`. + children[child].append(rest) + return children + + # Initialize the prefix tree with a root node and a nil node. + tree = nx.DiGraph() + root = 0 + tree.add_node(root, source=None) + NIL = -1 + tree.add_node(NIL, source="NIL") + children = get_children(root, paths) + stack = [(root, iter(children.items()))] + while stack: + parent, remaining_children = stack[-1] + try: + child, remaining_paths = next(remaining_children) + # Pop item off stack if there are no remaining children + except StopIteration: + stack.pop() + continue + # We relabel each child with an unused name. + new_name = len(tree) - 1 + # The "source" node attribute stores the original node name. + tree.add_node(new_name, source=child) + tree.add_edge(parent, new_name) + children = get_children(new_name, remaining_paths) + stack.append((new_name, iter(children.items()))) + + return tree + + +@nx._dispatchable(graphs=None, returns_graph=True) +def prefix_tree_recursive(paths): + """Recursively creates a directed prefix tree from a list of paths. + + This is the original recursive version of ``prefix_tree``, kept for + comparison. It implements the same algorithm; :func:`prefix_tree` + unrolls the recursion onto an explicit stack instead. + + Usually the paths are described as strings or lists of integers. + + A "prefix tree" represents the prefix structure of the strings. + Each node represents a prefix of some string. The root represents + the empty prefix with children for the single letter prefixes which + in turn have children for each double letter prefix starting with + the single letter corresponding to the parent node, and so on. + + More generally the prefixes do not need to be strings. A prefix refers + to the start of a sequence. The root has children for each one element + prefix and they have children for each two element prefix that starts + with the one element sequence of the parent, and so on. + + Note that this implementation uses integer nodes with an attribute. + Each node has an attribute "source" whose value is the original element + of the path to which this node corresponds. For example, suppose `paths` + consists of one path: "can". Then the nodes `[1, 2, 3]` which represent + this path have "source" values "c", "a" and "n". + + All the descendants of a node have a common prefix in the sequence/path + associated with that node.
From the returned tree, the prefix for each + node can be constructed by traversing the tree up to the root and + accumulating the "source" values along the way. + + The root node is always `0` and has "source" attribute `None`. + The root is the only node with in-degree zero. + The nil node is always `-1` and has "source" attribute `"NIL"`. + The nil node is the only node with out-degree zero. + + + Parameters + ---------- + paths: iterable of paths + An iterable of paths which are themselves sequences. + Matching prefixes among these sequences are identified with + nodes of the prefix tree. One leaf of the tree is associated + with each path. (Identical paths are associated with the same + leaf of the tree.) + + + Returns + ------- + tree: DiGraph + A directed graph representing an arborescence consisting of the + prefix tree generated by `paths`. Nodes are directed "downward", + from parent to child. A special "synthetic" root node is added + to be the parent of the first node in each path. A special + "synthetic" leaf node, the "nil" node `-1`, is added to be the child + of all nodes representing the last element in a path. (The + addition of this nil node technically makes this not an + arborescence but a directed acyclic graph; removing the nil node + makes it an arborescence.) + + + Notes + ----- + The prefix tree is also known as a *trie*. + + + Examples + -------- + Create a prefix tree from a list of strings with common prefixes:: + + >>> paths = ["ab", "abs", "ad"] + >>> T = nx.prefix_tree(paths) + >>> list(T.edges) + [(0, 1), (1, 2), (1, 4), (2, -1), (2, 3), (3, -1), (4, -1)] + + The leaf nodes can be obtained as predecessors of the nil node:: + + >>> root, NIL = 0, -1 + >>> list(T.predecessors(NIL)) + [2, 3, 4] + + To recover the original paths that generated the prefix tree, + traverse up the tree from the node `-1` to the node `0`:: + + >>> recovered = [] + >>> for v in T.predecessors(NIL): + ... prefix = "" + ... while v != root: + ... prefix = str(T.nodes[v]["source"]) + prefix + ... v = next(T.predecessors(v)) # only one predecessor + ... recovered.append(prefix) + >>> sorted(recovered) + ['ab', 'abs', 'ad'] + """ + + def _helper(paths, root, tree): + """Recursively create a trie from the given list of paths. + + `paths` is a list of paths, each of which is itself a list of + nodes, relative to the given `root` (but not including it). This + list of paths will be interpreted as a tree-like structure, in + which two paths that share a prefix represent two branches of + the tree with the same initial segment. + + `root` is the parent of the node at index 0 in each path. + + `tree` is the "accumulator", the :class:`networkx.DiGraph` + representing the branching to which the new nodes and edges will + be added. + + """ + # For each path, remove the first node and make it a child of root. + # Any remaining paths then get processed recursively. + children = defaultdict(list) + for path in paths: + # If path is empty, we add an edge to the NIL node. + if not path: + tree.add_edge(root, NIL) + continue + child, *rest = path + # `child` may exist as the head of more than one path in `paths`. + children[child].append(rest) + # Add a node for each child, connect root, recurse to remaining paths + for child, remaining_paths in children.items(): + # We relabel each child with an unused name. + new_name = len(tree) - 1 + # The "source" node attribute stores the original node name.
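+ # len(tree) - 1 yields names 1, 2, 3, ... in creation order, since + # the tree already contains the root (0) and the NIL node (-1).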
+ tree.add_node(new_name, source=child) + tree.add_edge(root, new_name) + _helper(remaining_paths, new_name, tree) + + # Initialize the prefix tree with a root node and a nil node. + tree = nx.DiGraph() + root = 0 + tree.add_node(root, source=None) + NIL = -1 + tree.add_node(NIL, source="NIL") + # Populate the tree. + _helper(paths, root, tree) + return tree + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_labeled_tree(n, *, seed=None): + """Returns a labeled tree on `n` nodes chosen uniformly at random. + + Generating uniformly distributed random Prüfer sequences and + converting them into the corresponding trees is a straightforward + method of generating uniformly distributed random labeled trees. + This function implements this method. + + Parameters + ---------- + n : int + The number of nodes, greater than zero. + seed : random_state + Indicator of random number generation state. + See :ref:`Randomness` + + Returns + ------- + :class:`networkx.Graph` + A `networkx.Graph` with nodes in the set {0, …, *n* - 1}. + + Raises + ------ + NetworkXPointlessConcept + If `n` is zero (because the null graph is not a tree). + + Examples + -------- + >>> G = nx.random_labeled_tree(5, seed=42) + >>> nx.is_tree(G) + True + >>> G.edges + EdgeView([(0, 1), (0, 3), (0, 2), (2, 4)]) + + A tree with *arbitrarily directed* edges can be created by assigning + generated edges to a ``DiGraph``: + + >>> DG = nx.DiGraph() + >>> DG.add_edges_from(G.edges) + >>> nx.is_tree(DG) + True + >>> DG.edges + OutEdgeView([(0, 1), (0, 3), (0, 2), (2, 4)]) + """ + # Cannot create a Prüfer sequence unless `n` is at least two. + if n == 0: + raise nx.NetworkXPointlessConcept("the null graph is not a tree") + if n == 1: + return nx.empty_graph(1) + return nx.from_prufer_sequence([seed.choice(range(n)) for i in range(n - 2)]) + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_labeled_rooted_tree(n, *, seed=None): + """Returns a labeled rooted tree with `n` nodes. + + The returned tree is chosen uniformly at random from all labeled rooted trees. + + Parameters + ---------- + n : int + The number of nodes + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + :class:`networkx.Graph` + A `networkx.Graph` with integer nodes 0 <= node <= `n` - 1. + The root of the tree is selected uniformly from the nodes. + The "root" graph attribute identifies the root of the tree. + + Notes + ----- + This function returns the result of :func:`random_labeled_tree` + with a randomly selected root. + + Raises + ------ + NetworkXPointlessConcept + If `n` is zero (because the null graph is not a tree). + """ + t = random_labeled_tree(n, seed=seed) + t.graph["root"] = seed.randint(0, n - 1) + return t + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_labeled_rooted_forest(n, *, seed=None): + """Returns a labeled rooted forest with `n` nodes. + + The returned forest is chosen uniformly at random using a + generalization of Prüfer sequences [1]_ in the form described in [2]_. + + Parameters + ---------- + n : int + The number of nodes. + seed : random_state + See :ref:`Randomness`. + + Returns + ------- + :class:`networkx.Graph` + A `networkx.Graph` with integer nodes 0 <= node <= `n` - 1. + The "roots" graph attribute is a set of integers containing the roots. + + References + ---------- + .. [1] Knuth, Donald E. 
"Another Enumeration of Trees." + Canadian Journal of Mathematics, 20 (1968): 1077-1086. + https://doi.org/10.4153/CJM-1968-104-8 + .. [2] Rubey, Martin. "Counting Spanning Trees". Diplomarbeit + zur Erlangung des akademischen Grades Magister der + Naturwissenschaften an der Formal- und Naturwissenschaftlichen + Fakultät der Universität Wien. Wien, May 2000. + """ + + # Select the number of roots by iterating over the cumulative count of trees + # with at most k roots + def _select_k(n, seed): + r = seed.randint(0, (n + 1) ** (n - 1) - 1) + cum_sum = 0 + for k in range(1, n): + cum_sum += (factorial(n - 1) * n ** (n - k)) // ( + factorial(k - 1) * factorial(n - k) + ) + if r < cum_sum: + return k + + return n + + F = nx.empty_graph(n) + if n == 0: + F.graph["roots"] = {} + return F + # Select the number of roots k + k = _select_k(n, seed) + if k == n: + F.graph["roots"] = set(range(n)) + return F # Nothing to do + # Select the roots + roots = seed.sample(range(n), k) + # Nonroots + p = set(range(n)).difference(roots) + # Coding sequence + N = [seed.randint(0, n - 1) for i in range(n - k - 1)] + # Multiset of elements in N also in p + degree = Counter([x for x in N if x in p]) + # Iterator over the elements of p with degree zero + iterator = iter(x for x in p if degree[x] == 0) + u = last = next(iterator) + # This loop is identical to that for Prüfer sequences, + # except that we can draw nodes only from p + for v in N: + F.add_edge(u, v) + degree[v] -= 1 + if v < last and degree[v] == 0: + u = v + else: + last = u = next(iterator) + + F.add_edge(u, roots[0]) + F.graph["roots"] = set(roots) + return F + + +# The following functions support generation of unlabeled trees and forests. + + +def _to_nx(edges, n_nodes, root=None, roots=None): + """ + Converts the (edges, n_nodes) input to a :class:`networkx.Graph`. + The (edges, n_nodes) input is a list of even length, where each pair + of consecutive integers represents an edge, and an integer `n_nodes`. + Integers in the list are elements of `range(n_nodes)`. + + Parameters + ---------- + edges : list of ints + The flattened list of edges of the graph. + n_nodes : int + The number of nodes of the graph. + root: int (default=None) + If not None, the "root" attribute of the graph will be set to this value. + roots: collection of ints (default=None) + If not None, he "roots" attribute of the graph will be set to this value. + + Returns + ------- + :class:`networkx.Graph` + The graph with `n_nodes` nodes and edges given by `edges`. + """ + G = nx.empty_graph(n_nodes) + G.add_edges_from(edges) + if root is not None: + G.graph["root"] = root + if roots is not None: + G.graph["roots"] = roots + return G + + +def _num_rooted_trees(n, cache_trees): + """Returns the number of unlabeled rooted trees with `n` nodes. + + See also https://oeis.org/A000081. + + Parameters + ---------- + n : int + The number of nodes + cache_trees : list of ints + The $i$-th element is the number of unlabeled rooted trees with $i$ nodes, + which is used as a cache (and is extended to length $n+1$ if needed) + + Returns + ------- + int + The number of unlabeled rooted trees with `n` nodes. 
+ """ + for n_i in range(len(cache_trees), n + 1): + cache_trees.append( + sum( + [ + d * cache_trees[n_i - j * d] * cache_trees[d] + for d in range(1, n_i) + for j in range(1, (n_i - 1) // d + 1) + ] + ) + // (n_i - 1) + ) + return cache_trees[n] + + +def _select_jd_trees(n, cache_trees, seed): + """Returns a pair $(j,d)$ with a specific probability + + Given $n$, returns a pair of positive integers $(j,d)$ with the probability + specified in formula (5) of Chapter 29 of [1]_. + + Parameters + ---------- + n : int + The number of nodes + cache_trees : list of ints + Cache for :func:`_num_rooted_trees`. + seed : random_state + See :ref:`Randomness`. + + Returns + ------- + (int, int) + A pair of positive integers $(j,d)$ satisfying formula (5) of + Chapter 29 of [1]_. + + References + ---------- + .. [1] Nijenhuis, Albert, and Wilf, Herbert S. + "Combinatorial algorithms: for computers and calculators." + Academic Press, 1978. + https://doi.org/10.1016/C2013-0-11243-3 + """ + p = seed.randint(0, _num_rooted_trees(n, cache_trees) * (n - 1) - 1) + cumsum = 0 + for d in range(n - 1, 0, -1): + for j in range(1, (n - 1) // d + 1): + cumsum += ( + d + * _num_rooted_trees(n - j * d, cache_trees) + * _num_rooted_trees(d, cache_trees) + ) + if p < cumsum: + return (j, d) + + +def _random_unlabeled_rooted_tree(n, cache_trees, seed): + """Returns an unlabeled rooted tree with `n` nodes. + + Returns an unlabeled rooted tree with `n` nodes chosen uniformly + at random using the "RANRUT" algorithm from [1]_. + The tree is returned in the form: (list_of_edges, number_of_nodes) + + Parameters + ---------- + n : int + The number of nodes, greater than zero. + cache_trees : list ints + Cache for :func:`_num_rooted_trees`. + seed : random_state + See :ref:`Randomness`. + + Returns + ------- + (list_of_edges, number_of_nodes) : list, int + A random unlabeled rooted tree with `n` nodes as a 2-tuple + ``(list_of_edges, number_of_nodes)``. + The root is node 0. + + References + ---------- + .. [1] Nijenhuis, Albert, and Wilf, Herbert S. + "Combinatorial algorithms: for computers and calculators." + Academic Press, 1978. + https://doi.org/10.1016/C2013-0-11243-3 + """ + if n == 1: + edges, n_nodes = [], 1 + return edges, n_nodes + if n == 2: + edges, n_nodes = [(0, 1)], 2 + return edges, n_nodes + + j, d = _select_jd_trees(n, cache_trees, seed) + t1, t1_nodes = _random_unlabeled_rooted_tree(n - j * d, cache_trees, seed) + t2, t2_nodes = _random_unlabeled_rooted_tree(d, cache_trees, seed) + t12 = [(0, t2_nodes * i + t1_nodes) for i in range(j)] + t1.extend(t12) + for _ in range(j): + t1.extend((n1 + t1_nodes, n2 + t1_nodes) for n1, n2 in t2) + t1_nodes += t2_nodes + + return t1, t1_nodes + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_unlabeled_rooted_tree(n, *, number_of_trees=None, seed=None): + """Returns a number of unlabeled rooted trees uniformly at random + + Returns one or more (depending on `number_of_trees`) + unlabeled rooted trees with `n` nodes drawn uniformly + at random. + + Parameters + ---------- + n : int + The number of nodes + number_of_trees : int or None (default) + If not None, this number of trees is generated and returned. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. 
+ + Returns + ------- + :class:`networkx.Graph` or list of :class:`networkx.Graph` + A single `networkx.Graph` (or a list thereof, if `number_of_trees` + is specified) with nodes in the set {0, …, *n* - 1}. + The "root" graph attribute identifies the root of the tree. + + Notes + ----- + The trees are generated using the "RANRUT" algorithm from [1]_. + The algorithm needs to compute some counting functions + that are relatively expensive: in case several trees are needed, + it is advisable to use the `number_of_trees` optional argument + to reuse the counting functions. + + Raises + ------ + NetworkXPointlessConcept + If `n` is zero (because the null graph is not a tree). + + References + ---------- + .. [1] Nijenhuis, Albert, and Wilf, Herbert S. + "Combinatorial algorithms: for computers and calculators." + Academic Press, 1978. + https://doi.org/10.1016/C2013-0-11243-3 + """ + if n == 0: + raise nx.NetworkXPointlessConcept("the null graph is not a tree") + cache_trees = [0, 1] # initial cache of number of rooted trees + if number_of_trees is None: + return _to_nx(*_random_unlabeled_rooted_tree(n, cache_trees, seed), root=0) + return [ + _to_nx(*_random_unlabeled_rooted_tree(n, cache_trees, seed), root=0) + for i in range(number_of_trees) + ] + + +def _num_rooted_forests(n, q, cache_forests): + """Returns the number of unlabeled rooted forests with `n` nodes, and with + no more than `q` nodes per tree. A recursive formula for this is (2) in + [1]_. This function is implemented using dynamic programming instead of + recursion. + + Parameters + ---------- + n : int + The number of nodes. + q : int + The maximum number of nodes for each tree of the forest. + cache_forests : list of ints + The $i$-th element is the number of unlabeled rooted forests with + $i$ nodes, and with no more than `q` nodes per tree; this is used + as a cache (and is extended to length `n` + 1 if needed). + + Returns + ------- + int + The number of unlabeled rooted forests with `n` nodes with no more than + `q` nodes per tree. + + References + ---------- + .. [1] Wilf, Herbert S. "The uniform selection of free trees." + Journal of Algorithms 2.2 (1981): 204-207. + https://doi.org/10.1016/0196-6774(81)90021-3 + """ + for n_i in range(len(cache_forests), n + 1): + q_i = min(n_i, q) + cache_forests.append( + sum( + [ + d * cache_forests[n_i - j * d] * cache_forests[d - 1] + for d in range(1, q_i + 1) + for j in range(1, n_i // d + 1) + ] + ) + // n_i + ) + + return cache_forests[n] + + +def _select_jd_forests(n, q, cache_forests, seed): + """Given `n` and `q`, returns a pair of positive integers $(j,d)$ + such that $j\\leq d$, with probability satisfying (F1) of [1]_. + + Parameters + ---------- + n : int + The number of nodes. + q : int + The maximum number of nodes for each tree of the forest. + cache_forests : list of ints + Cache for :func:`_num_rooted_forests`. + seed : random_state + See :ref:`Randomness`. + + Returns + ------- + (int, int) + A pair of positive integers $(j,d)$ + + References + ---------- + .. [1] Wilf, Herbert S. "The uniform selection of free trees." + Journal of Algorithms 2.2 (1981): 204-207. 
+ https://doi.org/10.1016/0196-6774(81)90021-3 + """ + p = seed.randint(0, _num_rooted_forests(n, q, cache_forests) * n - 1) + cumsum = 0 + for d in range(q, 0, -1): + for j in range(1, n // d + 1): + cumsum += ( + d + * _num_rooted_forests(n - j * d, q, cache_forests) + * _num_rooted_forests(d - 1, q, cache_forests) + ) + if p < cumsum: + return (j, d) + + +def _random_unlabeled_rooted_forest(n, q, cache_trees, cache_forests, seed): + """Returns an unlabeled rooted forest with `n` nodes, and with no more + than `q` nodes per tree, drawn uniformly at random. It is an implementation + of the algorithm "Forest" of [1]_. + + Parameters + ---------- + n : int + The number of nodes. + q : int + The maximum number of nodes per tree. + cache_trees : + Cache for :func:`_num_rooted_trees`. + cache_forests : + Cache for :func:`_num_rooted_forests`. + seed : random_state + See :ref:`Randomness`. + + Returns + ------- + (edges, n, r) : (list, int, list) + The forest (edges, n) and a list r of root nodes. + + References + ---------- + .. [1] Wilf, Herbert S. "The uniform selection of free trees." + Journal of Algorithms 2.2 (1981): 204-207. + https://doi.org/10.1016/0196-6774(81)90021-3 + """ + if n == 0: + return ([], 0, []) + + j, d = _select_jd_forests(n, q, cache_forests, seed) + t1, t1_nodes, r1 = _random_unlabeled_rooted_forest( + n - j * d, q, cache_trees, cache_forests, seed + ) + t2, t2_nodes = _random_unlabeled_rooted_tree(d, cache_trees, seed) + for _ in range(j): + r1.append(t1_nodes) + t1.extend((n1 + t1_nodes, n2 + t1_nodes) for n1, n2 in t2) + t1_nodes += t2_nodes + return t1, t1_nodes, r1 + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_unlabeled_rooted_forest(n, *, q=None, number_of_forests=None, seed=None): + """Returns a forest or list of forests selected at random. + + Returns one or more (depending on `number_of_forests`) + unlabeled rooted forests with `n` nodes, and with no more than + `q` nodes per tree, drawn uniformly at random. + The "roots" graph attribute identifies the roots of the forest. + + Parameters + ---------- + n : int + The number of nodes + q : int or None (default) + The maximum number of nodes per tree. + number_of_forests : int or None (default) + If not None, this number of forests is generated and returned. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + :class:`networkx.Graph` or list of :class:`networkx.Graph` + A single `networkx.Graph` (or a list thereof, if `number_of_forests` + is specified) with nodes in the set {0, …, *n* - 1}. + The "roots" graph attribute is a set containing the roots + of the trees in the forest. + + Notes + ----- + This function implements the algorithm "Forest" of [1]_. + The algorithm needs to compute some counting functions + that are relatively expensive: in case several trees are needed, + it is advisable to use the `number_of_forests` optional argument + to reuse the counting functions. + + Raises + ------ + ValueError + If `n` is non-zero but `q` is zero. + + References + ---------- + .. [1] Wilf, Herbert S. "The uniform selection of free trees." + Journal of Algorithms 2.2 (1981): 204-207. 
+       https://doi.org/10.1016/0196-6774(81)90021-3
+    """
+    if q is None:
+        q = n
+    if q == 0 and n != 0:
+        raise ValueError("q must be a positive integer if n is positive.")
+
+    cache_trees = [0, 1]  # initial cache of number of rooted trees
+    cache_forests = [1]  # initial cache of number of rooted forests
+
+    if number_of_forests is None:
+        g, nodes, rs = _random_unlabeled_rooted_forest(
+            n, q, cache_trees, cache_forests, seed
+        )
+        return _to_nx(g, nodes, roots=set(rs))
+
+    res = []
+    for i in range(number_of_forests):
+        g, nodes, rs = _random_unlabeled_rooted_forest(
+            n, q, cache_trees, cache_forests, seed
+        )
+        res.append(_to_nx(g, nodes, roots=set(rs)))
+    return res
+
+
+def _num_trees(n, cache_trees):
+    """Returns the number of unlabeled trees with `n` nodes.
+
+    See also https://oeis.org/A000055.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes.
+    cache_trees : list of ints
+        Cache for :func:`_num_rooted_trees`.
+
+    Returns
+    -------
+    int
+        The number of unlabeled trees with `n` nodes.
+    """
+    r = _num_rooted_trees(n, cache_trees) - sum(
+        [
+            _num_rooted_trees(j, cache_trees) * _num_rooted_trees(n - j, cache_trees)
+            for j in range(1, n // 2 + 1)
+        ]
+    )
+    if n % 2 == 0:
+        r += comb(_num_rooted_trees(n // 2, cache_trees) + 1, 2)
+    return r
+
+
+def _bicenter(n, cache, seed):
+    """Returns a bi-centroidal tree on `n` nodes drawn uniformly at random.
+
+    This function implements the algorithm Bicenter of [1]_.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes (must be even).
+    cache : list of ints
+        Cache for :func:`_num_rooted_trees`.
+    seed : random_state
+        See :ref:`Randomness`
+
+    Returns
+    -------
+    (edges, n)
+        The tree as a list of edges and number of nodes.
+
+    References
+    ----------
+    .. [1] Wilf, Herbert S. "The uniform selection of free trees."
+       Journal of Algorithms 2.2 (1981): 204-207.
+       https://doi.org/10.1016/0196-6774(81)90021-3
+    """
+    t, t_nodes = _random_unlabeled_rooted_tree(n // 2, cache, seed)
+    if seed.randint(0, _num_rooted_trees(n // 2, cache)) == 0:
+        t2, t2_nodes = t, t_nodes
+    else:
+        t2, t2_nodes = _random_unlabeled_rooted_tree(n // 2, cache, seed)
+    t.extend([(n1 + (n // 2), n2 + (n // 2)) for n1, n2 in t2])
+    t.append((0, n // 2))
+    return t, t_nodes + t2_nodes
+
+
+def _random_unlabeled_tree(n, cache_trees, cache_forests, seed):
+    """Returns a tree on `n` nodes drawn uniformly at random.
+    It implements Wilf's algorithm "Free" of [1]_.
+
+    Parameters
+    ----------
+    n : int
+        The number of nodes, greater than zero.
+    cache_trees : list of ints
+        Cache for :func:`_num_rooted_trees`.
+    cache_forests : list of ints
+        Cache for :func:`_num_rooted_forests`.
+    seed : random_state
+        Indicator of random number generation state.
+        See :ref:`Randomness`
+
+    Returns
+    -------
+    (edges, n)
+        The tree as a list of edges and number of nodes.
+
+    References
+    ----------
+    .. [1] Wilf, Herbert S. "The uniform selection of free trees."
+       Journal of Algorithms 2.2 (1981): 204-207.
+ https://doi.org/10.1016/0196-6774(81)90021-3 + """ + if n % 2 == 1: + p = 0 + else: + p = comb(_num_rooted_trees(n // 2, cache_trees) + 1, 2) + if seed.randint(0, _num_trees(n, cache_trees) - 1) < p: + return _bicenter(n, cache_trees, seed) + else: + f, n_f, r = _random_unlabeled_rooted_forest( + n - 1, (n - 1) // 2, cache_trees, cache_forests, seed + ) + for i in r: + f.append((i, n_f)) + return f, n_f + 1 + + +@py_random_state("seed") +@nx._dispatchable(graphs=None, returns_graph=True) +def random_unlabeled_tree(n, *, number_of_trees=None, seed=None): + """Returns a tree or list of trees chosen randomly. + + Returns one or more (depending on `number_of_trees`) + unlabeled trees with `n` nodes drawn uniformly at random. + + Parameters + ---------- + n : int + The number of nodes + number_of_trees : int or None (default) + If not None, this number of trees is generated and returned. + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + :class:`networkx.Graph` or list of :class:`networkx.Graph` + A single `networkx.Graph` (or a list thereof, if + `number_of_trees` is specified) with nodes in the set {0, …, *n* - 1}. + + Raises + ------ + NetworkXPointlessConcept + If `n` is zero (because the null graph is not a tree). + + Notes + ----- + This function generates an unlabeled tree uniformly at random using + Wilf's algorithm "Free" of [1]_. The algorithm needs to + compute some counting functions that are relatively expensive: + in case several trees are needed, it is advisable to use the + `number_of_trees` optional argument to reuse the counting + functions. + + References + ---------- + .. [1] Wilf, Herbert S. "The uniform selection of free trees." + Journal of Algorithms 2.2 (1981): 204-207. + https://doi.org/10.1016/0196-6774(81)90021-3 + """ + if n == 0: + raise nx.NetworkXPointlessConcept("the null graph is not a tree") + + cache_trees = [0, 1] # initial cache of number of rooted trees + cache_forests = [1] # initial cache of number of rooted forests + if number_of_trees is None: + return _to_nx(*_random_unlabeled_tree(n, cache_trees, cache_forests, seed)) + else: + return [ + _to_nx(*_random_unlabeled_tree(n, cache_trees, cache_forests, seed)) + for i in range(number_of_trees) + ] diff --git a/.venv/lib/python3.12/site-packages/networkx/generators/triads.py b/.venv/lib/python3.12/site-packages/networkx/generators/triads.py new file mode 100644 index 0000000..09b722d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/generators/triads.py @@ -0,0 +1,94 @@ +# See https://github.com/networkx/networkx/pull/1474 +# Copyright 2011 Reya Group +# Copyright 2011 Alex Levenson +# Copyright 2011 Diederik van Liere +"""Functions that generate the triad graphs, that is, the possible +digraphs on three nodes. + +""" + +import networkx as nx +from networkx.classes import DiGraph + +__all__ = ["triad_graph"] + +#: Dictionary mapping triad name to list of directed edges in the +#: digraph representation of that triad (with nodes 'a', 'b', and 'c'). 
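+#: For example, the entry for "021C" is ["ab", "bc"], i.e. the directed
+#: path a -> b -> c.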
+TRIAD_EDGES = {
+    "003": [],
+    "012": ["ab"],
+    "102": ["ab", "ba"],
+    "021D": ["ba", "bc"],
+    "021U": ["ab", "cb"],
+    "021C": ["ab", "bc"],
+    "111D": ["ac", "ca", "bc"],
+    "111U": ["ac", "ca", "cb"],
+    "030T": ["ab", "cb", "ac"],
+    "030C": ["ba", "cb", "ac"],
+    "201": ["ab", "ba", "ac", "ca"],
+    "120D": ["bc", "ba", "ac", "ca"],
+    "120U": ["ab", "cb", "ac", "ca"],
+    "120C": ["ab", "bc", "ac", "ca"],
+    "210": ["ab", "bc", "cb", "ac", "ca"],
+    "300": ["ab", "ba", "bc", "cb", "ac", "ca"],
+}
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def triad_graph(triad_name):
+    """Returns the triad graph with the given name.
+
+    Each string in the following tuple is a valid triad name::
+
+        (
+            "003",
+            "012",
+            "102",
+            "021D",
+            "021U",
+            "021C",
+            "111D",
+            "111U",
+            "030T",
+            "030C",
+            "201",
+            "120D",
+            "120U",
+            "120C",
+            "210",
+            "300",
+        )
+
+    Each triad name corresponds to one of the 16 possible digraphs on
+    three nodes.
+
+    Parameters
+    ----------
+    triad_name : string
+        The name of a triad, as described above.
+
+    Returns
+    -------
+    :class:`~networkx.DiGraph`
+        The digraph on three nodes with the given name. The nodes of the
+        graph are the single-character strings 'a', 'b', and 'c'.
+
+    Raises
+    ------
+    ValueError
+        If `triad_name` is not the name of a triad.
+
+    See also
+    --------
+    triadic_census
+
+    """
+    if triad_name not in TRIAD_EDGES:
+        raise ValueError(
+            f'unknown triad name "{triad_name}"; use one of the triad names'
+            " in the TRIAD_NAMES constant"
+        )
+    G = DiGraph()
+    G.add_nodes_from("abc")
+    G.add_edges_from(TRIAD_EDGES[triad_name])
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/lazy_imports.py b/.venv/lib/python3.12/site-packages/networkx/lazy_imports.py
new file mode 100644
index 0000000..c5c05ca
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/lazy_imports.py
@@ -0,0 +1,188 @@
+import importlib
+import importlib.util
+import inspect
+import os
+import sys
+import types
+
+__all__ = ["attach", "_lazy_import"]
+
+
+def attach(module_name, submodules=None, submod_attrs=None):
+    """Attach lazily loaded submodules, and functions or other attributes.
+
+    Typically, modules import submodules and attributes as follows::
+
+        import mysubmodule
+        import anothersubmodule
+
+        from .foo import someattr
+
+    The idea of this function is to replace the `__init__.py`
+    module's `__getattr__`, `__dir__`, and `__all__` attributes such that
+    all imports work exactly the way they normally would, except that the
+    actual import is delayed until the resulting module object is first used.
+
+    The typical way to call this function, replacing the above imports, is::
+
+        __getattr__, __lazy_dir__, __all__ = lazy.attach(
+            __name__, ["mysubmodule", "anothersubmodule"], {"foo": ["someattr"]}
+        )
+
+    This functionality requires Python 3.7 or higher.
+
+    Parameters
+    ----------
+    module_name : str
+        Typically use __name__.
+    submodules : set
+        List of submodules to lazily import.
+    submod_attrs : dict
+        Dictionary of submodule -> list of attributes / functions.
+        These attributes are imported as they are used.
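+        For example, ``submod_attrs={"foo": ["someattr"]}`` makes
+        ``someattr`` available lazily, as if ``from .foo import someattr``
+        had been executed.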
+ + Returns + ------- + __getattr__, __dir__, __all__ + + """ + if submod_attrs is None: + submod_attrs = {} + + if submodules is None: + submodules = set() + else: + submodules = set(submodules) + + attr_to_modules = { + attr: mod for mod, attrs in submod_attrs.items() for attr in attrs + } + + __all__ = list(submodules | attr_to_modules.keys()) + + def __getattr__(name): + if name in submodules: + return importlib.import_module(f"{module_name}.{name}") + elif name in attr_to_modules: + submod = importlib.import_module(f"{module_name}.{attr_to_modules[name]}") + return getattr(submod, name) + else: + raise AttributeError(f"No {module_name} attribute {name}") + + def __dir__(): + return __all__ + + if os.environ.get("EAGER_IMPORT", ""): + for attr in set(attr_to_modules.keys()) | submodules: + __getattr__(attr) + + return __getattr__, __dir__, list(__all__) + + +class DelayedImportErrorModule(types.ModuleType): + def __init__(self, frame_data, *args, **kwargs): + self.__frame_data = frame_data + super().__init__(*args, **kwargs) + + def __getattr__(self, x): + if x in ("__class__", "__file__", "__frame_data"): + super().__getattr__(x) + else: + fd = self.__frame_data + raise ModuleNotFoundError( + f"No module named '{fd['spec']}'\n\n" + "This error is lazily reported, having originally occurred in\n" + f" File {fd['filename']}, line {fd['lineno']}, in {fd['function']}\n\n" + f"----> {''.join(fd['code_context'] or '').strip()}" + ) + + +def _lazy_import(fullname): + """Return a lazily imported proxy for a module or library. + + Warning + ------- + Importing using this function can currently cause trouble + when the user tries to import from a subpackage of a module before + the package is fully imported. In particular, this idiom may not work: + + np = lazy_import("numpy") + from numpy.lib import recfunctions + + This is due to a difference in the way Python's LazyLoader handles + subpackage imports compared to the normal import process. Hopefully + we will get Python's LazyLoader to fix this, or find a workaround. + In the meantime, this is a potential problem. + + The workaround is to import numpy before importing from the subpackage. + + Notes + ----- + We often see the following pattern:: + + def myfunc(): + import scipy as sp + sp.argmin(...) + .... + + This is to prevent a library, in this case `scipy`, from being + imported at function definition time, since that can be slow. + + This function provides a proxy module that, upon access, imports + the actual module. So the idiom equivalent to the above example is:: + + sp = lazy.load("scipy") + + def myfunc(): + sp.argmin(...) + .... + + The initial import time is fast because the actual import is delayed + until the first attribute is requested. The overall import time may + decrease as well for users that don't make use of large portions + of the library. + + Parameters + ---------- + fullname : str + The full name of the package or subpackage to import. For example:: + + sp = lazy.load("scipy") # import scipy as sp + spla = lazy.load("scipy.linalg") # import scipy.linalg as spla + + Returns + ------- + pm : importlib.util._LazyModule + Proxy module. Can be used like any regularly imported module. + Actual loading of the module occurs upon first attribute request. 
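+
+    Examples
+    --------
+    A minimal sketch of the intended use (assuming SciPy is installed)::
+
+        sp = _lazy_import("scipy")
+        sp.linalg  # first attribute access triggers the real import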
+ + """ + try: + return sys.modules[fullname] + except: + pass + + # Not previously loaded -- look it up + spec = importlib.util.find_spec(fullname) + + if spec is None: + try: + parent = inspect.stack()[1] + frame_data = { + "spec": fullname, + "filename": parent.filename, + "lineno": parent.lineno, + "function": parent.function, + "code_context": parent.code_context, + } + return DelayedImportErrorModule(frame_data, "DelayedImportErrorModule") + finally: + del parent + + module = importlib.util.module_from_spec(spec) + sys.modules[fullname] = module + + loader = importlib.util.LazyLoader(spec.loader) + loader.exec_module(module) + + return module diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__init__.py b/.venv/lib/python3.12/site-packages/networkx/linalg/__init__.py new file mode 100644 index 0000000..119db18 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/__init__.py @@ -0,0 +1,13 @@ +from networkx.linalg.attrmatrix import * +from networkx.linalg import attrmatrix +from networkx.linalg.spectrum import * +from networkx.linalg import spectrum +from networkx.linalg.graphmatrix import * +from networkx.linalg import graphmatrix +from networkx.linalg.laplacianmatrix import * +from networkx.linalg import laplacianmatrix +from networkx.linalg.algebraicconnectivity import * +from networkx.linalg.modularitymatrix import * +from networkx.linalg import modularitymatrix +from networkx.linalg.bethehessianmatrix import * +from networkx.linalg import bethehessianmatrix diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..d6b03e8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-312.pyc new file mode 100644 index 0000000..37cf926 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/algebraicconnectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-312.pyc new file mode 100644 index 0000000..f5bb4a7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/attrmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-312.pyc new file mode 100644 index 0000000..b01abe0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/bethehessianmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-312.pyc new file mode 100644 index 0000000..8560886 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/graphmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-312.pyc new file mode 100644 index 0000000..f45ac52 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/laplacianmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-312.pyc new file mode 100644 index 0000000..9111326 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/modularitymatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/spectrum.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/spectrum.cpython-312.pyc new file mode 100644 index 0000000..7e20ec7 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/__pycache__/spectrum.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/algebraicconnectivity.py b/.venv/lib/python3.12/site-packages/networkx/linalg/algebraicconnectivity.py new file mode 100644 index 0000000..55073c2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/algebraicconnectivity.py @@ -0,0 +1,655 @@ +""" +Algebraic connectivity and Fiedler vectors of undirected graphs. +""" + +import networkx as nx +from networkx.utils import ( + not_implemented_for, + np_random_state, + reverse_cuthill_mckee_ordering, +) + +__all__ = [ + "algebraic_connectivity", + "fiedler_vector", + "spectral_ordering", + "spectral_bisection", +] + + +class _PCGSolver: + """Preconditioned conjugate gradient method. + + To solve Ax = b: + M = A.diagonal() # or some other preconditioner + solver = _PCGSolver(lambda x: A * x, lambda x: M * x) + x = solver.solve(b) + + The inputs A and M are functions which compute + matrix multiplication on the argument. + A - multiply by the matrix A in Ax=b + M - multiply by M, the preconditioner surrogate for A + + Warning: There is no limit on number of iterations. + """ + + def __init__(self, A, M): + self._A = A + self._M = M + + def solve(self, B, tol): + import numpy as np + + # Densifying step - can this be kept sparse? + B = np.asarray(B) + X = np.ndarray(B.shape, order="F") + for j in range(B.shape[1]): + X[:, j] = self._solve(B[:, j], tol) + return X + + def _solve(self, b, tol): + import numpy as np + import scipy as sp + + A = self._A + M = self._M + tol *= sp.linalg.blas.dasum(b) + # Initialize. + x = np.zeros(b.shape) + r = b.copy() + z = M(r) + rz = sp.linalg.blas.ddot(r, z) + p = z.copy() + # Iterate. + while True: + Ap = A(p) + alpha = rz / sp.linalg.blas.ddot(p, Ap) + x = sp.linalg.blas.daxpy(p, x, a=alpha) + r = sp.linalg.blas.daxpy(Ap, r, a=-alpha) + if sp.linalg.blas.dasum(r) < tol: + return x + z = M(r) + beta = sp.linalg.blas.ddot(r, z) + beta, rz = beta / rz, beta + p = sp.linalg.blas.daxpy(p, z, a=beta) + + +class _LUSolver: + """LU factorization. + + To solve Ax = b: + solver = _LUSolver(A) + x = solver.solve(b) + + optional argument `tol` on solve method is ignored but included + to match _PCGsolver API. 
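+
+    Note: the factorization is computed once in ``__init__`` (via
+    ``scipy.sparse.linalg.splu``) and then reused for every right-hand
+    side passed to ``solve``.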
+ """ + + def __init__(self, A): + import scipy as sp + + self._LU = sp.sparse.linalg.splu( + A, + permc_spec="MMD_AT_PLUS_A", + diag_pivot_thresh=0.0, + options={"Equil": True, "SymmetricMode": True}, + ) + + def solve(self, B, tol=None): + import numpy as np + + B = np.asarray(B) + X = np.ndarray(B.shape, order="F") + for j in range(B.shape[1]): + X[:, j] = self._LU.solve(B[:, j]) + return X + + +def _preprocess_graph(G, weight): + """Compute edge weights and eliminate zero-weight edges.""" + if G.is_directed(): + H = nx.MultiGraph() + H.add_nodes_from(G) + H.add_weighted_edges_from( + ((u, v, e.get(weight, 1.0)) for u, v, e in G.edges(data=True) if u != v), + weight=weight, + ) + G = H + if not G.is_multigraph(): + edges = ( + (u, v, abs(e.get(weight, 1.0))) for u, v, e in G.edges(data=True) if u != v + ) + else: + edges = ( + (u, v, sum(abs(e.get(weight, 1.0)) for e in G[u][v].values())) + for u, v in G.edges() + if u != v + ) + H = nx.Graph() + H.add_nodes_from(G) + H.add_weighted_edges_from((u, v, e) for u, v, e in edges if e != 0) + return H + + +def _rcm_estimate(G, nodelist): + """Estimate the Fiedler vector using the reverse Cuthill-McKee ordering.""" + import numpy as np + + G = G.subgraph(nodelist) + order = reverse_cuthill_mckee_ordering(G) + n = len(nodelist) + index = dict(zip(nodelist, range(n))) + x = np.ndarray(n, dtype=float) + for i, u in enumerate(order): + x[index[u]] = i + x -= (n - 1) / 2.0 + return x + + +def _tracemin_fiedler(L, X, normalized, tol, method): + """Compute the Fiedler vector of L using the TraceMIN-Fiedler algorithm. + + The Fiedler vector of a connected undirected graph is the eigenvector + corresponding to the second smallest eigenvalue of the Laplacian matrix + of the graph. This function starts with the Laplacian L, not the Graph. + + Parameters + ---------- + L : Laplacian of a possibly weighted or normalized, but undirected graph + + X : Initial guess for a solution. Usually a matrix of random numbers. + This function allows more than one column in X to identify more than + one eigenvector if desired. + + normalized : bool + Whether the normalized Laplacian matrix is used. + + tol : float + Tolerance of relative residual in eigenvalue computation. + Warning: There is no limit on number of iterations. + + method : string + Should be 'tracemin_pcg' or 'tracemin_lu'. + Otherwise exception is raised. + + Returns + ------- + sigma, X : Two NumPy arrays of floats. + The lowest eigenvalues and corresponding eigenvectors of L. + The size of input X determines the size of these outputs. + As this is for Fiedler vectors, the zero eigenvalue (and + constant eigenvector) are avoided. + """ + import numpy as np + import scipy as sp + + n = X.shape[0] + + if normalized: + # Form the normalized Laplacian matrix and determine the eigenvector of + # its nullspace. 
+        e = np.sqrt(L.diagonal())
+        # TODO: rm csr_array wrapper when spdiags array creation becomes available
+        D = sp.sparse.csr_array(sp.sparse.spdiags(1 / e, 0, n, n, format="csr"))
+        L = D @ L @ D
+        e *= 1.0 / np.linalg.norm(e, 2)
+
+    if normalized:
+
+        def project(X):
+            """Make X orthogonal to the nullspace of L."""
+            X = np.asarray(X)
+            for j in range(X.shape[1]):
+                X[:, j] -= (X[:, j] @ e) * e
+
+    else:
+
+        def project(X):
+            """Make X orthogonal to the nullspace of L."""
+            X = np.asarray(X)
+            for j in range(X.shape[1]):
+                X[:, j] -= X[:, j].sum() / n
+
+    if method == "tracemin_pcg":
+        D = L.diagonal().astype(float)
+        solver = _PCGSolver(lambda x: L @ x, lambda x: D * x)
+    elif method == "tracemin_lu":
+        # Convert A to CSC to suppress SparseEfficiencyWarning.
+        A = sp.sparse.csc_array(L, dtype=float, copy=True)
+        # Force A to be nonsingular. Since A is the Laplacian matrix of a
+        # connected graph, its rank deficiency is one, and thus one diagonal
+        # element needs to be modified. Changing to infinity forces a zero in
+        # the corresponding element in the solution.
+        i = (A.indptr[1:] - A.indptr[:-1]).argmax()
+        A[i, i] = np.inf
+        solver = _LUSolver(A)
+    else:
+        raise nx.NetworkXError(f"Unknown linear system solver: {method}")
+
+    # Initialize.
+    Lnorm = abs(L).sum(axis=1).flatten().max()
+    project(X)
+    W = np.ndarray(X.shape, order="F")
+
+    while True:
+        # Orthonormalize X.
+        X = np.linalg.qr(X)[0]
+        # Compute iteration matrix H.
+        W[:, :] = L @ X
+        H = X.T @ W
+        sigma, Y = sp.linalg.eigh(H, overwrite_a=True)
+        # Compute the Ritz vectors.
+        X = X @ Y
+        # Test for convergence exploiting the fact that L * X == W * Y.
+        res = sp.linalg.blas.dasum(W @ Y[:, 0] - sigma[0] * X[:, 0]) / Lnorm
+        if res < tol:
+            break
+        # Compute X = L \ X / (X' * (L \ X)).
+        # L \ X can have an arbitrary projection on the nullspace of L,
+        # which will be eliminated.
+        W[:, :] = solver.solve(X, tol)
+        X = (sp.linalg.inv(W.T @ X) @ W.T).T  # Preserves Fortran storage order.
+        project(X)
+
+    return sigma, np.asarray(X)
+
+
+def _get_fiedler_func(method):
+    """Returns a function that solves the Fiedler eigenvalue problem."""
+    import numpy as np
+
+    if method == "tracemin":  # old style keyword <v2.1
+        method = "tracemin_pcg"
+    if method in ("tracemin_pcg", "tracemin_lu"):
+
+        def find_fiedler(L, x, normalized, tol, seed):
+            q = 1 if method == "tracemin_pcg" else min(4, L.shape[0] - 1)
+            X = np.asarray(seed.normal(size=(q, L.shape[0]))).T
+            sigma, X = _tracemin_fiedler(L, X, normalized, tol, method)
+            return sigma[0], X[:, 0]
+
+    elif method == "lanczos" or method == "lobpcg":
+
+        def find_fiedler(L, x, normalized, tol, seed):
+            import scipy as sp
+
+            L = sp.sparse.csc_array(L, dtype=float)
+            n = L.shape[0]
+            if normalized:
+                # TODO: rm csc_array wrapping when spdiags array creation becomes available
+                D = sp.sparse.csc_array(
+                    sp.sparse.spdiags(1.0 / np.sqrt(L.diagonal()), [0], n, n)
+                )
+                L = D @ L @ D
+            if method == "lanczos" or n < 10:
+                # Avoid LOBPCG when n < 10 due to
+                # https://github.com/scipy/scipy/issues/3592
+                # https://github.com/scipy/scipy/pull/3594
+                sigma, X = sp.sparse.linalg.eigsh(
+                    L, 2, which="SM", tol=tol, return_eigenvectors=True
+                )
+                return sigma[1], X[:, 1]
+            else:
+                X = np.asarray(np.atleast_2d(x).T)
+                # TODO: rm csr_array wrapping when spdiags array creation becomes available
+                M = sp.sparse.csr_array(sp.sparse.spdiags(1.0 / L.diagonal(), 0, n, n))
+                Y = np.ones(n)
+                if normalized:
+                    Y /= D.diagonal()
+                sigma, X = sp.sparse.linalg.lobpcg(
+                    L, X, M=M, Y=np.atleast_2d(Y).T, tol=tol, maxiter=n, largest=False
+                )
+                return sigma[0], X[:, 0]
+
+    else:
+        raise nx.NetworkXError(f"unknown method {method!r}.")
+
+    return find_fiedler
+
+
+@not_implemented_for("directed")
+@np_random_state(5)
+@nx._dispatchable(edge_attrs="weight")
+def algebraic_connectivity(
+    G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None
+):
+    """Returns the algebraic connectivity of an undirected graph.
+
+    The algebraic connectivity of a connected undirected graph is the second
+    smallest eigenvalue of its Laplacian matrix.
+
+    Parameters
+    ----------
+    G : NetworkX graph
+        An undirected graph.
+
+    weight : object, optional (default: None)
+        The data key used to determine the weight of each edge. If None, then
+        each edge has unit weight.
+
+    normalized : bool, optional (default: False)
+        Whether the normalized Laplacian matrix is used.
+
+    tol : float, optional (default: 1e-8)
+        Tolerance of relative residual in eigenvalue computation.
+
+    method : string, optional (default: 'tracemin_pcg')
+        Method of eigenvalue computation. It must be one of the tracemin
+        options shown below (TraceMIN), 'lanczos' (Lanczos iteration)
+        or 'lobpcg' (LOBPCG).
+
+        The TraceMIN algorithm uses a linear system solver. The following
+        values allow specifying the solver to be used.
+
+        =============== ========================================
+        Value           Solver
+        =============== ========================================
+        'tracemin_pcg'  Preconditioned conjugate gradient method
+        'tracemin_lu'   LU factorization
+        =============== ========================================
+
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    algebraic_connectivity : float
+        Algebraic connectivity.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If G is directed.
+
+    NetworkXError
+        If G has less than two nodes.
+
+    Notes
+    -----
+    Edge weights are interpreted by their absolute values. For MultiGraph's,
+    weights of parallel edges are summed. Zero-weighted edges are ignored.
+ + See Also + -------- + laplacian_matrix + + Examples + -------- + For undirected graphs algebraic connectivity can tell us if a graph is connected or not + `G` is connected iff ``algebraic_connectivity(G) > 0``: + + >>> G = nx.complete_graph(5) + >>> nx.algebraic_connectivity(G) > 0 + True + >>> G.add_node(10) # G is no longer connected + >>> nx.algebraic_connectivity(G) > 0 + False + + """ + if len(G) < 2: + raise nx.NetworkXError("graph has less than two nodes.") + G = _preprocess_graph(G, weight) + if not nx.is_connected(G): + return 0.0 + + L = nx.laplacian_matrix(G) + if L.shape[0] == 2: + return 2.0 * float(L[0, 0]) if not normalized else 2.0 + + find_fiedler = _get_fiedler_func(method) + x = None if method != "lobpcg" else _rcm_estimate(G, G) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) + return float(sigma) + + +@not_implemented_for("directed") +@np_random_state(5) +@nx._dispatchable(edge_attrs="weight") +def fiedler_vector( + G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None +): + """Returns the Fiedler vector of a connected undirected graph. + + The Fiedler vector of a connected undirected graph is the eigenvector + corresponding to the second smallest eigenvalue of the Laplacian matrix + of the graph. + + Parameters + ---------- + G : NetworkX graph + An undirected graph. + + weight : object, optional (default: None) + The data key used to determine the weight of each edge. If None, then + each edge has unit weight. + + normalized : bool, optional (default: False) + Whether the normalized Laplacian matrix is used. + + tol : float, optional (default: 1e-8) + Tolerance of relative residual in eigenvalue computation. + + method : string, optional (default: 'tracemin_pcg') + Method of eigenvalue computation. It must be one of the tracemin + options shown below (TraceMIN), 'lanczos' (Lanczos iteration) + or 'lobpcg' (LOBPCG). + + The TraceMIN algorithm uses a linear system solver. The following + values allow specifying the solver to be used. + + =============== ======================================== + Value Solver + =============== ======================================== + 'tracemin_pcg' Preconditioned conjugate gradient method + 'tracemin_lu' LU factorization + =============== ======================================== + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + fiedler_vector : NumPy array of floats. + Fiedler vector. + + Raises + ------ + NetworkXNotImplemented + If G is directed. + + NetworkXError + If G has less than two nodes or is not connected. + + Notes + ----- + Edge weights are interpreted by their absolute values. For MultiGraph's, + weights of parallel edges are summed. Zero-weighted edges are ignored. + + See Also + -------- + laplacian_matrix + + Examples + -------- + Given a connected graph the signs of the values in the Fiedler vector can be + used to partition the graph into two components. + + >>> G = nx.barbell_graph(5, 0) + >>> nx.fiedler_vector(G, normalized=True, seed=1) + array([-0.32864129, -0.32864129, -0.32864129, -0.32864129, -0.26072899, + 0.26072899, 0.32864129, 0.32864129, 0.32864129, 0.32864129]) + + The connected components are the two 5-node cliques of the barbell graph. 
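+
+    The signs of the entries give the partition (a quick check that
+    follows from the values shown above)::
+
+        >>> v = nx.fiedler_vector(G, normalized=True, seed=1)
+        >>> {n for n, val in zip(G, v) if val < 0}
+        {0, 1, 2, 3, 4}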
+ """ + import numpy as np + + if len(G) < 2: + raise nx.NetworkXError("graph has less than two nodes.") + G = _preprocess_graph(G, weight) + if not nx.is_connected(G): + raise nx.NetworkXError("graph is not connected.") + + if len(G) == 2: + return np.array([1.0, -1.0]) + + find_fiedler = _get_fiedler_func(method) + L = nx.laplacian_matrix(G) + x = None if method != "lobpcg" else _rcm_estimate(G, G) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) + return fiedler + + +@np_random_state(5) +@nx._dispatchable(edge_attrs="weight") +def spectral_ordering( + G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None +): + """Compute the spectral_ordering of a graph. + + The spectral ordering of a graph is an ordering of its nodes where nodes + in the same weakly connected components appear contiguous and ordered by + their corresponding elements in the Fiedler vector of the component. + + Parameters + ---------- + G : NetworkX graph + A graph. + + weight : object, optional (default: None) + The data key used to determine the weight of each edge. If None, then + each edge has unit weight. + + normalized : bool, optional (default: False) + Whether the normalized Laplacian matrix is used. + + tol : float, optional (default: 1e-8) + Tolerance of relative residual in eigenvalue computation. + + method : string, optional (default: 'tracemin_pcg') + Method of eigenvalue computation. It must be one of the tracemin + options shown below (TraceMIN), 'lanczos' (Lanczos iteration) + or 'lobpcg' (LOBPCG). + + The TraceMIN algorithm uses a linear system solver. The following + values allow specifying the solver to be used. + + =============== ======================================== + Value Solver + =============== ======================================== + 'tracemin_pcg' Preconditioned conjugate gradient method + 'tracemin_lu' LU factorization + =============== ======================================== + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + spectral_ordering : NumPy array of floats. + Spectral ordering of nodes. + + Raises + ------ + NetworkXError + If G is empty. + + Notes + ----- + Edge weights are interpreted by their absolute values. For MultiGraph's, + weights of parallel edges are summed. Zero-weighted edges are ignored. + + See Also + -------- + laplacian_matrix + """ + if len(G) == 0: + raise nx.NetworkXError("graph is empty.") + G = _preprocess_graph(G, weight) + + find_fiedler = _get_fiedler_func(method) + order = [] + for component in nx.connected_components(G): + size = len(component) + if size > 2: + L = nx.laplacian_matrix(G, component) + x = None if method != "lobpcg" else _rcm_estimate(G, component) + sigma, fiedler = find_fiedler(L, x, normalized, tol, seed) + sort_info = zip(fiedler, range(size), component) + order.extend(u for x, c, u in sorted(sort_info)) + else: + order.extend(component) + + return order + + +@nx._dispatchable(edge_attrs="weight") +def spectral_bisection( + G, weight="weight", normalized=False, tol=1e-8, method="tracemin_pcg", seed=None +): + """Bisect the graph using the Fiedler vector. + + This method uses the Fiedler vector to bisect a graph. + The partition is defined by the nodes which are associated with + either positive or negative values in the vector. + + Parameters + ---------- + G : NetworkX Graph + + weight : str, optional (default: weight) + The data key used to determine the weight of each edge. 
If None, then + each edge has unit weight. + + normalized : bool, optional (default: False) + Whether the normalized Laplacian matrix is used. + + tol : float, optional (default: 1e-8) + Tolerance of relative residual in eigenvalue computation. + + method : string, optional (default: 'tracemin_pcg') + Method of eigenvalue computation. It must be one of the tracemin + options shown below (TraceMIN), 'lanczos' (Lanczos iteration) + or 'lobpcg' (LOBPCG). + + The TraceMIN algorithm uses a linear system solver. The following + values allow specifying the solver to be used. + + =============== ======================================== + Value Solver + =============== ======================================== + 'tracemin_pcg' Preconditioned conjugate gradient method + 'tracemin_lu' LU factorization + =============== ======================================== + + seed : integer, random_state, or None (default) + Indicator of random number generation state. + See :ref:`Randomness`. + + Returns + ------- + bisection : tuple of sets + Sets with the bisection of nodes + + Examples + -------- + >>> G = nx.barbell_graph(3, 0) + >>> nx.spectral_bisection(G) + ({0, 1, 2}, {3, 4, 5}) + + References + ---------- + .. [1] M. E. J Newman 'Networks: An Introduction', pages 364-370 + Oxford University Press 2011. + """ + import numpy as np + + v = nx.fiedler_vector(G, weight, normalized, tol, method, seed) + nodes = np.array(list(G)) + pos_vals = v >= 0 + + return set(nodes[~pos_vals].tolist()), set(nodes[pos_vals].tolist()) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/attrmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/attrmatrix.py new file mode 100644 index 0000000..989e8ff --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/attrmatrix.py @@ -0,0 +1,466 @@ +""" +Functions for constructing matrix-like objects from graph attributes. +""" + +import networkx as nx + +__all__ = ["attr_matrix", "attr_sparse_matrix"] + + +def _node_value(G, node_attr): + """Returns a function that returns a value from G.nodes[u]. + + We return a function expecting a node as its sole argument. Then, in the + simplest scenario, the returned function will return G.nodes[u][node_attr]. + However, we also handle the case when `node_attr` is None (returns the node) + or when `node_attr` is a function itself. + + Parameters + ---------- + G : graph + A NetworkX graph + + node_attr : {None, str, callable} + Specification of how the value of the node attribute should be obtained + from the node attribute dictionary. + + Returns + ------- + value : function + A function expecting a node as its sole argument. The function will + returns a value from G.nodes[u] that depends on `edge_attr`. + + """ + if node_attr is None: + + def value(u): + return u + + elif not callable(node_attr): + # assume it is a key for the node attribute dictionary + def value(u): + return G.nodes[u][node_attr] + + else: + # Advanced: Allow users to specify something else. + # + # For example, + # node_attr = lambda u: G.nodes[u].get('size', .5) * 3 + # + value = node_attr + + return value + + +def _edge_value(G, edge_attr): + """Returns a function that returns a value from G[u][v]. + + Suppose there exists an edge between u and v. Then we return a function + expecting u and v as arguments. For Graph and DiGraph, G[u][v] is + the edge attribute dictionary, and the function (essentially) returns + G[u][v][edge_attr]. However, we also handle cases when `edge_attr` is None + and when it is a function itself. 
For MultiGraph and MultiDiGraph, G[u][v] + is a dictionary of all edges between u and v. In this case, the returned + function sums the value of `edge_attr` for every edge between u and v. + + Parameters + ---------- + G : graph + A NetworkX graph + + edge_attr : {None, str, callable} + Specification of how the value of the edge attribute should be obtained + from the edge attribute dictionary, G[u][v]. For multigraphs, G[u][v] + is a dictionary of all the edges between u and v. This allows for + special treatment of multiedges. + + Returns + ------- + value : function + A function expecting two nodes as parameters. The nodes should + represent the from- and to- node of an edge. The function will + return a value from G[u][v] that depends on `edge_attr`. + + """ + + if edge_attr is None: + # topological count of edges + + if G.is_multigraph(): + + def value(u, v): + return len(G[u][v]) + + else: + + def value(u, v): + return 1 + + elif not callable(edge_attr): + # assume it is a key for the edge attribute dictionary + + if edge_attr == "weight": + # provide a default value + if G.is_multigraph(): + + def value(u, v): + return sum(d.get(edge_attr, 1) for d in G[u][v].values()) + + else: + + def value(u, v): + return G[u][v].get(edge_attr, 1) + + else: + # otherwise, the edge attribute MUST exist for each edge + if G.is_multigraph(): + + def value(u, v): + return sum(d[edge_attr] for d in G[u][v].values()) + + else: + + def value(u, v): + return G[u][v][edge_attr] + + else: + # Advanced: Allow users to specify something else. + # + # Alternative default value: + # edge_attr = lambda u,v: G[u][v].get('thickness', .5) + # + # Function on an attribute: + # edge_attr = lambda u,v: abs(G[u][v]['weight']) + # + # Handle Multi(Di)Graphs differently: + # edge_attr = lambda u,v: numpy.prod([d['size'] for d in G[u][v].values()]) + # + # Ignore multiple edges + # edge_attr = lambda u,v: 1 if len(G[u][v]) else 0 + # + value = edge_attr + + return value + + +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +def attr_matrix( + G, + edge_attr=None, + node_attr=None, + normalized=False, + rc_order=None, + dtype=None, + order=None, +): + """Returns the attribute matrix using attributes from `G` as a numpy array. + + If only `G` is passed in, then the adjacency matrix is constructed. + + Let A be a discrete set of values for the node attribute `node_attr`. Then + the elements of A represent the rows and columns of the constructed matrix. + Now, iterate through every edge e=(u,v) in `G` and consider the value + of the edge attribute `edge_attr`. If ua and va are the values of the + node attribute `node_attr` for u and v, respectively, then the value of + the edge attribute is added to the matrix element at (ua, va). + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the attribute matrix. + + edge_attr : str, optional (default: number of edges for each matrix element) + Each element of the matrix represents a running total of the + specified edge attribute for edges whose node attributes correspond + to the rows/cols of the matrix. The attribute must be present for + all edges in the graph. If no attribute is specified, then we + just count the number of edges whose node attributes correspond + to the matrix element. + + node_attr : str, optional (default: use nodes of the graph) + Each row and column in the matrix represents a particular value + of the node attribute. The attribute must be present for all nodes + in the graph. 
Note, the values of this attribute should be reliably + hashable. So, float values are not recommended. If no attribute is + specified, then the rows and columns will be the nodes of the graph. + + normalized : bool, optional (default: False) + If True, then each row is normalized by the summation of its values. + + rc_order : list, optional (default: order of nodes in G) + A list of the node attribute values. This list specifies the ordering + of rows and columns of the array. If no ordering is provided, then + the ordering will be the same as the node order in `G`. + When `rc_order` is `None`, the function returns a 2-tuple ``(matrix, ordering)`` + + Other Parameters + ---------------- + dtype : NumPy data-type, optional + A valid NumPy dtype used to initialize the array. Keep in mind certain + dtypes can yield unexpected results if the array is to be normalized. + The parameter is passed to numpy.zeros(). If unspecified, the NumPy + default is used. + + order : {'C', 'F'}, optional + Whether to store multidimensional data in C- or Fortran-contiguous + (row- or column-wise) order in memory. This parameter is passed to + numpy.zeros(). If unspecified, the NumPy default is used. + + Returns + ------- + M : 2D NumPy ndarray + The attribute matrix. + + ordering : list + If `rc_order` was specified, then only the attribute matrix is returned. + However, if `rc_order` was None, then the ordering used to construct + the matrix is returned as well. + + Examples + -------- + Construct an adjacency matrix: + + >>> G = nx.Graph() + >>> G.add_edge(0, 1, thickness=1, weight=3) + >>> G.add_edge(0, 2, thickness=2) + >>> G.add_edge(1, 2, thickness=3) + >>> nx.attr_matrix(G, rc_order=[0, 1, 2]) + array([[0., 1., 1.], + [1., 0., 1.], + [1., 1., 0.]]) + + Alternatively, we can obtain the matrix describing edge thickness. + + >>> nx.attr_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2]) + array([[0., 1., 2.], + [1., 0., 3.], + [2., 3., 0.]]) + + We can also color the nodes and ask for the probability distribution over + all edges (u,v) describing: + + Pr(v has color Y | u has color X) + + >>> G.nodes[0]["color"] = "red" + >>> G.nodes[1]["color"] = "red" + >>> G.nodes[2]["color"] = "blue" + >>> rc = ["red", "blue"] + >>> nx.attr_matrix(G, node_attr="color", normalized=True, rc_order=rc) + array([[0.33333333, 0.66666667], + [1. , 0. ]]) + + For example, the above tells us that for all edges (u,v): + + Pr( v is red | u is red) = 1/3 + Pr( v is blue | u is red) = 2/3 + + Pr( v is red | u is blue) = 1 + Pr( v is blue | u is blue) = 0 + + Finally, we can obtain the total weights listed by the node colors. + + >>> nx.attr_matrix(G, edge_attr="weight", node_attr="color", rc_order=rc) + array([[3., 2.], + [2., 0.]]) + + Thus, the total weight over all edges (u,v) with u and v having colors: + + (red, red) is 3 # the sole contribution is from edge (0,1) + (red, blue) is 2 # contributions from edges (0,2) and (1,2) + (blue, red) is 2 # same as (red, blue) since graph is undirected + (blue, blue) is 0 # there are no edges with blue endpoints + + """ + import numpy as np + + edge_value = _edge_value(G, edge_attr) + node_value = _node_value(G, node_attr) + + if rc_order is None: + ordering = list({node_value(n) for n in G}) + else: + ordering = rc_order + + N = len(ordering) + undirected = not G.is_directed() + index = dict(zip(ordering, range(N))) + M = np.zeros((N, N), dtype=dtype, order=order) + + seen = set() + for u, nbrdict in G.adjacency(): + for v in nbrdict: + # Obtain the node attribute values. 
+ i, j = index[node_value(u)], index[node_value(v)] + if v not in seen: + M[i, j] += edge_value(u, v) + if undirected: + M[j, i] = M[i, j] + + if undirected: + seen.add(u) + + if normalized: + M /= M.sum(axis=1).reshape((N, 1)) + + if rc_order is None: + return M, ordering + else: + return M + + +@nx._dispatchable(edge_attrs={"edge_attr": None}, node_attrs="node_attr") +def attr_sparse_matrix( + G, edge_attr=None, node_attr=None, normalized=False, rc_order=None, dtype=None +): + """Returns a SciPy sparse array using attributes from G. + + If only `G` is passed in, then the adjacency matrix is constructed. + + Let A be a discrete set of values for the node attribute `node_attr`. Then + the elements of A represent the rows and columns of the constructed matrix. + Now, iterate through every edge e=(u,v) in `G` and consider the value + of the edge attribute `edge_attr`. If ua and va are the values of the + node attribute `node_attr` for u and v, respectively, then the value of + the edge attribute is added to the matrix element at (ua, va). + + Parameters + ---------- + G : graph + The NetworkX graph used to construct the NumPy matrix. + + edge_attr : str, optional (default: number of edges for each matrix element) + Each element of the matrix represents a running total of the + specified edge attribute for edges whose node attributes correspond + to the rows/cols of the matrix. The attribute must be present for + all edges in the graph. If no attribute is specified, then we + just count the number of edges whose node attributes correspond + to the matrix element. + + node_attr : str, optional (default: use nodes of the graph) + Each row and column in the matrix represents a particular value + of the node attribute. The attribute must be present for all nodes + in the graph. Note, the values of this attribute should be reliably + hashable. So, float values are not recommended. If no attribute is + specified, then the rows and columns will be the nodes of the graph. + + normalized : bool, optional (default: False) + If True, then each row is normalized by the summation of its values. + + rc_order : list, optional (default: order of nodes in G) + A list of the node attribute values. This list specifies the ordering + of rows and columns of the array and the return value. If no ordering + is provided, then the ordering will be that of nodes in `G`. + + Other Parameters + ---------------- + dtype : NumPy data-type, optional + A valid NumPy dtype used to initialize the array. Keep in mind certain + dtypes can yield unexpected results if the array is to be normalized. + The parameter is passed to numpy.zeros(). If unspecified, the NumPy + default is used. + + Returns + ------- + M : SciPy sparse array + The attribute matrix. + + ordering : list + If `rc_order` was specified, then only the matrix is returned. + However, if `rc_order` was None, then the ordering used to construct + the matrix is returned as well. + + Examples + -------- + Construct an adjacency matrix: + + >>> G = nx.Graph() + >>> G.add_edge(0, 1, thickness=1, weight=3) + >>> G.add_edge(0, 2, thickness=2) + >>> G.add_edge(1, 2, thickness=3) + >>> M = nx.attr_sparse_matrix(G, rc_order=[0, 1, 2]) + >>> M.toarray() + array([[0., 1., 1.], + [1., 0., 1.], + [1., 1., 0.]]) + + Alternatively, we can obtain the matrix describing edge thickness. 
+
+    >>> M = nx.attr_sparse_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2])
+    >>> M.toarray()
+    array([[0., 1., 2.],
+           [1., 0., 3.],
+           [2., 3., 0.]])
+
+    We can also color the nodes and ask for the probability distribution over
+    all edges (u,v) describing:
+
+        Pr(v has color Y | u has color X)
+
+    >>> G.nodes[0]["color"] = "red"
+    >>> G.nodes[1]["color"] = "red"
+    >>> G.nodes[2]["color"] = "blue"
+    >>> rc = ["red", "blue"]
+    >>> M = nx.attr_sparse_matrix(G, node_attr="color", normalized=True, rc_order=rc)
+    >>> M.toarray()
+    array([[0.33333333, 0.66666667],
+           [1.        , 0.        ]])
+
+    For example, the above tells us that for all edges (u,v):
+
+        Pr( v is red  | u is red)  = 1/3
+        Pr( v is blue | u is red)  = 2/3
+
+        Pr( v is red  | u is blue) = 1
+        Pr( v is blue | u is blue) = 0
+
+    Finally, we can obtain the total weights listed by the node colors.
+
+    >>> M = nx.attr_sparse_matrix(G, edge_attr="weight", node_attr="color", rc_order=rc)
+    >>> M.toarray()
+    array([[3., 2.],
+           [2., 0.]])
+
+    Thus, the total weight over all edges (u,v) with u and v having colors:
+
+        (red, red)   is 3   # the sole contribution is from edge (0,1)
+        (red, blue)  is 2   # contributions from edges (0,2) and (1,2)
+        (blue, red)  is 2   # same as (red, blue) since graph is undirected
+        (blue, blue) is 0   # there are no edges with blue endpoints
+
+    """
+    import numpy as np
+    import scipy as sp
+
+    edge_value = _edge_value(G, edge_attr)
+    node_value = _node_value(G, node_attr)
+
+    if rc_order is None:
+        ordering = list({node_value(n) for n in G})
+    else:
+        ordering = rc_order
+
+    N = len(ordering)
+    undirected = not G.is_directed()
+    index = dict(zip(ordering, range(N)))
+    M = sp.sparse.lil_array((N, N), dtype=dtype)
+
+    seen = set()
+    for u, nbrdict in G.adjacency():
+        for v in nbrdict:
+            # Obtain the node attribute values.
+            i, j = index[node_value(u)], index[node_value(v)]
+            if v not in seen:
+                M[i, j] += edge_value(u, v)
+                if undirected:
+                    M[j, i] = M[i, j]
+
+        if undirected:
+            seen.add(u)
+
+    if normalized:
+        M *= 1 / M.sum(axis=1)[:, np.newaxis]  # in-place mult preserves sparse
+
+    if rc_order is None:
+        return M, ordering
+    else:
+        return M
diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/bethehessianmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/bethehessianmatrix.py
new file mode 100644
index 0000000..3d42fc6
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/linalg/bethehessianmatrix.py
@@ -0,0 +1,79 @@
+"""Bethe Hessian or deformed Laplacian matrix of graphs."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["bethe_hessian_matrix"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable
+def bethe_hessian_matrix(G, r=None, nodelist=None):
+    r"""Returns the Bethe Hessian matrix of G.
+
+    The Bethe Hessian is a family of matrices parametrized by r, defined as
+    H(r) = (r^2 - 1) I - r A + D where A is the adjacency matrix, D is the
+    diagonal matrix of node degrees, and I is the identity matrix. It is equal
+    to the graph Laplacian when the regularizer r = 1.
+
+    The default choice of regularizer is the ratio [2]_
+
+    .. math::
+        r_m = \left(\sum k_i \right)^{-1}\left(\sum k_i^2 \right) - 1
+
+    Parameters
+    ----------
+    G : Graph
+       A NetworkX graph
+    r : float
+       Regularizer parameter
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+       If nodelist is None, then the ordering is produced by ``G.nodes()``.
+ + Returns + ------- + H : scipy.sparse.csr_array + The Bethe Hessian matrix of `G`, with parameter `r`. + + Examples + -------- + >>> k = [3, 2, 2, 1, 0] + >>> G = nx.havel_hakimi_graph(k) + >>> H = nx.bethe_hessian_matrix(G) + >>> H.toarray() + array([[ 3.5625, -1.25 , -1.25 , -1.25 , 0. ], + [-1.25 , 2.5625, -1.25 , 0. , 0. ], + [-1.25 , -1.25 , 2.5625, 0. , 0. ], + [-1.25 , 0. , 0. , 1.5625, 0. ], + [ 0. , 0. , 0. , 0. , 0.5625]]) + + See Also + -------- + bethe_hessian_spectrum + adjacency_matrix + laplacian_matrix + + References + ---------- + .. [1] A. Saade, F. Krzakala and L. Zdeborová + "Spectral Clustering of Graphs with the Bethe Hessian", + Advances in Neural Information Processing Systems, 2014. + .. [2] C. M. Le, E. Levina + "Estimating the number of communities in networks by spectral methods" + arXiv:1507.00827, 2015. + """ + import scipy as sp + + if nodelist is None: + nodelist = list(G) + if r is None: + r = sum(d**2 for v, d in nx.degree(G)) / sum(d for v, d in nx.degree(G)) - 1 + A = nx.to_scipy_sparse_array(G, nodelist=nodelist, format="csr") + n, m = A.shape + # TODO: Rm csr_array wrapper when spdiags array creation becomes available + D = sp.sparse.csr_array(sp.sparse.spdiags(A.sum(axis=1), 0, m, n, format="csr")) + # TODO: Rm csr_array wrapper when eye array creation becomes available + I = sp.sparse.csr_array(sp.sparse.eye(m, n, format="csr")) + return (r**2 - 1) * I - r * A + D diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/graphmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/graphmatrix.py new file mode 100644 index 0000000..9f477bc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/graphmatrix.py @@ -0,0 +1,168 @@ +""" +Adjacency matrix and incidence matrix of graphs. +""" + +import networkx as nx + +__all__ = ["incidence_matrix", "adjacency_matrix"] + + +@nx._dispatchable(edge_attrs="weight") +def incidence_matrix( + G, nodelist=None, edgelist=None, oriented=False, weight=None, *, dtype=None +): + """Returns incidence matrix of G. + + The incidence matrix assigns each row to a node and each column to an edge. + For a standard incidence matrix a 1 appears wherever a row's node is + incident on the column's edge. For an oriented incidence matrix each + edge is assigned an orientation (arbitrarily for undirected and aligning to + direction for directed). A -1 appears for the source (tail) of an edge and + 1 for the destination (head) of the edge. The elements are zero otherwise. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list, optional (default= all nodes in G) + The rows are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + edgelist : list, optional (default= all edges in G) + The columns are ordered according to the edges in edgelist. + If edgelist is None, then the ordering is produced by G.edges(). + + oriented: bool, optional (default=False) + If True, matrix elements are +1 or -1 for the head or tail node + respectively of each edge. If False, +1 occurs at both nodes. + + weight : string or None, optional (default=None) + The edge data key used to provide each value in the matrix. + If None, then each edge has weight 1. Edge weights, if used, + should be positive so that the orientation can provide the sign. + + dtype : a NumPy dtype or None (default=None) + The dtype of the output sparse array. This type should be a compatible + type of the weight argument, eg. 
if weight would return a float this + argument should also be a float. + If None, then the default for SciPy is used. + + Returns + ------- + A : SciPy sparse array + The incidence matrix of G. + + Notes + ----- + For MultiGraph/MultiDiGraph, the edges in edgelist should be + (u,v,key) 3-tuples. + + "Networks are the best discrete model for so many problems in + applied mathematics" [1]_. + + References + ---------- + .. [1] Gil Strang, Network applications: A = incidence matrix, + http://videolectures.net/mit18085f07_strang_lec03/ + """ + import scipy as sp + + if nodelist is None: + nodelist = list(G) + if edgelist is None: + if G.is_multigraph(): + edgelist = list(G.edges(keys=True)) + else: + edgelist = list(G.edges()) + A = sp.sparse.lil_array((len(nodelist), len(edgelist)), dtype=dtype) + node_index = {node: i for i, node in enumerate(nodelist)} + for ei, e in enumerate(edgelist): + (u, v) = e[:2] + if u == v: + continue # self loops give zero column + try: + ui = node_index[u] + vi = node_index[v] + except KeyError as err: + raise nx.NetworkXError( + f"node {u} or {v} in edgelist but not in nodelist" + ) from err + if weight is None: + wt = 1 + else: + if G.is_multigraph(): + ekey = e[2] + wt = G[u][v][ekey].get(weight, 1) + else: + wt = G[u][v].get(weight, 1) + if oriented: + A[ui, ei] = -wt + A[vi, ei] = wt + else: + A[ui, ei] = wt + A[vi, ei] = wt + return A.asformat("csc") + + +@nx._dispatchable(edge_attrs="weight") +def adjacency_matrix(G, nodelist=None, dtype=None, weight="weight"): + """Returns adjacency matrix of `G`. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list, optional + The rows and columns are ordered according to the nodes in `nodelist`. + If ``nodelist=None`` (the default), then the ordering is produced by + ``G.nodes()``. + + dtype : NumPy data-type, optional + The desired data-type for the array. + If `None`, then the NumPy default is used. + + weight : string or None, optional (default='weight') + The edge data key used to provide each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + A : SciPy sparse array + Adjacency matrix representation of G. + + Notes + ----- + For directed graphs, entry ``i, j`` corresponds to an edge from ``i`` to ``j``. + + If you want a pure Python adjacency matrix representation try + :func:`~networkx.convert.to_dict_of_dicts` which will return a + dictionary-of-dictionaries format that can be addressed as a + sparse matrix. + + For multigraphs with parallel edges the weights are summed. + See :func:`networkx.convert_matrix.to_numpy_array` for other options. + + The convention used for self-loop edges in graphs is to assign the + diagonal matrix entry value to the edge weight attribute + (or the number 1 if the edge has no weight attribute). 
If the + alternate convention of doubling the edge weight is desired the + resulting SciPy sparse array can be modified as follows:: + + >>> G = nx.Graph([(1, 1)]) + >>> A = nx.adjacency_matrix(G) + >>> A.toarray() + array([[1]]) + >>> A.setdiag(A.diagonal() * 2) + >>> A.toarray() + array([[2]]) + + See Also + -------- + to_numpy_array + to_scipy_sparse_array + to_dict_of_dicts + adjacency_spectrum + """ + return nx.to_scipy_sparse_array(G, nodelist=nodelist, dtype=dtype, weight=weight) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/laplacianmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/laplacianmatrix.py new file mode 100644 index 0000000..690924f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/laplacianmatrix.py @@ -0,0 +1,520 @@ +"""Laplacian matrix of graphs. + +All calculations here are done using the out-degree. For Laplacians using +in-degree, use `G.reverse(copy=False)` instead of `G` and take the transpose. + +The `laplacian_matrix` function provides an unnormalized matrix, +while `normalized_laplacian_matrix`, `directed_laplacian_matrix`, +and `directed_combinatorial_laplacian_matrix` are all normalized. +""" + +import networkx as nx +from networkx.utils import not_implemented_for + +__all__ = [ + "laplacian_matrix", + "normalized_laplacian_matrix", + "directed_laplacian_matrix", + "directed_combinatorial_laplacian_matrix", +] + + +@nx._dispatchable(edge_attrs="weight") +def laplacian_matrix(G, nodelist=None, weight="weight"): + """Returns the Laplacian matrix of G. + + The graph Laplacian is the matrix L = D - A, where + A is the adjacency matrix and D is the diagonal matrix of node degrees. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list, optional + The rows and columns are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + L : SciPy sparse array + The Laplacian matrix of G. + + Notes + ----- + For MultiGraph, the edges weights are summed. + + This returns an unnormalized matrix. For a normalized output, + use `normalized_laplacian_matrix`, `directed_laplacian_matrix`, + or `directed_combinatorial_laplacian_matrix`. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + See Also + -------- + :func:`~networkx.convert_matrix.to_numpy_array` + normalized_laplacian_matrix + directed_laplacian_matrix + directed_combinatorial_laplacian_matrix + :func:`~networkx.linalg.spectrum.laplacian_spectrum` + + Examples + -------- + For graphs with multiple connected components, L is permutation-similar + to a block diagonal matrix where each block is the respective Laplacian + matrix for each component. + + >>> G = nx.Graph([(1, 2), (2, 3), (4, 5)]) + >>> print(nx.laplacian_matrix(G).toarray()) + [[ 1 -1 0 0 0] + [-1 2 -1 0 0] + [ 0 -1 1 0 0] + [ 0 0 0 1 -1] + [ 0 0 0 -1 1]] + + >>> edges = [ + ... (1, 2), + ... (2, 1), + ... (2, 4), + ... (4, 3), + ... (3, 4), + ... ] + >>> DiG = nx.DiGraph(edges) + >>> print(nx.laplacian_matrix(DiG).toarray()) + [[ 1 -1 0 0] + [-1 2 -1 0] + [ 0 0 1 -1] + [ 0 0 -1 1]] + + Notice that node 4 is represented by the third column and row. This is because + by default the row/column order is the order of `G.nodes` (i.e. 
the node added + order -- in the edgelist, 4 first appears in (2, 4), before node 3 in edge (4, 3).) + To control the node order of the matrix, use the `nodelist` argument. + + >>> print(nx.laplacian_matrix(DiG, nodelist=[1, 2, 3, 4]).toarray()) + [[ 1 -1 0 0] + [-1 2 0 -1] + [ 0 0 1 -1] + [ 0 0 -1 1]] + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + >>> print(nx.laplacian_matrix(DiG.reverse(copy=False)).toarray().T) + [[ 1 -1 0 0] + [-1 1 -1 0] + [ 0 0 2 -1] + [ 0 0 -1 1]] + + References + ---------- + .. [1] Langville, Amy N., and Carl D. Meyer. Google’s PageRank and Beyond: + The Science of Search Engine Rankings. Princeton University Press, 2006. + + """ + import scipy as sp + + if nodelist is None: + nodelist = list(G) + A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr") + n, m = A.shape + # TODO: rm csr_array wrapper when spdiags can produce arrays + D = sp.sparse.csr_array(sp.sparse.spdiags(A.sum(axis=1), 0, m, n, format="csr")) + return D - A + + +@nx._dispatchable(edge_attrs="weight") +def normalized_laplacian_matrix(G, nodelist=None, weight="weight"): + r"""Returns the normalized Laplacian matrix of G. + + The normalized graph Laplacian is the matrix + + .. math:: + + N = D^{-1/2} L D^{-1/2} + + where `L` is the graph Laplacian and `D` is the diagonal matrix of + node degrees [1]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + nodelist : list, optional + The rows and columns are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + N : SciPy sparse array + The normalized Laplacian matrix of G. + + Notes + ----- + For MultiGraph, the edges weights are summed. + See :func:`to_numpy_array` for other options. + + If the Graph contains selfloops, D is defined as ``diag(sum(A, 1))``, where A is + the adjacency matrix [2]_. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + For an unnormalized output, use `laplacian_matrix`. + + Examples + -------- + + >>> import numpy as np + >>> edges = [ + ... (1, 2), + ... (2, 1), + ... (2, 4), + ... (4, 3), + ... (3, 4), + ... ] + >>> DiG = nx.DiGraph(edges) + >>> print(nx.normalized_laplacian_matrix(DiG).toarray()) + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. -0.70710678 0. ] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] + + Notice that node 4 is represented by the third column and row. This is because + by default the row/column order is the order of `G.nodes` (i.e. the node added + order -- in the edgelist, 4 first appears in (2, 4), before node 3 in edge (4, 3).) + To control the node order of the matrix, use the `nodelist` argument. + + >>> print(nx.normalized_laplacian_matrix(DiG, nodelist=[1, 2, 3, 4]).toarray()) + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. 0. -0.70710678] + [ 0. 0. 1. -1. ] + [ 0. 0. -1. 1. ]] + >>> G = nx.Graph(edges) + >>> print(nx.normalized_laplacian_matrix(G).toarray()) + [[ 1. -0.70710678 0. 0. ] + [-0.70710678 1. -0.5 0. ] + [ 0. -0.5 1. -0.70710678] + [ 0. 0. -0.70710678 1. 
]] + + See Also + -------- + laplacian_matrix + normalized_laplacian_spectrum + directed_laplacian_matrix + directed_combinatorial_laplacian_matrix + + References + ---------- + .. [1] Fan Chung-Graham, Spectral Graph Theory, + CBMS Regional Conference Series in Mathematics, Number 92, 1997. + .. [2] Steve Butler, Interlacing For Weighted Graphs Using The Normalized + Laplacian, Electronic Journal of Linear Algebra, Volume 16, pp. 90-98, + March 2007. + .. [3] Langville, Amy N., and Carl D. Meyer. Google’s PageRank and Beyond: + The Science of Search Engine Rankings. Princeton University Press, 2006. + """ + import numpy as np + import scipy as sp + + if nodelist is None: + nodelist = list(G) + A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr") + n, _ = A.shape + diags = A.sum(axis=1) + # TODO: rm csr_array wrapper when spdiags can produce arrays + D = sp.sparse.csr_array(sp.sparse.spdiags(diags, 0, n, n, format="csr")) + L = D - A + with np.errstate(divide="ignore"): + diags_sqrt = 1.0 / np.sqrt(diags) + diags_sqrt[np.isinf(diags_sqrt)] = 0 + # TODO: rm csr_array wrapper when spdiags can produce arrays + DH = sp.sparse.csr_array(sp.sparse.spdiags(diags_sqrt, 0, n, n, format="csr")) + return DH @ (L @ DH) + + +############################################################################### +# Code based on work from https://github.com/bjedwards + + +@not_implemented_for("undirected") +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def directed_laplacian_matrix( + G, nodelist=None, weight="weight", walk_type=None, alpha=0.95 +): + r"""Returns the directed Laplacian matrix of G. + + The graph directed Laplacian is the matrix + + .. math:: + + L = I - \frac{1}{2} \left (\Phi^{1/2} P \Phi^{-1/2} + \Phi^{-1/2} P^T \Phi^{1/2} \right ) + + where `I` is the identity matrix, `P` is the transition matrix of the + graph, and `\Phi` a matrix with the Perron vector of `P` in the diagonal and + zeros elsewhere [1]_. + + Depending on the value of walk_type, `P` can be the transition matrix + induced by a random walk, a lazy random walk, or a random walk with + teleportation (PageRank). + + Parameters + ---------- + G : DiGraph + A NetworkX graph + + nodelist : list, optional + The rows and columns are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + walk_type : string or None, optional (default=None) + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. + + alpha : real + (1 - alpha) is the teleportation probability used with pagerank + + Returns + ------- + L : NumPy matrix + Normalized Laplacian of G. + + Notes + ----- + Only implemented for DiGraphs + + The result is always a symmetric matrix. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + See Also + -------- + laplacian_matrix + normalized_laplacian_matrix + directed_combinatorial_laplacian_matrix + + References + ---------- + .. [1] Fan Chung (2005). 
+ Laplacians and the Cheeger inequality for directed graphs. + Annals of Combinatorics, 9(1), 2005 + """ + import numpy as np + import scipy as sp + + # NOTE: P has type ndarray if walk_type=="pagerank", else csr_array + P = _transition_matrix( + G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha + ) + + n, m = P.shape + + evals, evecs = sp.sparse.linalg.eigs(P.T, k=1) + v = evecs.flatten().real + p = v / v.sum() + # p>=0 by Perron-Frobenius Thm. Use abs() to fix roundoff across zero gh-6865 + sqrtp = np.sqrt(np.abs(p)) + Q = ( + # TODO: rm csr_array wrapper when spdiags creates arrays + sp.sparse.csr_array(sp.sparse.spdiags(sqrtp, 0, n, n)) + @ P + # TODO: rm csr_array wrapper when spdiags creates arrays + @ sp.sparse.csr_array(sp.sparse.spdiags(1.0 / sqrtp, 0, n, n)) + ) + # NOTE: This could be sparsified for the non-pagerank cases + I = np.identity(len(G)) + + return I - (Q + Q.T) / 2.0 + + +@not_implemented_for("undirected") +@not_implemented_for("multigraph") +@nx._dispatchable(edge_attrs="weight") +def directed_combinatorial_laplacian_matrix( + G, nodelist=None, weight="weight", walk_type=None, alpha=0.95 +): + r"""Return the directed combinatorial Laplacian matrix of G. + + The graph directed combinatorial Laplacian is the matrix + + .. math:: + + L = \Phi - \frac{1}{2} \left (\Phi P + P^T \Phi \right) + + where `P` is the transition matrix of the graph and `\Phi` a matrix + with the Perron vector of `P` in the diagonal and zeros elsewhere [1]_. + + Depending on the value of walk_type, `P` can be the transition matrix + induced by a random walk, a lazy random walk, or a random walk with + teleportation (PageRank). + + Parameters + ---------- + G : DiGraph + A NetworkX graph + + nodelist : list, optional + The rows and columns are ordered according to the nodes in nodelist. + If nodelist is None, then the ordering is produced by G.nodes(). + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + walk_type : string or None, optional (default=None) + One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None`` + (the default), then a value is selected according to the properties of `G`: + - ``walk_type="random"`` if `G` is strongly connected and aperiodic + - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic + - ``walk_type="pagerank"`` for all other cases. + + alpha : real + (1 - alpha) is the teleportation probability used with pagerank + + Returns + ------- + L : NumPy matrix + Combinatorial Laplacian of G. + + Notes + ----- + Only implemented for DiGraphs + + The result is always a symmetric matrix. + + This calculation uses the out-degree of the graph `G`. To use the + in-degree for calculations instead, use `G.reverse(copy=False)` and + take the transpose. + + See Also + -------- + laplacian_matrix + normalized_laplacian_matrix + directed_laplacian_matrix + + References + ---------- + .. [1] Fan Chung (2005). + Laplacians and the Cheeger inequality for directed graphs. 
+       Annals of Combinatorics, 9(1), 2005
+    """
+    import scipy as sp
+
+    P = _transition_matrix(
+        G, nodelist=nodelist, weight=weight, walk_type=walk_type, alpha=alpha
+    )
+
+    n, m = P.shape
+
+    evals, evecs = sp.sparse.linalg.eigs(P.T, k=1)
+    v = evecs.flatten().real
+    p = v / v.sum()
+    # NOTE: could be improved by not densifying
+    # TODO: Rm csr_array wrapper when spdiags array creation becomes available
+    Phi = sp.sparse.csr_array(sp.sparse.spdiags(p, 0, n, n)).toarray()
+
+    return Phi - (Phi @ P + P.T @ Phi) / 2.0
+
+
+def _transition_matrix(G, nodelist=None, weight="weight", walk_type=None, alpha=0.95):
+    """Returns the transition matrix of G.
+
+    This is a row-stochastic matrix giving the transition probabilities of a
+    random walk on the graph. Depending on the value of walk_type, P can be
+    the transition matrix induced by a random walk, a lazy random walk, or a
+    random walk with teleportation (PageRank).
+
+    Parameters
+    ----------
+    G : DiGraph
+       A NetworkX graph
+
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+       If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : string or None, optional (default='weight')
+       The edge data key used to compute each value in the matrix.
+       If None, then each edge has weight 1.
+
+    walk_type : string or None, optional (default=None)
+       One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
+       (the default), then a value is selected according to the properties of `G`:
+       - ``walk_type="random"`` if `G` is strongly connected and aperiodic
+       - ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
+       - ``walk_type="pagerank"`` for all other cases.
+
+    alpha : real
+       (1 - alpha) is the teleportation probability used with pagerank
+
+    Returns
+    -------
+    P : numpy.ndarray or SciPy sparse array
+       Transition matrix of G. The matrix is sparse for the "random" and
+       "lazy" walk types and dense for "pagerank".
+
+    Raises
+    ------
+    NetworkXError
+        If `walk_type` is not one of the recognized values, or if `alpha` is
+        not in the open interval (0, 1).
+    """
+    import numpy as np
+    import scipy as sp
+
+    if walk_type is None:
+        if nx.is_strongly_connected(G):
+            if nx.is_aperiodic(G):
+                walk_type = "random"
+            else:
+                walk_type = "lazy"
+        else:
+            walk_type = "pagerank"
+
+    A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, dtype=float)
+    n, m = A.shape
+    if walk_type in ["random", "lazy"]:
+        # TODO: Rm csr_array wrapper when spdiags array creation becomes available
+        DI = sp.sparse.csr_array(sp.sparse.spdiags(1.0 / A.sum(axis=1), 0, n, n))
+        if walk_type == "random":
+            P = DI @ A
+        else:
+            # TODO: Rm csr_array wrapper when identity array creation becomes available
+            I = sp.sparse.csr_array(sp.sparse.identity(n))
+            P = (I + DI @ A) / 2.0
+
+    elif walk_type == "pagerank":
+        if not (0 < alpha < 1):
+            raise nx.NetworkXError("alpha must be between 0 and 1")
+        # this is using a dense representation. NOTE: This should be sparsified!
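+        # The next lines build the "Google matrix": rows of dangling nodes
+        # (zero out-degree) are made uniform, each row is normalized to sum
+        # to 1, and the walk teleports to a uniformly random node with
+        # probability (1 - alpha).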
+        A = A.toarray()
+        # add constant to dangling nodes' row
+        A[A.sum(axis=1) == 0, :] = 1 / n
+        # normalize
+        A = A / A.sum(axis=1)[np.newaxis, :].T
+        P = alpha * A + (1 - alpha) / n
+    else:
+        raise nx.NetworkXError("walk_type must be random, lazy, or pagerank")
+
+    return P
diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/modularitymatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/modularitymatrix.py
new file mode 100644
index 0000000..0287910
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/linalg/modularitymatrix.py
@@ -0,0 +1,166 @@
+"""Modularity matrix of graphs."""
+
+import networkx as nx
+from networkx.utils import not_implemented_for
+
+__all__ = ["modularity_matrix", "directed_modularity_matrix"]
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def modularity_matrix(G, nodelist=None, weight=None):
+    r"""Returns the modularity matrix of G.
+
+    The modularity matrix is the matrix B = A - <A>, where A is the adjacency
+    matrix and <A> is the average adjacency matrix, assuming that the graph
+    is described by the configuration model.
+
+    More specifically, the element B_ij of B is defined as
+
+    .. math::
+        A_{ij} - {k_i k_j \over 2 m}
+
+    where k_i is the degree of node i, and where m is the number of edges
+    in the graph. When `weight` is set to the name of an edge attribute,
+    A_ij, k_i, k_j and m are computed using its value.
+
+    Parameters
+    ----------
+    G : Graph
+       A NetworkX graph
+
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+       If nodelist is None, then the ordering is produced by G.nodes().
+
+    weight : string or None, optional (default=None)
+       The edge attribute that holds the numerical value used for
+       the edge weight. If None then all edge weights are 1.
+
+    Returns
+    -------
+    B : NumPy array
+      The modularity matrix of G.
+
+    Examples
+    --------
+    >>> k = [3, 2, 2, 1, 0]
+    >>> G = nx.havel_hakimi_graph(k)
+    >>> B = nx.modularity_matrix(G)
+
+
+    See Also
+    --------
+    to_numpy_array
+    modularity_spectrum
+    adjacency_matrix
+    directed_modularity_matrix
+
+    References
+    ----------
+    .. [1] M. E. J. Newman, "Modularity and community structure in networks",
+       Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006.
+    """
+    import numpy as np
+
+    if nodelist is None:
+        nodelist = list(G)
+    A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr")
+    k = A.sum(axis=1)
+    m = k.sum() * 0.5
+    # Expected adjacency matrix
+    X = np.outer(k, k) / (2 * m)
+
+    return A - X
+
+
+@not_implemented_for("undirected")
+@not_implemented_for("multigraph")
+@nx._dispatchable(edge_attrs="weight")
+def directed_modularity_matrix(G, nodelist=None, weight=None):
+    """Returns the directed modularity matrix of G.
+
+    The modularity matrix is the matrix B = A - <A>, where A is the adjacency
+    matrix and <A> is the expected adjacency matrix, assuming that the graph
+    is described by the configuration model.
+
+    More specifically, the element B_ij of B is defined as
+
+    .. math::
+        B_{ij} = A_{ij} - k_i^{out} k_j^{in} / m
+
+    where :math:`k_i^{out}` is the out-degree of node i, and :math:`k_j^{in}`
+    is the in-degree of node j, with m the number of edges in the graph.
+    When `weight` is set to the name of an edge attribute, A_ij, k_i, k_j
+    and m are computed using its value.
+
+    Parameters
+    ----------
+    G : DiGraph
+       A NetworkX DiGraph
+
+    nodelist : list, optional
+       The rows and columns are ordered according to the nodes in nodelist.
+ If nodelist is None, then the ordering is produced by G.nodes(). + + weight : string or None, optional (default=None) + The edge attribute that holds the numerical value used for + the edge weight. If None then all edge weights are 1. + + Returns + ------- + B : Numpy array + The modularity matrix of G. + + Examples + -------- + >>> G = nx.DiGraph() + >>> G.add_edges_from( + ... ( + ... (1, 2), + ... (1, 3), + ... (3, 1), + ... (3, 2), + ... (3, 5), + ... (4, 5), + ... (4, 6), + ... (5, 4), + ... (5, 6), + ... (6, 4), + ... ) + ... ) + >>> B = nx.directed_modularity_matrix(G) + + + Notes + ----- + NetworkX defines the element A_ij of the adjacency matrix as 1 if there + is a link going from node i to node j. Leicht and Newman use the opposite + definition. This explains the different expression for B_ij. + + See Also + -------- + to_numpy_array + modularity_spectrum + adjacency_matrix + modularity_matrix + + References + ---------- + .. [1] E. A. Leicht, M. E. J. Newman, + "Community structure in directed networks", + Phys. Rev Lett., vol. 100, no. 11, p. 118703, 2008. + """ + import numpy as np + + if nodelist is None: + nodelist = list(G) + A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr") + k_in = A.sum(axis=0) + k_out = A.sum(axis=1) + m = k_in.sum() + # Expected adjacency matrix + X = np.outer(k_out, k_in) / m + + return A - X diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/spectrum.py b/.venv/lib/python3.12/site-packages/networkx/linalg/spectrum.py new file mode 100644 index 0000000..079b185 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/spectrum.py @@ -0,0 +1,186 @@ +""" +Eigenvalue spectrum of graphs. +""" + +import networkx as nx + +__all__ = [ + "laplacian_spectrum", + "adjacency_spectrum", + "modularity_spectrum", + "normalized_laplacian_spectrum", + "bethe_hessian_spectrum", +] + + +@nx._dispatchable(edge_attrs="weight") +def laplacian_spectrum(G, weight="weight"): + """Returns eigenvalues of the Laplacian of G + + Parameters + ---------- + G : graph + A NetworkX graph + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + evals : NumPy array + Eigenvalues + + Notes + ----- + For MultiGraph/MultiDiGraph, the edges weights are summed. + See :func:`~networkx.convert_matrix.to_numpy_array` for other options. + + See Also + -------- + laplacian_matrix + + Examples + -------- + The multiplicity of 0 as an eigenvalue of the laplacian matrix is equal + to the number of connected components of G. + + >>> G = nx.Graph() # Create a graph with 5 nodes and 3 connected components + >>> G.add_nodes_from(range(5)) + >>> G.add_edges_from([(0, 2), (3, 4)]) + >>> nx.laplacian_spectrum(G) + array([0., 0., 0., 2., 2.]) + + """ + import scipy as sp + + return sp.linalg.eigvalsh(nx.laplacian_matrix(G, weight=weight).todense()) + + +@nx._dispatchable(edge_attrs="weight") +def normalized_laplacian_spectrum(G, weight="weight"): + """Return eigenvalues of the normalized Laplacian of G + + Parameters + ---------- + G : graph + A NetworkX graph + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + evals : NumPy array + Eigenvalues + + Notes + ----- + For MultiGraph/MultiDiGraph, the edges weights are summed. + See to_numpy_array for other options. 
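+
+    Examples
+    --------
+    The normalized Laplacian of the two-node path graph has eigenvalues
+    0 and 2:
+
+    >>> evals = nx.normalized_laplacian_spectrum(nx.path_graph(2))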
+ + See Also + -------- + normalized_laplacian_matrix + """ + import scipy as sp + + return sp.linalg.eigvalsh( + nx.normalized_laplacian_matrix(G, weight=weight).todense() + ) + + +@nx._dispatchable(edge_attrs="weight") +def adjacency_spectrum(G, weight="weight"): + """Returns eigenvalues of the adjacency matrix of G. + + Parameters + ---------- + G : graph + A NetworkX graph + + weight : string or None, optional (default='weight') + The edge data key used to compute each value in the matrix. + If None, then each edge has weight 1. + + Returns + ------- + evals : NumPy array + Eigenvalues + + Notes + ----- + For MultiGraph/MultiDiGraph, the edges weights are summed. + See to_numpy_array for other options. + + See Also + -------- + adjacency_matrix + """ + import scipy as sp + + return sp.linalg.eigvals(nx.adjacency_matrix(G, weight=weight).todense()) + + +@nx._dispatchable +def modularity_spectrum(G): + """Returns eigenvalues of the modularity matrix of G. + + Parameters + ---------- + G : Graph + A NetworkX Graph or DiGraph + + Returns + ------- + evals : NumPy array + Eigenvalues + + See Also + -------- + modularity_matrix + + References + ---------- + .. [1] M. E. J. Newman, "Modularity and community structure in networks", + Proc. Natl. Acad. Sci. USA, vol. 103, pp. 8577-8582, 2006. + """ + import scipy as sp + + if G.is_directed(): + return sp.linalg.eigvals(nx.directed_modularity_matrix(G)) + else: + return sp.linalg.eigvals(nx.modularity_matrix(G)) + + +@nx._dispatchable +def bethe_hessian_spectrum(G, r=None): + """Returns eigenvalues of the Bethe Hessian matrix of G. + + Parameters + ---------- + G : Graph + A NetworkX Graph or DiGraph + + r : float + Regularizer parameter + + Returns + ------- + evals : NumPy array + Eigenvalues + + See Also + -------- + bethe_hessian_matrix + + References + ---------- + .. [1] A. Saade, F. Krzakala and L. Zdeborová + "Spectral clustering of graphs with the bethe hessian", + Advances in Neural Information Processing Systems. 2014. 
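+
+    Examples
+    --------
+    Spectrum of a small path graph, using the default regularizer (computed
+    from the degree sequence of `G`):
+
+    >>> evals = nx.bethe_hessian_spectrum(nx.path_graph(3))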
+ """ + import scipy as sp + + return sp.linalg.eigvalsh(nx.bethe_hessian_matrix(G, r).todense()) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..1b557c3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_algebraic_connectivity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_algebraic_connectivity.cpython-312.pyc new file mode 100644 index 0000000..c5fcbe2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_algebraic_connectivity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-312.pyc new file mode 100644 index 0000000..16a2025 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_attrmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_bethehessian.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_bethehessian.cpython-312.pyc new file mode 100644 index 0000000..96a0466 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_bethehessian.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_graphmatrix.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_graphmatrix.cpython-312.pyc new file mode 100644 index 0000000..e9da338 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_graphmatrix.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_laplacian.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_laplacian.cpython-312.pyc new file mode 100644 index 0000000..1fcbd18 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_laplacian.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_modularity.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_modularity.cpython-312.pyc new file mode 100644 index 0000000..dee0698 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_modularity.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_spectrum.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_spectrum.cpython-312.pyc new file mode 100644 index 0000000..c7bcc54 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/__pycache__/test_spectrum.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py new file mode 100644 index 0000000..31f911f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_algebraic_connectivity.py @@ -0,0 +1,400 @@ +from math import sqrt + +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +methods = ("tracemin_pcg", "tracemin_lu", "lanczos", "lobpcg") + + +def test_algebraic_connectivity_tracemin_chol(): + """Test that "tracemin_chol" raises an exception.""" + pytest.importorskip("scipy") + G = nx.barbell_graph(5, 4) + with pytest.raises(nx.NetworkXError): + nx.algebraic_connectivity(G, method="tracemin_chol") + + +def test_fiedler_vector_tracemin_chol(): + """Test that "tracemin_chol" raises an exception.""" + pytest.importorskip("scipy") + G = nx.barbell_graph(5, 4) + with pytest.raises(nx.NetworkXError): + nx.fiedler_vector(G, method="tracemin_chol") + + +def test_spectral_ordering_tracemin_chol(): + """Test that "tracemin_chol" raises an exception.""" + pytest.importorskip("scipy") + G = nx.barbell_graph(5, 4) + with pytest.raises(nx.NetworkXError): + nx.spectral_ordering(G, method="tracemin_chol") + + +def test_fiedler_vector_tracemin_unknown(): + """Test that "tracemin_unknown" raises an exception.""" + pytest.importorskip("scipy") + G = nx.barbell_graph(5, 4) + L = nx.laplacian_matrix(G) + X = np.asarray(np.random.normal(size=(1, L.shape[0]))).T + with pytest.raises(nx.NetworkXError, match="Unknown linear system solver"): + nx.linalg.algebraicconnectivity._tracemin_fiedler( + L, X, normalized=False, tol=1e-8, method="tracemin_unknown" + ) + + +def test_spectral_bisection(): + pytest.importorskip("scipy") + G = nx.barbell_graph(3, 0) + C = nx.spectral_bisection(G) + assert C == ({0, 1, 2}, {3, 4, 5}) + + mapping = dict(enumerate("badfec")) + G = nx.relabel_nodes(G, mapping) + C = nx.spectral_bisection(G) + assert C == ( + {mapping[0], mapping[1], mapping[2]}, + {mapping[3], mapping[4], mapping[5]}, + ) + + +def check_eigenvector(A, l, x): + nx = np.linalg.norm(x) + # Check zeroness. + assert nx != pytest.approx(0, abs=1e-07) + y = A @ x + ny = np.linalg.norm(y) + # Check collinearity. + assert x @ y == pytest.approx(nx * ny, abs=1e-7) + # Check eigenvalue. 
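+    # Since x and y = A @ x are collinear (asserted above), ||y|| = l * ||x||
+    # confirms that l is the (nonnegative) eigenvalue associated with x.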
+ assert ny == pytest.approx(l * nx, abs=1e-7) + + +class TestAlgebraicConnectivity: + @pytest.mark.parametrize("method", methods) + def test_directed(self, method): + G = nx.DiGraph() + pytest.raises( + nx.NetworkXNotImplemented, nx.algebraic_connectivity, G, method=method + ) + pytest.raises(nx.NetworkXNotImplemented, nx.fiedler_vector, G, method=method) + + @pytest.mark.parametrize("method", methods) + def test_null_and_singleton(self, method): + G = nx.Graph() + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) + G.add_edge(0, 0) + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method=method) + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) + + @pytest.mark.parametrize("method", methods) + def test_disconnected(self, method): + G = nx.Graph() + G.add_nodes_from(range(2)) + assert nx.algebraic_connectivity(G) == 0 + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) + G.add_edge(0, 1, weight=0) + assert nx.algebraic_connectivity(G) == 0 + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method=method) + + def test_unrecognized_method(self): + pytest.importorskip("scipy") + G = nx.path_graph(4) + pytest.raises(nx.NetworkXError, nx.algebraic_connectivity, G, method="unknown") + pytest.raises(nx.NetworkXError, nx.fiedler_vector, G, method="unknown") + + @pytest.mark.parametrize("method", methods) + def test_two_nodes(self, method): + pytest.importorskip("scipy") + G = nx.Graph() + G.add_edge(0, 1, weight=1) + A = nx.laplacian_matrix(G) + assert nx.algebraic_connectivity(G, tol=1e-12, method=method) == pytest.approx( + 2, abs=1e-7 + ) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, 2, x) + + @pytest.mark.parametrize("method", methods) + def test_two_nodes_multigraph(self, method): + pytest.importorskip("scipy") + G = nx.MultiGraph() + G.add_edge(0, 0, spam=1e8) + G.add_edge(0, 1, spam=1) + G.add_edge(0, 1, spam=-2) + A = -3 * nx.laplacian_matrix(G, weight="spam") + assert nx.algebraic_connectivity( + G, weight="spam", tol=1e-12, method=method + ) == pytest.approx(6, abs=1e-7) + x = nx.fiedler_vector(G, weight="spam", tol=1e-12, method=method) + check_eigenvector(A, 6, x) + + def test_abbreviation_of_method(self): + pytest.importorskip("scipy") + G = nx.path_graph(8) + A = nx.laplacian_matrix(G) + sigma = 2 - sqrt(2 + sqrt(2)) + ac = nx.algebraic_connectivity(G, tol=1e-12, method="tracemin") + assert ac == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, tol=1e-12, method="tracemin") + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize("method", methods) + def test_path(self, method): + pytest.importorskip("scipy") + G = nx.path_graph(8) + A = nx.laplacian_matrix(G) + sigma = 2 - sqrt(2 + sqrt(2)) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert ac == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize("method", methods) + def test_problematic_graph_issue_2381(self, method): + pytest.importorskip("scipy") + G = nx.path_graph(4) + G.add_edges_from([(4, 2), (5, 1)]) + A = nx.laplacian_matrix(G) + sigma = 0.438447187191 + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert ac == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize("method", methods) + def test_cycle(self, 
method): + pytest.importorskip("scipy") + G = nx.cycle_graph(8) + A = nx.laplacian_matrix(G) + sigma = 2 - sqrt(2) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method) + assert ac == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize("method", methods) + def test_seed_argument(self, method): + pytest.importorskip("scipy") + G = nx.cycle_graph(8) + A = nx.laplacian_matrix(G) + sigma = 2 - sqrt(2) + ac = nx.algebraic_connectivity(G, tol=1e-12, method=method, seed=1) + assert ac == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, tol=1e-12, method=method, seed=1) + check_eigenvector(A, sigma, x) + + @pytest.mark.parametrize( + ("normalized", "sigma", "laplacian_fn"), + ( + (False, 0.2434017461399311, nx.laplacian_matrix), + (True, 0.08113391537997749, nx.normalized_laplacian_matrix), + ), + ) + @pytest.mark.parametrize("method", methods) + def test_buckminsterfullerene(self, normalized, sigma, laplacian_fn, method): + pytest.importorskip("scipy") + G = nx.Graph( + [ + (1, 10), + (1, 41), + (1, 59), + (2, 12), + (2, 42), + (2, 60), + (3, 6), + (3, 43), + (3, 57), + (4, 8), + (4, 44), + (4, 58), + (5, 13), + (5, 56), + (5, 57), + (6, 10), + (6, 31), + (7, 14), + (7, 56), + (7, 58), + (8, 12), + (8, 32), + (9, 23), + (9, 53), + (9, 59), + (10, 15), + (11, 24), + (11, 53), + (11, 60), + (12, 16), + (13, 14), + (13, 25), + (14, 26), + (15, 27), + (15, 49), + (16, 28), + (16, 50), + (17, 18), + (17, 19), + (17, 54), + (18, 20), + (18, 55), + (19, 23), + (19, 41), + (20, 24), + (20, 42), + (21, 31), + (21, 33), + (21, 57), + (22, 32), + (22, 34), + (22, 58), + (23, 24), + (25, 35), + (25, 43), + (26, 36), + (26, 44), + (27, 51), + (27, 59), + (28, 52), + (28, 60), + (29, 33), + (29, 34), + (29, 56), + (30, 51), + (30, 52), + (30, 53), + (31, 47), + (32, 48), + (33, 45), + (34, 46), + (35, 36), + (35, 37), + (36, 38), + (37, 39), + (37, 49), + (38, 40), + (38, 50), + (39, 40), + (39, 51), + (40, 52), + (41, 47), + (42, 48), + (43, 49), + (44, 50), + (45, 46), + (45, 54), + (46, 55), + (47, 54), + (48, 55), + ] + ) + A = laplacian_fn(G) + try: + assert nx.algebraic_connectivity( + G, normalized=normalized, tol=1e-12, method=method + ) == pytest.approx(sigma, abs=1e-7) + x = nx.fiedler_vector(G, normalized=normalized, tol=1e-12, method=method) + check_eigenvector(A, sigma, x) + except nx.NetworkXError as err: + if err.args not in ( + ("Cholesky solver unavailable.",), + ("LU solver unavailable.",), + ): + raise + + +class TestSpectralOrdering: + _graphs = (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph) + + @pytest.mark.parametrize("graph", _graphs) + def test_nullgraph(self, graph): + G = graph() + pytest.raises(nx.NetworkXError, nx.spectral_ordering, G) + + @pytest.mark.parametrize("graph", _graphs) + def test_singleton(self, graph): + G = graph() + G.add_node("x") + assert nx.spectral_ordering(G) == ["x"] + G.add_edge("x", "x", weight=33) + G.add_edge("x", "x", weight=33) + assert nx.spectral_ordering(G) == ["x"] + + def test_unrecognized_method(self): + G = nx.path_graph(4) + pytest.raises(nx.NetworkXError, nx.spectral_ordering, G, method="unknown") + + @pytest.mark.parametrize("method", methods) + def test_three_nodes(self, method): + pytest.importorskip("scipy") + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight="spam") + order = nx.spectral_ordering(G, weight="spam", method=method) + assert set(order) == set(G) + assert {1, 3} in 
(set(order[:-1]), set(order[1:])) + + @pytest.mark.parametrize("method", methods) + def test_three_nodes_multigraph(self, method): + pytest.importorskip("scipy") + G = nx.MultiDiGraph() + G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)]) + order = nx.spectral_ordering(G, method=method) + assert set(order) == set(G) + assert {2, 3} in (set(order[:-1]), set(order[1:])) + + @pytest.mark.parametrize("method", methods) + def test_path(self, method): + pytest.importorskip("scipy") + path = list(range(10)) + np.random.shuffle(path) + G = nx.Graph() + nx.add_path(G, path) + order = nx.spectral_ordering(G, method=method) + assert order in [path, list(reversed(path))] + + @pytest.mark.parametrize("method", methods) + def test_seed_argument(self, method): + pytest.importorskip("scipy") + path = list(range(10)) + np.random.shuffle(path) + G = nx.Graph() + nx.add_path(G, path) + order = nx.spectral_ordering(G, method=method, seed=1) + assert order in [path, list(reversed(path))] + + @pytest.mark.parametrize("method", methods) + def test_disconnected(self, method): + pytest.importorskip("scipy") + G = nx.Graph() + nx.add_path(G, range(0, 10, 2)) + nx.add_path(G, range(1, 10, 2)) + order = nx.spectral_ordering(G, method=method) + assert set(order) == set(G) + seqs = [ + list(range(0, 10, 2)), + list(range(8, -1, -2)), + list(range(1, 10, 2)), + list(range(9, -1, -2)), + ] + assert order[:5] in seqs + assert order[5:] in seqs + + @pytest.mark.parametrize( + ("normalized", "expected_order"), + ( + (False, [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8], [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]]), + (True, [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8], [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]]), + ), + ) + @pytest.mark.parametrize("method", methods) + def test_cycle(self, normalized, expected_order, method): + pytest.importorskip("scipy") + path = list(range(10)) + G = nx.Graph() + nx.add_path(G, path, weight=5) + G.add_edge(path[-1], path[0], weight=1) + A = nx.laplacian_matrix(G).todense() + order = nx.spectral_ordering(G, normalized=normalized, method=method) + assert order in expected_order diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_attrmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_attrmatrix.py new file mode 100644 index 0000000..05a3b94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_attrmatrix.py @@ -0,0 +1,108 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") + + +def test_attr_matrix(): + G = nx.Graph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + + def node_attr(u): + return G.nodes[u].get("size", 0.5) * 3 + + def edge_attr(u, v): + return G[u][v].get("thickness", 0.5) + + M = nx.attr_matrix(G, edge_attr=edge_attr, node_attr=node_attr) + np.testing.assert_equal(M[0], np.array([[6.0]])) + assert M[1] == [1.5] + + +def test_attr_matrix_directed(): + G = nx.DiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 1., 1.], + [0., 0., 1.], + [0., 0., 0.]] + ) + # fmt: on + np.testing.assert_equal(M, np.array(data)) + + +def test_attr_matrix_multigraph(): + G = nx.MultiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, 
thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 3., 1.], + [3., 0., 1.], + [1., 1., 0.]] + ) + # fmt: on + np.testing.assert_equal(M, np.array(data)) + M = nx.attr_matrix(G, edge_attr="weight", rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 9., 1.], + [9., 0., 1.], + [1., 1., 0.]] + ) + # fmt: on + np.testing.assert_equal(M, np.array(data)) + M = nx.attr_matrix(G, edge_attr="thickness", rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 3., 2.], + [3., 0., 3.], + [2., 3., 0.]] + ) + # fmt: on + np.testing.assert_equal(M, np.array(data)) + + +def test_attr_sparse_matrix(): + pytest.importorskip("scipy") + G = nx.Graph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_sparse_matrix(G) + mtx = M[0] + data = np.ones((3, 3), float) + np.fill_diagonal(data, 0) + np.testing.assert_equal(mtx.todense(), np.array(data)) + assert M[1] == [0, 1, 2] + + +def test_attr_sparse_matrix_directed(): + pytest.importorskip("scipy") + G = nx.DiGraph() + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 1, thickness=1, weight=3) + G.add_edge(0, 2, thickness=2) + G.add_edge(1, 2, thickness=3) + M = nx.attr_sparse_matrix(G, rc_order=[0, 1, 2]) + # fmt: off + data = np.array( + [[0., 1., 1.], + [0., 0., 1.], + [0., 0., 0.]] + ) + # fmt: on + np.testing.assert_equal(M.todense(), np.array(data)) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_bethehessian.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_bethehessian.py new file mode 100644 index 0000000..92b745b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_bethehessian.py @@ -0,0 +1,40 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestBetheHessian: + @classmethod + def setup_class(cls): + deg = [3, 2, 2, 1, 0] + cls.G = nx.havel_hakimi_graph(deg) + cls.P = nx.path_graph(3) + + def test_bethe_hessian(self): + "Bethe Hessian matrix" + # fmt: off + H = np.array([[4, -2, 0], + [-2, 5, -2], + [0, -2, 4]]) + # fmt: on + permutation = [2, 0, 1] + # Bethe Hessian gives expected form + np.testing.assert_equal(nx.bethe_hessian_matrix(self.P, r=2).todense(), H) + # nodelist is correctly implemented + np.testing.assert_equal( + nx.bethe_hessian_matrix(self.P, r=2, nodelist=permutation).todense(), + H[np.ix_(permutation, permutation)], + ) + # Equal to Laplacian matrix when r=1 + np.testing.assert_equal( + nx.bethe_hessian_matrix(self.G, r=1).todense(), + nx.laplacian_matrix(self.G).todense(), + ) + # Correct default for the regularizer r + np.testing.assert_equal( + nx.bethe_hessian_matrix(self.G).todense(), + nx.bethe_hessian_matrix(self.G, r=1.25).todense(), + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_graphmatrix.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_graphmatrix.py new file mode 100644 index 0000000..d84a397 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_graphmatrix.py @@ -0,0 +1,275 @@ +import pytest + +import networkx as nx +from networkx.exception import NetworkXError + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +def test_incidence_matrix_simple(): + deg = [3, 2, 2, 1, 0] + G = nx.havel_hakimi_graph(deg) + deg = [(1, 0), (1, 0), (1, 0), (2, 0), (1, 0), (2, 1), (0, 1), (0, 1)] + MG = nx.random_clustered_graph(deg, 
seed=42) + + I = nx.incidence_matrix(G, dtype=int).todense() + # fmt: off + expected = np.array( + [[1, 1, 1, 0], + [0, 1, 0, 1], + [1, 0, 0, 1], + [0, 0, 1, 0], + [0, 0, 0, 0]] + ) + # fmt: on + np.testing.assert_equal(I, expected) + + I = nx.incidence_matrix(MG, dtype=int).todense() + # fmt: off + expected = np.array( + [[1, 0, 0, 0, 0, 0, 0], + [1, 0, 0, 0, 0, 0, 0], + [0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0], + [0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0], + [0, 0, 0, 0, 0, 1, 1], + [0, 0, 0, 0, 1, 0, 1]] + ) + # fmt: on + np.testing.assert_equal(I, expected) + + with pytest.raises(NetworkXError): + nx.incidence_matrix(G, nodelist=[0, 1]) + + +class TestGraphMatrix: + @classmethod + def setup_class(cls): + deg = [3, 2, 2, 1, 0] + cls.G = nx.havel_hakimi_graph(deg) + # fmt: off + cls.OI = np.array( + [[-1, -1, -1, 0], + [1, 0, 0, -1], + [0, 1, 0, 1], + [0, 0, 1, 0], + [0, 0, 0, 0]] + ) + cls.A = np.array( + [[0, 1, 1, 1, 0], + [1, 0, 1, 0, 0], + [1, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.WG = nx.havel_hakimi_graph(deg) + cls.WG.add_edges_from( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges() + ) + # fmt: off + cls.WA = np.array( + [[0, 0.5, 0.5, 0.5, 0], + [0.5, 0, 0.5, 0, 0], + [0.5, 0.5, 0, 0, 0], + [0.5, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.MG = nx.MultiGraph(cls.G) + cls.MG2 = cls.MG.copy() + cls.MG2.add_edge(0, 1) + # fmt: off + cls.MG2A = np.array( + [[0, 2, 1, 1, 0], + [2, 0, 1, 0, 0], + [1, 1, 0, 0, 0], + [1, 0, 0, 0, 0], + [0, 0, 0, 0, 0]] + ) + cls.MGOI = np.array( + [[-1, -1, -1, -1, 0], + [1, 1, 0, 0, -1], + [0, 0, 1, 0, 1], + [0, 0, 0, 1, 0], + [0, 0, 0, 0, 0]] + ) + # fmt: on + cls.no_edges_G = nx.Graph([(1, 2), (3, 2, {"weight": 8})]) + cls.no_edges_A = np.array([[0, 0], [0, 0]]) + + def test_incidence_matrix(self): + "Conversion to incidence matrix" + I = nx.incidence_matrix( + self.G, + nodelist=sorted(self.G), + edgelist=sorted(self.G.edges()), + oriented=True, + dtype=int, + ).todense() + np.testing.assert_equal(I, self.OI) + + I = nx.incidence_matrix( + self.G, + nodelist=sorted(self.G), + edgelist=sorted(self.G.edges()), + oriented=False, + dtype=int, + ).todense() + np.testing.assert_equal(I, np.abs(self.OI)) + + I = nx.incidence_matrix( + self.MG, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG.edges()), + oriented=True, + dtype=int, + ).todense() + np.testing.assert_equal(I, self.OI) + + I = nx.incidence_matrix( + self.MG, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG.edges()), + oriented=False, + dtype=int, + ).todense() + np.testing.assert_equal(I, np.abs(self.OI)) + + I = nx.incidence_matrix( + self.MG2, + nodelist=sorted(self.MG2), + edgelist=sorted(self.MG2.edges()), + oriented=True, + dtype=int, + ).todense() + np.testing.assert_equal(I, self.MGOI) + + I = nx.incidence_matrix( + self.MG2, + nodelist=sorted(self.MG), + edgelist=sorted(self.MG2.edges()), + oriented=False, + dtype=int, + ).todense() + np.testing.assert_equal(I, np.abs(self.MGOI)) + + I = nx.incidence_matrix(self.G, dtype=np.uint8) + assert I.dtype == np.uint8 + + def test_weighted_incidence_matrix(self): + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + dtype=int, + ).todense() + np.testing.assert_equal(I, self.OI) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=False, + dtype=int, + ).todense() + np.testing.assert_equal(I, np.abs(self.OI)) + + # 
np.testing.assert_equal(nx.incidence_matrix(self.WG,oriented=True, + # weight='weight').todense(),0.5*self.OI) + # np.testing.assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(), + # np.abs(0.5*self.OI)) + # np.testing.assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(), + # 0.3*self.OI) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + weight="weight", + ).todense() + np.testing.assert_equal(I, 0.5 * self.OI) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=False, + weight="weight", + ).todense() + np.testing.assert_equal(I, np.abs(0.5 * self.OI)) + + I = nx.incidence_matrix( + self.WG, + nodelist=sorted(self.WG), + edgelist=sorted(self.WG.edges()), + oriented=True, + weight="other", + ).todense() + np.testing.assert_equal(I, 0.3 * self.OI) + + # WMG=nx.MultiGraph(self.WG) + # WMG.add_edge(0,1,weight=0.5,other=0.3) + # np.testing.assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(), + # np.abs(0.5*self.MGOI)) + # np.testing.assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(), + # 0.5*self.MGOI) + # np.testing.assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(), + # 0.3*self.MGOI) + + WMG = nx.MultiGraph(self.WG) + WMG.add_edge(0, 1, weight=0.5, other=0.3) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=True, + weight="weight", + ).todense() + np.testing.assert_equal(I, 0.5 * self.MGOI) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=False, + weight="weight", + ).todense() + np.testing.assert_equal(I, np.abs(0.5 * self.MGOI)) + + I = nx.incidence_matrix( + WMG, + nodelist=sorted(WMG), + edgelist=sorted(WMG.edges(keys=True)), + oriented=True, + weight="other", + ).todense() + np.testing.assert_equal(I, 0.3 * self.MGOI) + + def test_adjacency_matrix(self): + "Conversion to adjacency matrix" + np.testing.assert_equal(nx.adjacency_matrix(self.G).todense(), self.A) + np.testing.assert_equal(nx.adjacency_matrix(self.MG).todense(), self.A) + np.testing.assert_equal(nx.adjacency_matrix(self.MG2).todense(), self.MG2A) + np.testing.assert_equal( + nx.adjacency_matrix(self.G, nodelist=[0, 1]).todense(), self.A[:2, :2] + ) + np.testing.assert_equal(nx.adjacency_matrix(self.WG).todense(), self.WA) + np.testing.assert_equal( + nx.adjacency_matrix(self.WG, weight=None).todense(), self.A + ) + np.testing.assert_equal( + nx.adjacency_matrix(self.MG2, weight=None).todense(), self.MG2A + ) + np.testing.assert_equal( + nx.adjacency_matrix(self.WG, weight="other").todense(), 0.6 * self.WA + ) + np.testing.assert_equal( + nx.adjacency_matrix(self.no_edges_G, nodelist=[1, 3]).todense(), + self.no_edges_A, + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_laplacian.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_laplacian.py new file mode 100644 index 0000000..b1d5c13 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_laplacian.py @@ -0,0 +1,334 @@ +import pytest + +import networkx as nx + +np = pytest.importorskip("numpy") +pytest.importorskip("scipy") + + +class TestLaplacian: + @classmethod + def setup_class(cls): + deg = [3, 2, 2, 1, 0] + cls.G = nx.havel_hakimi_graph(deg) + cls.WG = nx.Graph( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges() + ) 
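+ # node 4 has degree 0 in deg, so the edge-based constructor above misses
+ # it; add it to WG explicitly to keep G and WG on the same node set.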
+ cls.WG.add_node(4)
+ cls.MG = nx.MultiGraph(cls.G)
+
+ # Graph with self-loops
+ cls.Gsl = cls.G.copy()
+ for node in cls.Gsl.nodes():
+ cls.Gsl.add_edge(node, node)
+
+ # Graph used as an example in Sec. 4.1 of Langville and Meyer,
+ # "Google's PageRank and Beyond".
+ cls.DiG = nx.DiGraph()
+ cls.DiG.add_edges_from(
+ (
+ (1, 2),
+ (1, 3),
+ (3, 1),
+ (3, 2),
+ (3, 5),
+ (4, 5),
+ (4, 6),
+ (5, 4),
+ (5, 6),
+ (6, 4),
+ )
+ )
+ cls.DiMG = nx.MultiDiGraph(cls.DiG)
+ cls.DiWG = nx.DiGraph(
+ (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.DiG.edges()
+ )
+ cls.DiGsl = cls.DiG.copy()
+ for node in cls.DiGsl.nodes():
+ cls.DiGsl.add_edge(node, node)
+
+ def test_laplacian(self):
+ "Graph Laplacian"
+ # fmt: off
+ NL = np.array([[ 3, -1, -1, -1, 0],
+ [-1, 2, -1, 0, 0],
+ [-1, -1, 2, 0, 0],
+ [-1, 0, 0, 1, 0],
+ [ 0, 0, 0, 0, 0]])
+ # fmt: on
+ WL = 0.5 * NL
+ OL = 0.3 * NL
+ # fmt: off
+ DiNL = np.array([[ 2, -1, -1, 0, 0, 0],
+ [ 0, 0, 0, 0, 0, 0],
+ [-1, -1, 3, -1, 0, 0],
+ [ 0, 0, 0, 2, -1, -1],
+ [ 0, 0, 0, -1, 2, -1],
+ [ 0, 0, 0, 0, -1, 1]])
+ # fmt: on
+ DiWL = 0.5 * DiNL
+ DiOL = 0.3 * DiNL
+ np.testing.assert_equal(nx.laplacian_matrix(self.G).todense(), NL)
+ np.testing.assert_equal(nx.laplacian_matrix(self.MG).todense(), NL)
+ np.testing.assert_equal(
+ nx.laplacian_matrix(self.G, nodelist=[0, 1]).todense(),
+ np.array([[1, -1], [-1, 1]]),
+ )
+ np.testing.assert_equal(nx.laplacian_matrix(self.WG).todense(), WL)
+ np.testing.assert_equal(nx.laplacian_matrix(self.WG, weight=None).todense(), NL)
+ np.testing.assert_equal(
+ nx.laplacian_matrix(self.WG, weight="other").todense(), OL
+ )
+
+ np.testing.assert_equal(nx.laplacian_matrix(self.DiG).todense(), DiNL)
+ np.testing.assert_equal(nx.laplacian_matrix(self.DiMG).todense(), DiNL)
+ np.testing.assert_equal(
+ nx.laplacian_matrix(self.DiG, nodelist=[1, 2]).todense(),
+ np.array([[1, -1], [0, 0]]),
+ )
+ np.testing.assert_equal(nx.laplacian_matrix(self.DiWG).todense(), DiWL)
+ np.testing.assert_equal(
+ nx.laplacian_matrix(self.DiWG, weight=None).todense(), DiNL
+ )
+ np.testing.assert_equal(
+ nx.laplacian_matrix(self.DiWG, weight="other").todense(), DiOL
+ )
+
+ def test_normalized_laplacian(self):
+ "Normalized Graph Laplacian"
+ # fmt: off
+ G = np.array([[ 1. , -0.408, -0.408, -0.577, 0.],
+ [-0.408, 1. , -0.5 , 0. , 0.],
+ [-0.408, -0.5 , 1. , 0. , 0.],
+ [-0.577, 0. , 0. , 1. , 0.],
+ [ 0. , 0. , 0. , 0. , 0.]])
+ GL = np.array([[ 1. , -0.408, -0.408, -0.577, 0. ],
+ [-0.408, 1. , -0.5 , 0. , 0. ],
+ [-0.408, -0.5 , 1. , 0. , 0. ],
+ [-0.577, 0. , 0. , 1. , 0. ],
+ [ 0. , 0. , 0. , 0. , 0. ]])
+ Lsl = np.array([[ 0.75 , -0.2887, -0.2887, -0.3536, 0. ],
+ [-0.2887, 0.6667, -0.3333, 0. , 0. ],
+ [-0.2887, -0.3333, 0.6667, 0. , 0. ],
+ [-0.3536, 0. , 0. , 0.5 , 0. ],
+ [ 0. , 0. , 0. , 0. , 0. ]])
+
+ DiG = np.array([[ 1. , 0. , -0.4082, 0. , 0. , 0. ],
+ [ 0. , 0. , 0. , 0. , 0. , 0. ],
+ [-0.4082, 0. , 1. , 0. , -0.4082, 0. ],
+ [ 0. , 0. , 0. , 1. , -0.5 , -0.7071],
+ [ 0. , 0. , 0. , -0.5 , 1. , -0.7071],
+ [ 0. , 0. , 0. , -0.7071, 0. , 1. ]])
+ DiGL = np.array([[ 1. , 0. , -0.4082, 0. , 0. , 0. ],
+ [ 0. , 0. , 0. , 0. , 0. , 0. ],
+ [-0.4082, 0. , 1. , -0.4082, 0. , 0. ],
+ [ 0. , 0. , 0. , 1. , -0.5 , -0.7071],
+ [ 0. , 0. , 0. , -0.5 , 1. , -0.7071],
+ [ 0. , 0. , 0. , 0. , -0.7071, 1. ]])
+ DiLsl = np.array([[ 0.6667, -0.5774, -0.2887, 0. , 0. , 0. ],
+ [ 0. , 0. , 0. , 0. , 0. , 0. ],
+ [-0.2887, -0.5 , 0.75 , -0.2887, 0. , 0. ],
+ [ 0. , 0. , 0. , 0.6667, -0.3333, -0.4082],
+ [ 0. , 0. , 0.
, -0.3333, 0.6667, -0.4082], + [ 0. , 0. , 0. , 0. , -0.4082, 0.5 ]]) + # fmt: on + + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.G, nodelist=range(5)).todense(), + G, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.G).todense(), GL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.MG).todense(), GL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.WG).todense(), GL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.WG, weight="other").todense(), + GL, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.Gsl).todense(), Lsl, decimal=3 + ) + + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix( + self.DiG, + nodelist=range(1, 1 + 6), + ).todense(), + DiG, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiMG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiWG).todense(), DiGL, decimal=3 + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiWG, weight="other").todense(), + DiGL, + decimal=3, + ) + np.testing.assert_almost_equal( + nx.normalized_laplacian_matrix(self.DiGsl).todense(), DiLsl, decimal=3 + ) + + +def test_directed_laplacian(): + "Directed Laplacian" + # Graph used as an example in Sec. 4.1 of Langville and Meyer, + # "Google's PageRank and Beyond". The graph contains dangling nodes, so + # the pagerank random walk is selected by directed_laplacian + G = nx.DiGraph() + G.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) + # fmt: off + GL = np.array([[ 0.9833, -0.2941, -0.3882, -0.0291, -0.0231, -0.0261], + [-0.2941, 0.8333, -0.2339, -0.0536, -0.0589, -0.0554], + [-0.3882, -0.2339, 0.9833, -0.0278, -0.0896, -0.0251], + [-0.0291, -0.0536, -0.0278, 0.9833, -0.4878, -0.6675], + [-0.0231, -0.0589, -0.0896, -0.4878, 0.9833, -0.2078], + [-0.0261, -0.0554, -0.0251, -0.6675, -0.2078, 0.9833]]) + # fmt: on + L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G)) + np.testing.assert_almost_equal(L, GL, decimal=3) + + # Make the graph strongly connected, so we can use a random and lazy walk + G.add_edges_from(((2, 5), (6, 1))) + # fmt: off + GL = np.array([[ 1. , -0.3062, -0.4714, 0. , 0. , -0.3227], + [-0.3062, 1. , -0.1443, 0. , -0.3162, 0. ], + [-0.4714, -0.1443, 1. , 0. , -0.0913, 0. ], + [ 0. , 0. , 0. , 1. , -0.5 , -0.5 ], + [ 0. , -0.3162, -0.0913, -0.5 , 1. , -0.25 ], + [-0.3227, 0. , 0. , -0.5 , -0.25 , 1. ]]) + # fmt: on + L = nx.directed_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="random" + ) + np.testing.assert_almost_equal(L, GL, decimal=3) + + # fmt: off + GL = np.array([[ 0.5 , -0.1531, -0.2357, 0. , 0. , -0.1614], + [-0.1531, 0.5 , -0.0722, 0. , -0.1581, 0. ], + [-0.2357, -0.0722, 0.5 , 0. , -0.0456, 0. ], + [ 0. , 0. , 0. , 0.5 , -0.25 , -0.25 ], + [ 0. , -0.1581, -0.0456, -0.25 , 0.5 , -0.125 ], + [-0.1614, 0. , 0. 
, -0.25 , -0.125 , 0.5 ]]) + # fmt: on + L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G), walk_type="lazy") + np.testing.assert_almost_equal(L, GL, decimal=3) + + # Make a strongly connected periodic graph + G = nx.DiGraph() + G.add_edges_from(((1, 2), (2, 4), (4, 1), (1, 3), (3, 4))) + # fmt: off + GL = np.array([[ 0.5 , -0.176, -0.176, -0.25 ], + [-0.176, 0.5 , 0. , -0.176], + [-0.176, 0. , 0.5 , -0.176], + [-0.25 , -0.176, -0.176, 0.5 ]]) + # fmt: on + L = nx.directed_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G)) + np.testing.assert_almost_equal(L, GL, decimal=3) + + +def test_directed_combinatorial_laplacian(): + "Directed combinatorial Laplacian" + # Graph used as an example in Sec. 4.1 of Langville and Meyer, + # "Google's PageRank and Beyond". The graph contains dangling nodes, so + # the pagerank random walk is selected by directed_laplacian + G = nx.DiGraph() + G.add_edges_from( + ( + (1, 2), + (1, 3), + (3, 1), + (3, 2), + (3, 5), + (4, 5), + (4, 6), + (5, 4), + (5, 6), + (6, 4), + ) + ) + # fmt: off + GL = np.array([[ 0.0366, -0.0132, -0.0153, -0.0034, -0.0020, -0.0027], + [-0.0132, 0.0450, -0.0111, -0.0076, -0.0062, -0.0069], + [-0.0153, -0.0111, 0.0408, -0.0035, -0.0083, -0.0027], + [-0.0034, -0.0076, -0.0035, 0.3688, -0.1356, -0.2187], + [-0.0020, -0.0062, -0.0083, -0.1356, 0.2026, -0.0505], + [-0.0027, -0.0069, -0.0027, -0.2187, -0.0505, 0.2815]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix(G, alpha=0.9, nodelist=sorted(G)) + np.testing.assert_almost_equal(L, GL, decimal=3) + + # Make the graph strongly connected, so we can use a random and lazy walk + G.add_edges_from(((2, 5), (6, 1))) + + # fmt: off + GL = np.array([[ 0.1395, -0.0349, -0.0465, 0. , 0. , -0.0581], + [-0.0349, 0.093 , -0.0116, 0. , -0.0465, 0. ], + [-0.0465, -0.0116, 0.0698, 0. , -0.0116, 0. ], + [ 0. , 0. , 0. , 0.2326, -0.1163, -0.1163], + [ 0. , -0.0465, -0.0116, -0.1163, 0.2326, -0.0581], + [-0.0581, 0. , 0. , -0.1163, -0.0581, 0.2326]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="random" + ) + np.testing.assert_almost_equal(L, GL, decimal=3) + + # fmt: off + GL = np.array([[ 0.0698, -0.0174, -0.0233, 0. , 0. , -0.0291], + [-0.0174, 0.0465, -0.0058, 0. , -0.0233, 0. ], + [-0.0233, -0.0058, 0.0349, 0. , -0.0058, 0. ], + [ 0. , 0. , 0. , 0.1163, -0.0581, -0.0581], + [ 0. , -0.0233, -0.0058, -0.0581, 0.1163, -0.0291], + [-0.0291, 0. , 0. , -0.0581, -0.0291, 0.1163]]) + # fmt: on + + L = nx.directed_combinatorial_laplacian_matrix( + G, alpha=0.9, nodelist=sorted(G), walk_type="lazy" + ) + np.testing.assert_almost_equal(L, GL, decimal=3) + + E = nx.DiGraph(nx.margulis_gabber_galil_graph(2)) + L = nx.directed_combinatorial_laplacian_matrix(E) + # fmt: off + expected = np.array( + [[ 0.16666667, -0.08333333, -0.08333333, 0. ], + [-0.08333333, 0.16666667, 0. , -0.08333333], + [-0.08333333, 0. , 0.16666667, -0.08333333], + [ 0. 
, -0.08333333, -0.08333333, 0.16666667]]
+ )
+ # fmt: on
+ np.testing.assert_almost_equal(L, expected, decimal=6)
+
+ with pytest.raises(nx.NetworkXError):
+ nx.directed_combinatorial_laplacian_matrix(G, walk_type="pagerank", alpha=100)
+ with pytest.raises(nx.NetworkXError):
+ nx.directed_combinatorial_laplacian_matrix(G, walk_type="silly") diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_modularity.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_modularity.py new file mode 100644 index 0000000..48dda00 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_modularity.py @@ -0,0 +1,86 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
+class TestModularity:
+ @classmethod
+ def setup_class(cls):
+ deg = [3, 2, 2, 1, 0]
+ cls.G = nx.havel_hakimi_graph(deg)
+ # Graph used as an example in Sec. 4.1 of Langville and Meyer,
+ # "Google's PageRank and Beyond". (Used for test_directed_laplacian)
+ cls.DG = nx.DiGraph()
+ cls.DG.add_edges_from(
+ (
+ (1, 2),
+ (1, 3),
+ (3, 1),
+ (3, 2),
+ (3, 5),
+ (4, 5),
+ (4, 6),
+ (5, 4),
+ (5, 6),
+ (6, 4),
+ )
+ )
+
+ def test_modularity(self):
+ "Modularity matrix"
+ # fmt: off
+ B = np.array([[-1.125, 0.25, 0.25, 0.625, 0.],
+ [0.25, -0.5, 0.5, -0.25, 0.],
+ [0.25, 0.5, -0.5, -0.25, 0.],
+ [0.625, -0.25, -0.25, -0.125, 0.],
+ [0., 0., 0., 0., 0.]])
+ # fmt: on
+
+ permutation = [4, 0, 1, 2, 3]
+ np.testing.assert_equal(nx.modularity_matrix(self.G), B)
+ np.testing.assert_equal(
+ nx.modularity_matrix(self.G, nodelist=permutation),
+ B[np.ix_(permutation, permutation)],
+ )
+
+ def test_modularity_weight(self):
+ "Modularity matrix with weights"
+ # fmt: off
+ B = np.array([[-1.125, 0.25, 0.25, 0.625, 0.],
+ [0.25, -0.5, 0.5, -0.25, 0.],
+ [0.25, 0.5, -0.5, -0.25, 0.],
+ [0.625, -0.25, -0.25, -0.125, 0.],
+ [0., 0., 0., 0., 0.]])
+ # fmt: on
+
+ G_weighted = self.G.copy()
+ for n1, n2 in G_weighted.edges():
+ G_weighted.edges[n1, n2]["weight"] = 0.5
+ # The following test would fail in networkx 1.1
+ np.testing.assert_equal(nx.modularity_matrix(G_weighted), B)
+ # The following tests that the modularity matrix gets rescaled accordingly
+ np.testing.assert_equal(
+ nx.modularity_matrix(G_weighted, weight="weight"), 0.5 * B
+ )
+
+ def test_directed_modularity(self):
+ "Directed Modularity matrix"
+ # fmt: off
+ B = np.array([[-0.2, 0.6, 0.8, -0.4, -0.4, -0.4],
+ [0., 0., 0., 0., 0., 0.],
+ [0.7, 0.4, -0.3, -0.6, 0.4, -0.6],
+ [-0.2, -0.4, -0.2, -0.4, 0.6, 0.6],
+ [-0.2, -0.4, -0.2, 0.6, -0.4, 0.6],
+ [-0.1, -0.2, -0.1, 0.8, -0.2, -0.2]])
+ # fmt: on
+ node_permutation = [5, 1, 2, 3, 4, 6]
+ idx_permutation = [4, 0, 1, 2, 3, 5]
+ mm = nx.directed_modularity_matrix(self.DG, nodelist=sorted(self.DG))
+ np.testing.assert_equal(mm, B)
+ np.testing.assert_equal(
+ nx.directed_modularity_matrix(self.DG, nodelist=node_permutation),
+ B[np.ix_(idx_permutation, idx_permutation)],
+ ) diff --git a/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_spectrum.py b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_spectrum.py new file mode 100644 index 0000000..01009f0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/linalg/tests/test_spectrum.py @@ -0,0 +1,70 @@
+import pytest
+
+import networkx as nx
+
+np = pytest.importorskip("numpy")
+pytest.importorskip("scipy")
+
+
+class TestSpectrum:
+ @classmethod
+ def setup_class(cls):
+ deg = [3, 2, 2, 1, 0]
+ cls.G = nx.havel_hakimi_graph(deg)
+ cls.P =
nx.path_graph(3) + cls.WG = nx.Graph( + (u, v, {"weight": 0.5, "other": 0.3}) for (u, v) in cls.G.edges() + ) + cls.WG.add_node(4) + cls.DG = nx.DiGraph() + nx.add_path(cls.DG, [0, 1, 2]) + + def test_laplacian_spectrum(self): + "Laplacian eigenvalues" + evals = np.array([0, 0, 1, 3, 4]) + e = sorted(nx.laplacian_spectrum(self.G)) + np.testing.assert_almost_equal(e, evals) + e = sorted(nx.laplacian_spectrum(self.WG, weight=None)) + np.testing.assert_almost_equal(e, evals) + e = sorted(nx.laplacian_spectrum(self.WG)) + np.testing.assert_almost_equal(e, 0.5 * evals) + e = sorted(nx.laplacian_spectrum(self.WG, weight="other")) + np.testing.assert_almost_equal(e, 0.3 * evals) + + def test_normalized_laplacian_spectrum(self): + "Normalized Laplacian eigenvalues" + evals = np.array([0, 0, 0.7712864461218, 1.5, 1.7287135538781]) + e = sorted(nx.normalized_laplacian_spectrum(self.G)) + np.testing.assert_almost_equal(e, evals) + e = sorted(nx.normalized_laplacian_spectrum(self.WG, weight=None)) + np.testing.assert_almost_equal(e, evals) + e = sorted(nx.normalized_laplacian_spectrum(self.WG)) + np.testing.assert_almost_equal(e, evals) + e = sorted(nx.normalized_laplacian_spectrum(self.WG, weight="other")) + np.testing.assert_almost_equal(e, evals) + + def test_adjacency_spectrum(self): + "Adjacency eigenvalues" + evals = np.array([-np.sqrt(2), 0, np.sqrt(2)]) + e = sorted(nx.adjacency_spectrum(self.P)) + np.testing.assert_almost_equal(e, evals) + + def test_modularity_spectrum(self): + "Modularity eigenvalues" + evals = np.array([-1.5, 0.0, 0.0]) + e = sorted(nx.modularity_spectrum(self.P)) + np.testing.assert_almost_equal(e, evals) + # Directed modularity eigenvalues + evals = np.array([-0.5, 0.0, 0.0]) + e = sorted(nx.modularity_spectrum(self.DG)) + np.testing.assert_almost_equal(e, evals) + + def test_bethe_hessian_spectrum(self): + "Bethe Hessian eigenvalues" + evals = np.array([0.5 * (9 - np.sqrt(33)), 4, 0.5 * (9 + np.sqrt(33))]) + e = sorted(nx.bethe_hessian_spectrum(self.P, r=2)) + np.testing.assert_almost_equal(e, evals) + # Collapses back to Laplacian: + e1 = sorted(nx.bethe_hessian_spectrum(self.P, r=1)) + e2 = sorted(nx.laplacian_spectrum(self.P)) + np.testing.assert_almost_equal(e1, e2) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__init__.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/__init__.py new file mode 100644 index 0000000..a805c50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/__init__.py @@ -0,0 +1,17 @@ +""" +A package for reading and writing graphs in various formats. 
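+Supported formats include adjacency lists, multiline adjacency lists,
+edge lists, Pajek, LEDA, sparse6/graph6, GML, GraphML, GEXF, JSON graph,
+and network text.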
+ +""" + +from networkx.readwrite.adjlist import * +from networkx.readwrite.multiline_adjlist import * +from networkx.readwrite.edgelist import * +from networkx.readwrite.pajek import * +from networkx.readwrite.leda import * +from networkx.readwrite.sparse6 import * +from networkx.readwrite.graph6 import * +from networkx.readwrite.gml import * +from networkx.readwrite.graphml import * +from networkx.readwrite.gexf import * +from networkx.readwrite.json_graph import * +from networkx.readwrite.text import * diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..777c059 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-312.pyc new file mode 100644 index 0000000..304dd74 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/adjlist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-312.pyc new file mode 100644 index 0000000..7071468 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/edgelist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gexf.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gexf.cpython-312.pyc new file mode 100644 index 0000000..2420818 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gexf.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gml.cpython-312.pyc new file mode 100644 index 0000000..8276e9c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/gml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graph6.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graph6.cpython-312.pyc new file mode 100644 index 0000000..7b7bb0e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graph6.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graphml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graphml.cpython-312.pyc new file mode 100644 index 0000000..3687715 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/graphml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/leda.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/leda.cpython-312.pyc new file mode 100644 index 0000000..06dd7d3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/leda.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-312.pyc 
b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-312.pyc new file mode 100644 index 0000000..63f5679 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/multiline_adjlist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/p2g.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/p2g.cpython-312.pyc new file mode 100644 index 0000000..653ea13 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/p2g.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/pajek.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/pajek.cpython-312.pyc new file mode 100644 index 0000000..8d66d54 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/pajek.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-312.pyc new file mode 100644 index 0000000..1c2a810 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/sparse6.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/text.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/text.cpython-312.pyc new file mode 100644 index 0000000..664baf3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/__pycache__/text.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/adjlist.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/adjlist.py new file mode 100644 index 0000000..d759939 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/adjlist.py @@ -0,0 +1,310 @@ +""" +************** +Adjacency List +************** +Read and write NetworkX graphs as adjacency lists. + +Adjacency list format is useful for graphs without data associated +with nodes or edges and for nodes that can be meaningfully represented +as strings. + +Format +------ +The adjacency list format consists of lines with node labels. The +first label in a line is the source node. Further labels in the line +are considered target nodes and are added to the graph along with an edge +between the source node and target node. + +The graph with edges a-b, a-c, d-e can be represented as the following +adjacency list (anything following the # in a line is a comment):: + + a b c # source target target + d e +""" + +__all__ = ["generate_adjlist", "write_adjlist", "parse_adjlist", "read_adjlist"] + +import networkx as nx +from networkx.utils import open_file + + +def generate_adjlist(G, delimiter=" "): + """Generate a single line of the graph G in adjacency list format. + + Parameters + ---------- + G : NetworkX graph + + delimiter : string, optional + Separator for node labels + + Returns + ------- + lines : string + Lines of data in adjlist format. + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> for line in nx.generate_adjlist(G): + ... print(line) + 0 1 2 3 + 1 2 3 + 2 3 + 3 4 + 4 5 + 5 6 + 6 + + See Also + -------- + write_adjlist, read_adjlist + + Notes + ----- + The default `delimiter=" "` will result in unexpected results if node names contain + whitespace characters. 
To avoid this problem, specify an alternate delimiter when spaces are + valid in node names. + + NB: This option is not available for data that isn't user-generated. + + """ + directed = G.is_directed() + seen = set() + for s, nbrs in G.adjacency(): + line = str(s) + delimiter + for t, data in nbrs.items(): + if not directed and t in seen: + continue + if G.is_multigraph(): + for d in data.values(): + line += str(t) + delimiter + else: + line += str(t) + delimiter + if not directed: + seen.add(s) + yield line[: -len(delimiter)] + + +@open_file(1, mode="wb") +def write_adjlist(G, path, comments="#", delimiter=" ", encoding="utf-8"): + """Write graph G in single-line adjacency-list format to path. + + + Parameters + ---------- + G : NetworkX graph + + path : string or file + Filename or file handle for data output. + Filenames ending in .gz or .bz2 will be compressed. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels + + encoding : string, optional + Text encoding. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_adjlist(G, "path4.adjlist") + + The path can be a filehandle or a string with the name of the file. If a + filehandle is provided, it has to be opened in 'wb' mode. + + >>> fh = open("path4.adjlist2", "wb") + >>> nx.write_adjlist(G, fh) + + Notes + ----- + The default `delimiter=" "` will result in unexpected results if node names contain + whitespace characters. To avoid this problem, specify an alternate delimiter when spaces are + valid in node names. + NB: This option is not available for data that isn't user-generated. + + This format does not store graph, node, or edge data. + + See Also + -------- + read_adjlist, generate_adjlist + """ + import sys + import time + + pargs = comments + " ".join(sys.argv) + "\n" + header = ( + pargs + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) + path.write(header.encode(encoding)) + + for line in generate_adjlist(G, delimiter): + line += "\n" + path.write(line.encode(encoding)) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None +): + """Parse lines of a graph adjacency list representation. + + Parameters + ---------- + lines : list or iterator of strings + Input data in adjlist format + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in adjacency list format. 
+ + Examples + -------- + >>> lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] + >>> G = nx.parse_adjlist(lines, nodetype=int) + >>> nodes = [1, 2, 3, 4, 5] + >>> all(node in G for node in nodes) + True + >>> edges = [(1, 2), (1, 5), (2, 3), (2, 4), (3, 5)] + >>> all((u, v) in G.edges() or (v, u) in G.edges() for (u, v) in edges) + True + + See Also + -------- + read_adjlist + + """ + G = nx.empty_graph(0, create_using) + for line in lines: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not len(line): + continue + vlist = line.rstrip("\n").split(delimiter) + u = vlist.pop(0) + # convert types + if nodetype is not None: + try: + u = nodetype(u) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({u}) to type {nodetype}" + ) from err + G.add_node(u) + if nodetype is not None: + try: + vlist = list(map(nodetype, vlist)) + except BaseException as err: + raise TypeError( + f"Failed to convert nodes ({','.join(vlist)}) to type {nodetype}" + ) from err + G.add_edges_from([(u, v) for v in vlist]) + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): + """Read graph in adjacency list format from path. + + Parameters + ---------- + path : string or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in adjacency list format. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_adjlist(G, "test.adjlist") + >>> G = nx.read_adjlist("test.adjlist") + + The path can be a filehandle or a string with the name of the file. If a + filehandle is provided, it has to be opened in 'rb' mode. + + >>> fh = open("test.adjlist", "rb") + >>> G = nx.read_adjlist(fh) + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_adjlist(G, "test.adjlist.gz") + >>> G = nx.read_adjlist("test.adjlist.gz") + + The optional nodetype is a function to convert node strings to nodetype. + + For example + + >>> G = nx.read_adjlist("test.adjlist", nodetype=int) + + will attempt to convert all nodes to integer type. + + Since nodes must be hashable, the function nodetype must return hashable + types (e.g. int, float, str, frozenset - or tuples of those, etc.) + + The optional create_using parameter indicates the type of NetworkX graph + created. The default is `nx.Graph`, an undirected graph. + To read the data as a directed graph use + + >>> G = nx.read_adjlist("test.adjlist", create_using=nx.DiGraph) + + Notes + ----- + This format does not store graph or node data. 
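+
+ If node names contain whitespace, a non-default delimiter avoids
+ ambiguity; a small illustration (the file name here is only an example):
+
+ >>> H = nx.Graph([("node a", "node b")])
+ >>> nx.write_adjlist(H, "test_spaces.adjlist", delimiter=":")
+ >>> H2 = nx.read_adjlist("test_spaces.adjlist", delimiter=":")
+ >>> sorted(H2)
+ ['node a', 'node b']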
+
+ See Also
+ --------
+ write_adjlist
+ """
+ lines = (line.decode(encoding) for line in path)
+ return parse_adjlist(
+ lines,
+ comments=comments,
+ delimiter=delimiter,
+ create_using=create_using,
+ nodetype=nodetype,
+ ) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/edgelist.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/edgelist.py new file mode 100644 index 0000000..afdb175 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/edgelist.py @@ -0,0 +1,489 @@
+"""
+**********
+Edge Lists
+**********
+Read and write NetworkX graphs as edge lists.
+
+The edge list format is useful for graphs with nodes that can be
+meaningfully represented as strings. Simple edge data can be stored,
+but node or graph data is not.
+There is no way of representing isolated nodes unless the node has a
+self-loop edge.
+
+Format
+------
+You can read or write three formats of edge lists with these functions.
+
+Node pairs with no data::
+
+ 1 2
+
+Python dictionary as data::
+
+ 1 2 {'weight':7, 'color':'green'}
+
+Arbitrary data::
+
+ 1 2 7 green
+"""
+
+__all__ = [
+ "generate_edgelist",
+ "write_edgelist",
+ "parse_edgelist",
+ "read_edgelist",
+ "read_weighted_edgelist",
+ "write_weighted_edgelist",
+]
+
+import networkx as nx
+from networkx.utils import open_file
+
+
+def generate_edgelist(G, delimiter=" ", data=True):
+ """Generate a single line of the graph G in edge list format.
+
+ Parameters
+ ----------
+ G : NetworkX graph
+
+ delimiter : string, optional
+ Separator for node labels
+
+ data : bool or list of keys
+ If False generate no edge data. If True use a dictionary
+ representation of edge data. If a list of keys use a list of data
+ values corresponding to the keys.
+
+ Returns
+ -------
+ lines : string
+ Lines of data in edge list format.
+
+ Examples
+ --------
+ >>> G = nx.lollipop_graph(4, 3)
+ >>> G[1][2]["weight"] = 3
+ >>> G[3][4]["capacity"] = 12
+ >>> for line in nx.generate_edgelist(G, data=False):
+ ... print(line)
+ 0 1
+ 0 2
+ 0 3
+ 1 2
+ 1 3
+ 2 3
+ 3 4
+ 4 5
+ 5 6
+
+ >>> for line in nx.generate_edgelist(G):
+ ... print(line)
+ 0 1 {}
+ 0 2 {}
+ 0 3 {}
+ 1 2 {'weight': 3}
+ 1 3 {}
+ 2 3 {}
+ 3 4 {'capacity': 12}
+ 4 5 {}
+ 5 6 {}
+
+ >>> for line in nx.generate_edgelist(G, data=["weight"]):
+ ... print(line)
+ 0 1
+ 0 2
+ 0 3
+ 1 2 3
+ 1 3
+ 2 3
+ 3 4
+ 4 5
+ 5 6
+
+ See Also
+ --------
+ write_edgelist, read_edgelist
+ """
+ if data is True:
+ for u, v, d in G.edges(data=True):
+ e = u, v, dict(d)
+ yield delimiter.join(map(str, e))
+ elif data is False:
+ for u, v in G.edges(data=False):
+ e = u, v
+ yield delimiter.join(map(str, e))
+ else:
+ for u, v, d in G.edges(data=True):
+ e = [u, v]
+ try:
+ e.extend(d[k] for k in data)
+ except KeyError:
+ pass # missing data for this edge, should warn?
+ yield delimiter.join(map(str, e))
+
+
+@open_file(1, mode="wb")
+def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
+ """Write graph as a list of edges.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+ path : file or string
+ File or filename to write. If a file is provided, it must be
+ opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
+ comments : string, optional
+ The character used to indicate the start of a comment
+ delimiter : string, optional
+ The string used to separate values. The default is whitespace.
+ data : bool or list, optional
+ If False write no edge data.
+ If True write a string representation of the edge data dictionary.
+ If a list (or other iterable) is provided, write the keys specified
+ in the list.
+ encoding: string, optional
+ Specify which encoding to use when writing file.
+
+ Examples
+ --------
+ >>> G = nx.path_graph(4)
+ >>> nx.write_edgelist(G, "test.edgelist")
+ >>> G = nx.path_graph(4)
+ >>> fh = open("test.edgelist", "wb")
+ >>> nx.write_edgelist(G, fh)
+ >>> nx.write_edgelist(G, "test.edgelist.gz")
+ >>> nx.write_edgelist(G, "test.edgelist_nodata.gz", data=False)
+
+ >>> G = nx.Graph()
+ >>> G.add_edge(1, 2, weight=7, color="red")
+ >>> nx.write_edgelist(G, "test.edgelist_bigger_nodata", data=False)
+ >>> nx.write_edgelist(G, "test.edgelist_color", data=["color"])
+ >>> nx.write_edgelist(G, "test.edgelist_color_weight", data=["color", "weight"])
+
+ See Also
+ --------
+ read_edgelist
+ write_weighted_edgelist
+ """
+
+ for line in generate_edgelist(G, delimiter, data):
+ line += "\n"
+ path.write(line.encode(encoding))
+
+
+@nx._dispatchable(graphs=None, returns_graph=True)
+def parse_edgelist(
+ lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
+):
+ """Parse lines of an edge list representation of a graph.
+
+ Parameters
+ ----------
+ lines : list or iterator of strings
+ Input data in edgelist format
+ comments : string, optional
+ Marker for comment lines. Default is `'#'`. To specify that no character
+ should be treated as a comment, use ``comments=None``.
+ delimiter : string, optional
+ Separator for node labels. Default is `None`, meaning any whitespace.
+ create_using : NetworkX graph constructor, optional (default=nx.Graph)
+ Graph type to create. If graph instance, then cleared before populated.
+ nodetype : Python type, optional
+ Convert nodes to this type. Default is `None`, meaning no conversion is
+ performed.
+ data : bool or list of (label,type) tuples
+ If `False` generate no edge data. If `True` use a dictionary
+ representation of edge data. If a list of (label, type) tuples,
+ use them as dictionary key names and types for edge data.
+ + Returns + ------- + G: NetworkX Graph + The graph corresponding to lines + + Examples + -------- + Edgelist with no data: + + >>> lines = ["1 2", "2 3", "3 4"] + >>> G = nx.parse_edgelist(lines, nodetype=int) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges()) + [(1, 2), (2, 3), (3, 4)] + + Edgelist with data in Python dictionary representation: + + >>> lines = ["1 2 {'weight': 3}", "2 3 {'weight': 27}", "3 4 {'weight': 3.0}"] + >>> G = nx.parse_edgelist(lines, nodetype=int) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})] + + Edgelist with data in a list: + + >>> lines = ["1 2 3", "2 3 27", "3 4 3.0"] + >>> G = nx.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) + >>> list(G) + [1, 2, 3, 4] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})] + + See Also + -------- + read_weighted_edgelist + """ + from ast import literal_eval + + G = nx.empty_graph(0, create_using) + for line in lines: + if comments is not None: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not line: + continue + # split line, should have 2 or more + s = line.rstrip("\n").split(delimiter) + if len(s) < 2: + continue + u = s.pop(0) + v = s.pop(0) + d = s + if nodetype is not None: + try: + u = nodetype(u) + v = nodetype(v) + except Exception as err: + raise TypeError( + f"Failed to convert nodes {u},{v} to type {nodetype}." + ) from err + + if len(d) == 0 or data is False: + # no data or data type specified + edgedata = {} + elif data is True: + # no edge types specified + try: # try to evaluate as dictionary + if delimiter == ",": + edgedata_str = ",".join(d) + else: + edgedata_str = " ".join(d) + edgedata = dict(literal_eval(edgedata_str.strip())) + except Exception as err: + raise TypeError( + f"Failed to convert edge data ({d}) to dictionary." + ) from err + else: + # convert edge data to dictionary with specified keys and type + if len(d) != len(data): + raise IndexError( + f"Edge data {d} and data_keys {data} are not the same length" + ) + edgedata = {} + for (edge_key, edge_type), edge_value in zip(data, d): + try: + edge_value = edge_type(edge_value) + except Exception as err: + raise TypeError( + f"Failed to convert {edge_key} data {edge_value} " + f"to type {edge_type}." + ) from err + edgedata.update({edge_key: edge_value}) + G.add_edge(u, v, **edgedata) + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + data=True, + edgetype=None, + encoding="utf-8", +): + """Read a graph from a list of edges. + + Parameters + ---------- + path : file or string + File or filename to read. If a file is provided, it must be + opened in 'rb' mode. + Filenames ending in .gz or .bz2 will be decompressed. + comments : string, optional + The character used to indicate the start of a comment. To specify that + no character should be treated as a comment, use ``comments=None``. + delimiter : string, optional + The string used to separate values. The default is whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
+ nodetype : int, float, str, Python type, optional + Convert node data from strings to specified type + data : bool or list of (label,type) tuples + Tuples specifying dictionary key names and types for edge data + edgetype : int, float, str, Python type, optional OBSOLETE + Convert edge data from strings to specified type and use as 'weight' + encoding: string, optional + Specify which encoding to use when reading file. + + Returns + ------- + G : graph + A networkx Graph or other type specified with create_using + + Examples + -------- + >>> nx.write_edgelist(nx.path_graph(4), "test.edgelist_P4") + >>> G = nx.read_edgelist("test.edgelist_P4") + + >>> fh = open("test.edgelist_P4", "rb") + >>> G = nx.read_edgelist(fh) + >>> fh.close() + + >>> G = nx.read_edgelist("test.edgelist_P4", nodetype=int) + >>> G = nx.read_edgelist("test.edgelist_P4", create_using=nx.DiGraph) + + Edgelist with data in a list: + + >>> textline = "1 2 3" + >>> fh = open("test.textline", "w") + >>> d = fh.write(textline) + >>> fh.close() + >>> G = nx.read_edgelist("test.textline", nodetype=int, data=(("weight", float),)) + >>> list(G) + [1, 2] + >>> list(G.edges(data=True)) + [(1, 2, {'weight': 3.0})] + + See parse_edgelist() for more examples of formatting. + + See Also + -------- + parse_edgelist + write_edgelist + + Notes + ----- + Since nodes must be hashable, the function nodetype must return hashable + types (e.g. int, float, str, frozenset - or tuples of those, etc.) + """ + lines = (line if isinstance(line, str) else line.decode(encoding) for line in path) + return parse_edgelist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + data=data, + ) + + +def write_weighted_edgelist(G, path, comments="#", delimiter=" ", encoding="utf-8"): + """Write graph G as a list of edges with numeric weights. + + Parameters + ---------- + G : graph + A NetworkX graph + path : file or string + File or filename to write. If a file is provided, it must be + opened in 'wb' mode. + Filenames ending in .gz or .bz2 will be compressed. + comments : string, optional + The character used to indicate the start of a comment + delimiter : string, optional + The string used to separate values. The default is whitespace. + encoding: string, optional + Specify which encoding to use when writing file. + + Examples + -------- + >>> G = nx.Graph() + >>> G.add_edge(1, 2, weight=7) + >>> nx.write_weighted_edgelist(G, "test.weighted.edgelist") + + See Also + -------- + read_edgelist + write_edgelist + read_weighted_edgelist + """ + write_edgelist( + G, + path, + comments=comments, + delimiter=delimiter, + data=("weight",), + encoding=encoding, + ) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def read_weighted_edgelist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + encoding="utf-8", +): + """Read a graph as list of edges with numeric weights. + + Parameters + ---------- + path : file or string + File or filename to read. If a file is provided, it must be + opened in 'rb' mode. + Filenames ending in .gz or .bz2 will be decompressed. + comments : string, optional + The character used to indicate the start of a comment. + delimiter : string, optional + The string used to separate values. The default is whitespace. + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. 
nodetype : int, float, str, Python type, optional
+ Convert node data from strings to specified type
+ encoding: string, optional
+ Specify which encoding to use when reading file.
+
+ Returns
+ -------
+ G : graph
+ A networkx Graph or other type specified with create_using
+
+ Notes
+ -----
+ Since nodes must be hashable, the function nodetype must return hashable
+ types (e.g. int, float, str, frozenset - or tuples of those, etc.)
+
+ Example edgelist file format.
+
+ With numeric edge data::
+
+ # read with
+ # >>> G=nx.read_weighted_edgelist(fh)
+ # source target data
+ a b 1
+ a c 3.14159
+ d e 42
+
+ See Also
+ --------
+ write_weighted_edgelist
+ """
+ return read_edgelist(
+ path,
+ comments=comments,
+ delimiter=delimiter,
+ create_using=create_using,
+ nodetype=nodetype,
+ data=(("weight", float),),
+ encoding=encoding,
+ ) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/gexf.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/gexf.py new file mode 100644 index 0000000..da726e3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/gexf.py @@ -0,0 +1,1064 @@
+"""Read and write graphs in GEXF format.
+
+.. warning::
+ This parser uses the standard xml library present in Python, which is
+ insecure - see :external+python:mod:`xml` for additional information.
+ Only parse GEXF files you trust.
+
+GEXF (Graph Exchange XML Format) is a language for describing complex
+network structures, their associated data and dynamics.
+
+This implementation does not support mixed graphs (directed and
+undirected edges together).
+
+Format
+------
+GEXF is an XML format. See http://gexf.net/schema.html for the
+specification and http://gexf.net/basic.html for examples.
+"""
+
+import itertools
+import time
+from xml.etree.ElementTree import (
+ Element,
+ ElementTree,
+ SubElement,
+ register_namespace,
+ tostring,
+)
+
+import networkx as nx
+from networkx.utils import open_file
+
+__all__ = ["write_gexf", "read_gexf", "relabel_gexf_graph", "generate_gexf"]
+
+
+@open_file(1, mode="wb")
+def write_gexf(G, path, encoding="utf-8", prettyprint=True, version="1.2draft"):
+ """Write G in GEXF format to path.
+
+ "GEXF (Graph Exchange XML Format) is a language for describing
+ complex networks structures, their associated data and dynamics" [1]_.
+
+ Node attributes are checked according to the version of the GEXF
+ schemas used for parameters which are not user defined,
+ e.g. visualization 'viz' [2]_. See the example below for usage.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+ path : file or string
+ File or file name to write.
+ File names ending in .gz or .bz2 will be compressed.
+ encoding : string (optional, default: 'utf-8')
+ Encoding for text data.
+ prettyprint : bool (optional, default: True)
+ If True use line breaks and indenting in output XML.
+ version: string (optional, default: '1.2draft')
+ The version of GEXF to be used for checking node attributes
+
+ Examples
+ --------
+ >>> G = nx.path_graph(4)
+ >>> nx.write_gexf(G, "test.gexf")
+
+ # visualization data
+ >>> G.nodes[0]["viz"] = {"size": 54}
+ >>> G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1}
+ >>> G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256}
+
+
+ Notes
+ -----
+ This implementation does not support mixed graphs (directed and undirected
+ edges together).
+
+ The node id attribute is set to be the string of the node label.
+ If you want to specify an id, set it as node data, e.g.
+ node['a']['id']=1 to set the id of node 'a' to 1.
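+
+ A minimal illustration of the id note above (the output file name is
+ only an example):
+
+ >>> H = nx.Graph()
+ >>> H.add_node("a", id=1)
+ >>> nx.write_gexf(H, "test_node_id.gexf")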
+
+ References
+ ----------
+ .. [1] GEXF File Format, http://gexf.net/
+ .. [2] GEXF schema, http://gexf.net/schema.html
+ """
+ writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version)
+ writer.add_graph(G)
+ writer.write(path)
+
+
+def generate_gexf(G, encoding="utf-8", prettyprint=True, version="1.2draft"):
+ """Generate lines of GEXF format representation of G.
+
+ "GEXF (Graph Exchange XML Format) is a language for describing
+ complex networks structures, their associated data and dynamics" [1]_.
+
+ Parameters
+ ----------
+ G : graph
+ A NetworkX graph
+ encoding : string (optional, default: 'utf-8')
+ Encoding for text data.
+ prettyprint : bool (optional, default: True)
+ If True use line breaks and indenting in output XML.
+ version : string (default: 1.2draft)
+ Version of GEXF File Format (see http://gexf.net/schema.html)
+ Supported values: "1.1draft", "1.2draft"
+
+
+ Examples
+ --------
+ >>> G = nx.path_graph(4)
+ >>> linefeed = chr(10) # linefeed=\n
+ >>> s = linefeed.join(nx.generate_gexf(G))
+ >>> for line in nx.generate_gexf(G): # doctest: +SKIP
+ ... print(line)
+
+ Notes
+ -----
+ This implementation does not support mixed graphs (directed and undirected
+ edges together).
+
+ The node id attribute is set to be the string of the node label.
+ If you want to specify an id, set it as node data, e.g.
+ node['a']['id']=1 to set the id of node 'a' to 1.
+
+ References
+ ----------
+ .. [1] GEXF File Format, https://gephi.org/gexf/format/
+ """
+ writer = GEXFWriter(encoding=encoding, prettyprint=prettyprint, version=version)
+ writer.add_graph(G)
+ yield from str(writer).splitlines()
+
+
+@open_file(0, mode="rb")
+@nx._dispatchable(graphs=None, returns_graph=True)
+def read_gexf(path, node_type=None, relabel=False, version="1.2draft"):
+ """Read graph in GEXF format from path.
+
+ "GEXF (Graph Exchange XML Format) is a language for describing
+ complex networks structures, their associated data and dynamics" [1]_.
+
+ Parameters
+ ----------
+ path : file or string
+ Filename or file handle to read.
+ Filenames ending in .gz or .bz2 will be decompressed.
+ node_type: Python type (default: None)
+ Convert node ids to this type if not None.
+ relabel : bool (default: False)
+ If True relabel the nodes to use the GEXF node "label" attribute
+ instead of the node "id" attribute as the NetworkX node label.
+ version : string (default: 1.2draft)
+ Version of GEXF File Format (see http://gexf.net/schema.html)
+ Supported values: "1.1draft", "1.2draft"
+
+ Returns
+ -------
+ graph: NetworkX graph
+ If no parallel edges are found a Graph or DiGraph is returned.
+ Otherwise a MultiGraph or MultiDiGraph is returned.
+
+ Notes
+ -----
+ This implementation does not support mixed graphs (directed and undirected
+ edges together).
+
+ References
+ ----------
+ ..
[1] GEXF File Format, http://gexf.net/ + """ + reader = GEXFReader(node_type=node_type, version=version) + if relabel: + G = relabel_gexf_graph(reader(path)) + else: + G = reader(path) + return G + + +class GEXF: + versions = { + "1.1draft": { + "NS_GEXF": "http://www.gexf.net/1.1draft", + "NS_VIZ": "http://www.gexf.net/1.1draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + [ + "http://www.gexf.net/1.1draft", + "http://www.gexf.net/1.1draft/gexf.xsd", + ] + ), + "VERSION": "1.1", + }, + "1.2draft": { + "NS_GEXF": "http://www.gexf.net/1.2draft", + "NS_VIZ": "http://www.gexf.net/1.2draft/viz", + "NS_XSI": "http://www.w3.org/2001/XMLSchema-instance", + "SCHEMALOCATION": " ".join( + [ + "http://www.gexf.net/1.2draft", + "http://www.gexf.net/1.2draft/gexf.xsd", + ] + ), + "VERSION": "1.2", + }, + } + + def construct_types(self): + types = [ + (int, "integer"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + (list, "string"), + (dict, "string"), + (int, "long"), + (str, "liststring"), + (str, "anyURI"), + (str, "string"), + ] + + # These additions to types allow writing numpy types + try: + import numpy as np + except ImportError: + pass + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types + + self.xml_type = dict(types) + self.python_type = dict(reversed(a) for a in types) + + # http://www.w3.org/TR/xmlschema-2/#boolean + convert_bool = { + "true": True, + "false": False, + "True": True, + "False": False, + "0": False, + 0: False, + "1": True, + 1: True, + } + + def set_version(self, version): + d = self.versions.get(version) + if d is None: + raise nx.NetworkXError(f"Unknown GEXF version {version}.") + self.NS_GEXF = d["NS_GEXF"] + self.NS_VIZ = d["NS_VIZ"] + self.NS_XSI = d["NS_XSI"] + self.SCHEMALOCATION = d["SCHEMALOCATION"] + self.VERSION = d["VERSION"] + self.version = version + + +class GEXFWriter(GEXF): + # class for writing GEXF format files + # use write_gexf() function + def __init__( + self, graph=None, encoding="utf-8", prettyprint=True, version="1.2draft" + ): + self.construct_types() + self.prettyprint = prettyprint + self.encoding = encoding + self.set_version(version) + self.xml = Element( + "gexf", + { + "xmlns": self.NS_GEXF, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + "version": self.VERSION, + }, + ) + + # Make meta element a non-graph element + # Also add lastmodifieddate as attribute, not tag + meta_element = Element("meta") + subelement_text = f"NetworkX {nx.__version__}" + SubElement(meta_element, "creator").text = subelement_text + meta_element.set("lastmodifieddate", time.strftime("%Y-%m-%d")) + self.xml.append(meta_element) + + register_namespace("viz", self.NS_VIZ) + + # counters for edge and attribute identifiers + self.edge_id = itertools.count() + self.attr_id = itertools.count() + self.all_edge_ids = set() + # default attributes are stored in dictionaries + self.attr = {} + self.attr["node"] = {} + self.attr["edge"] = {} + self.attr["node"]["dynamic"] = {} + self.attr["node"]["static"] = {} + self.attr["edge"]["dynamic"] = {} + self.attr["edge"]["static"] = {} + + if graph is not None: + 
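+ # a graph handed to the constructor is turned into XML right away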
self.add_graph(graph) + + def __str__(self): + if self.prettyprint: + self.indent(self.xml) + s = tostring(self.xml).decode(self.encoding) + return s + + def add_graph(self, G): + # first pass through G collecting edge ids + for u, v, dd in G.edges(data=True): + eid = dd.get("id") + if eid is not None: + self.all_edge_ids.add(str(eid)) + # set graph attributes + if G.graph.get("mode") == "dynamic": + mode = "dynamic" + else: + mode = "static" + # Add a graph element to the XML + if G.is_directed(): + default = "directed" + else: + default = "undirected" + name = G.graph.get("name", "") + graph_element = Element("graph", defaultedgetype=default, mode=mode, name=name) + self.graph_element = graph_element + self.add_nodes(G, graph_element) + self.add_edges(G, graph_element) + self.xml.append(graph_element) + + def add_nodes(self, G, graph_element): + nodes_element = Element("nodes") + for node, data in G.nodes(data=True): + node_data = data.copy() + node_id = str(node_data.pop("id", node)) + kw = {"id": node_id} + label = str(node_data.pop("label", node)) + kw["label"] = label + try: + pid = node_data.pop("pid") + kw["pid"] = str(pid) + except KeyError: + pass + try: + start = node_data.pop("start") + kw["start"] = str(start) + self.alter_graph_mode_timeformat(start) + except KeyError: + pass + try: + end = node_data.pop("end") + kw["end"] = str(end) + self.alter_graph_mode_timeformat(end) + except KeyError: + pass + # add node element with attributes + node_element = Element("node", **kw) + # add node element and attr subelements + default = G.graph.get("node_default", {}) + node_data = self.add_parents(node_element, node_data) + if self.VERSION == "1.1": + node_data = self.add_slices(node_element, node_data) + else: + node_data = self.add_spells(node_element, node_data) + node_data = self.add_viz(node_element, node_data) + node_data = self.add_attributes("node", node_element, node_data, default) + nodes_element.append(node_element) + graph_element.append(nodes_element) + + def add_edges(self, G, graph_element): + def edge_key_data(G): + # helper function to unify multigraph and graph edge iterator + if G.is_multigraph(): + for u, v, key, data in G.edges(data=True, keys=True): + edge_data = data.copy() + edge_data.update(key=key) + edge_id = edge_data.pop("id", None) + if edge_id is None: + edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) + yield u, v, edge_id, edge_data + else: + for u, v, data in G.edges(data=True): + edge_data = data.copy() + edge_id = edge_data.pop("id", None) + if edge_id is None: + edge_id = next(self.edge_id) + while str(edge_id) in self.all_edge_ids: + edge_id = next(self.edge_id) + self.all_edge_ids.add(str(edge_id)) + yield u, v, edge_id, edge_data + + edges_element = Element("edges") + for u, v, key, edge_data in edge_key_data(G): + kw = {"id": str(key)} + try: + edge_label = edge_data.pop("label") + kw["label"] = str(edge_label) + except KeyError: + pass + try: + edge_weight = edge_data.pop("weight") + kw["weight"] = str(edge_weight) + except KeyError: + pass + try: + edge_type = edge_data.pop("type") + kw["type"] = str(edge_type) + except KeyError: + pass + try: + start = edge_data.pop("start") + kw["start"] = str(start) + self.alter_graph_mode_timeformat(start) + except KeyError: + pass + try: + end = edge_data.pop("end") + kw["end"] = str(end) + self.alter_graph_mode_timeformat(end) + except KeyError: + pass + source_id = str(G.nodes[u].get("id", u)) + target_id = 
str(G.nodes[v].get("id", v)) + edge_element = Element("edge", source=source_id, target=target_id, **kw) + default = G.graph.get("edge_default", {}) + if self.VERSION == "1.1": + edge_data = self.add_slices(edge_element, edge_data) + else: + edge_data = self.add_spells(edge_element, edge_data) + edge_data = self.add_viz(edge_element, edge_data) + edge_data = self.add_attributes("edge", edge_element, edge_data, default) + edges_element.append(edge_element) + graph_element.append(edges_element) + + def add_attributes(self, node_or_edge, xml_obj, data, default): + # Add attrvalues to node or edge + attvalues = Element("attvalues") + if len(data) == 0: + return data + mode = "static" + for k, v in data.items(): + # rename generic multigraph key to avoid any name conflict + if k == "key": + k = "networkx_key" + val_type = type(v) + if val_type not in self.xml_type: + raise TypeError(f"attribute value type is not allowed: {val_type}") + if isinstance(v, list): + # dynamic data + for val, start, end in v: + val_type = type(val) + if start is not None or end is not None: + mode = "dynamic" + self.alter_graph_mode_timeformat(start) + self.alter_graph_mode_timeformat(end) + break + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) + for val, start, end in v: + e = Element("attvalue") + e.attrib["for"] = attr_id + e.attrib["value"] = str(val) + # Handle nan, inf, -inf differently + if val_type is float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" + if start is not None: + e.attrib["start"] = str(start) + if end is not None: + e.attrib["end"] = str(end) + attvalues.append(e) + else: + # static data + mode = "static" + attr_id = self.get_attr_id( + str(k), self.xml_type[val_type], node_or_edge, default, mode + ) + e = Element("attvalue") + e.attrib["for"] = attr_id + if isinstance(v, bool): + e.attrib["value"] = str(v).lower() + else: + e.attrib["value"] = str(v) + # Handle float nan, inf, -inf differently + if val_type is float: + if e.attrib["value"] == "inf": + e.attrib["value"] = "INF" + elif e.attrib["value"] == "nan": + e.attrib["value"] = "NaN" + elif e.attrib["value"] == "-inf": + e.attrib["value"] = "-INF" + attvalues.append(e) + xml_obj.append(attvalues) + return data + + def get_attr_id(self, title, attr_type, edge_or_node, default, mode): + # find the id of the attribute or generate a new id + try: + return self.attr[edge_or_node][mode][title] + except KeyError: + # generate new id + new_id = str(next(self.attr_id)) + self.attr[edge_or_node][mode][title] = new_id + attr_kwargs = {"id": new_id, "title": title, "type": attr_type} + attribute = Element("attribute", **attr_kwargs) + # add subelement for data default value if present + default_title = default.get(title) + if default_title is not None: + default_element = Element("default") + default_element.text = str(default_title) + attribute.append(default_element) + # new insert it into the XML + attributes_element = None + for a in self.graph_element.findall("attributes"): + # find existing attributes element by class and mode + a_class = a.get("class") + a_mode = a.get("mode", "static") + if a_class == edge_or_node and a_mode == mode: + attributes_element = a + if attributes_element is None: + # create new attributes element + attr_kwargs = {"mode": mode, "class": edge_or_node} + attributes_element = Element("attributes", **attr_kwargs) + 
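+                # insert at position 0 so the <attributes> declaration precedes
+                # the <nodes>/<edges> elements that reference it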
self.graph_element.insert(0, attributes_element) + attributes_element.append(attribute) + return new_id + + def add_viz(self, element, node_data): + viz = node_data.pop("viz", False) + if viz: + color = viz.get("color") + if color is not None: + if self.VERSION == "1.1": + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + ) + else: + e = Element( + f"{{{self.NS_VIZ}}}color", + r=str(color.get("r")), + g=str(color.get("g")), + b=str(color.get("b")), + a=str(color.get("a", 1.0)), + ) + element.append(e) + + size = viz.get("size") + if size is not None: + e = Element(f"{{{self.NS_VIZ}}}size", value=str(size)) + element.append(e) + + thickness = viz.get("thickness") + if thickness is not None: + e = Element(f"{{{self.NS_VIZ}}}thickness", value=str(thickness)) + element.append(e) + + shape = viz.get("shape") + if shape is not None: + if shape.startswith("http"): + e = Element( + f"{{{self.NS_VIZ}}}shape", value="image", uri=str(shape) + ) + else: + e = Element(f"{{{self.NS_VIZ}}}shape", value=str(shape)) + element.append(e) + + position = viz.get("position") + if position is not None: + e = Element( + f"{{{self.NS_VIZ}}}position", + x=str(position.get("x")), + y=str(position.get("y")), + z=str(position.get("z")), + ) + element.append(e) + return node_data + + def add_parents(self, node_element, node_data): + parents = node_data.pop("parents", False) + if parents: + parents_element = Element("parents") + for p in parents: + e = Element("parent") + e.attrib["for"] = str(p) + parents_element.append(e) + node_element.append(parents_element) + return node_data + + def add_slices(self, node_or_edge_element, node_or_edge_data): + slices = node_or_edge_data.pop("slices", False) + if slices: + slices_element = Element("slices") + for start, end in slices: + e = Element("slice", start=str(start), end=str(end)) + slices_element.append(e) + node_or_edge_element.append(slices_element) + return node_or_edge_data + + def add_spells(self, node_or_edge_element, node_or_edge_data): + spells = node_or_edge_data.pop("spells", False) + if spells: + spells_element = Element("spells") + for start, end in spells: + e = Element("spell") + if start is not None: + e.attrib["start"] = str(start) + self.alter_graph_mode_timeformat(start) + if end is not None: + e.attrib["end"] = str(end) + self.alter_graph_mode_timeformat(end) + spells_element.append(e) + node_or_edge_element.append(spells_element) + return node_or_edge_data + + def alter_graph_mode_timeformat(self, start_or_end): + # If 'start' or 'end' appears, set timeformat + if start_or_end is not None: + if isinstance(start_or_end, str): + timeformat = "date" + elif isinstance(start_or_end, float): + timeformat = "double" + elif isinstance(start_or_end, int): + timeformat = "long" + else: + raise nx.NetworkXError( + "timeformat should be of the type int, float or str" + ) + self.graph_element.set("timeformat", timeformat) + # If Graph mode is static, alter to dynamic + if self.graph_element.get("mode") == "static": + self.graph_element.set("mode", "dynamic") + + def write(self, fh): + # Serialize graph G in GEXF to the open fh + if self.prettyprint: + self.indent(self.xml) + document = ElementTree(self.xml) + document.write(fh, encoding=self.encoding, xml_declaration=True) + + def indent(self, elem, level=0): + # in-place prettyprint formatter + i = "\n" + " " * level + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + 
elem.tail = i + for elem in elem: + self.indent(elem, level + 1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +class GEXFReader(GEXF): + # Class to read GEXF format files + # use read_gexf() function + def __init__(self, node_type=None, version="1.2draft"): + self.construct_types() + self.node_type = node_type + # assume simple graph and test for multigraph on read + self.simple_graph = True + self.set_version(version) + + def __call__(self, stream): + self.xml = ElementTree(file=stream) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") + if g is not None: + return self.make_graph(g) + # try all the versions + for version in self.versions: + self.set_version(version) + g = self.xml.find(f"{{{self.NS_GEXF}}}graph") + if g is not None: + return self.make_graph(g) + raise nx.NetworkXError("No element in GEXF file.") + + def make_graph(self, graph_xml): + # start with empty DiGraph or MultiDiGraph + edgedefault = graph_xml.get("defaultedgetype", None) + if edgedefault == "directed": + G = nx.MultiDiGraph() + else: + G = nx.MultiGraph() + + # graph attributes + graph_name = graph_xml.get("name", "") + if graph_name != "": + G.graph["name"] = graph_name + graph_start = graph_xml.get("start") + if graph_start is not None: + G.graph["start"] = graph_start + graph_end = graph_xml.get("end") + if graph_end is not None: + G.graph["end"] = graph_end + graph_mode = graph_xml.get("mode", "") + if graph_mode == "dynamic": + G.graph["mode"] = "dynamic" + else: + G.graph["mode"] = "static" + + # timeformat + self.timeformat = graph_xml.get("timeformat") + if self.timeformat == "date": + self.timeformat = "string" + + # node and edge attributes + attributes_elements = graph_xml.findall(f"{{{self.NS_GEXF}}}attributes") + # dictionaries to hold attributes and attribute defaults + node_attr = {} + node_default = {} + edge_attr = {} + edge_default = {} + for a in attributes_elements: + attr_class = a.get("class") + if attr_class == "node": + na, nd = self.find_gexf_attributes(a) + node_attr.update(na) + node_default.update(nd) + G.graph["node_default"] = node_default + elif attr_class == "edge": + ea, ed = self.find_gexf_attributes(a) + edge_attr.update(ea) + edge_default.update(ed) + G.graph["edge_default"] = edge_default + else: + raise # unknown attribute class + + # Hack to handle Gephi0.7beta bug + # add weight attribute + ea = {"weight": {"type": "double", "mode": "static", "title": "weight"}} + ed = {} + edge_attr.update(ea) + edge_default.update(ed) + G.graph["edge_default"] = edge_default + + # add nodes + nodes_element = graph_xml.find(f"{{{self.NS_GEXF}}}nodes") + if nodes_element is not None: + for node_xml in nodes_element.findall(f"{{{self.NS_GEXF}}}node"): + self.add_node(G, node_xml, node_attr) + + # add edges + edges_element = graph_xml.find(f"{{{self.NS_GEXF}}}edges") + if edges_element is not None: + for edge_xml in edges_element.findall(f"{{{self.NS_GEXF}}}edge"): + self.add_edge(G, edge_xml, edge_attr) + + # switch to Graph or DiGraph if no parallel edges were found. 
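+        # (self.simple_graph is cleared in add_edge() whenever a repeated
+        # (source, target) pair is encountered)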
+ if self.simple_graph: + if G.is_directed(): + G = nx.DiGraph(G) + else: + G = nx.Graph(G) + return G + + def add_node(self, G, node_xml, node_attr, node_pid=None): + # add a single node with attributes to the graph + + # get attributes and subattributues for node + data = self.decode_attr_elements(node_attr, node_xml) + data = self.add_parents(data, node_xml) # add any parents + if self.VERSION == "1.1": + data = self.add_slices(data, node_xml) # add slices + else: + data = self.add_spells(data, node_xml) # add spells + data = self.add_viz(data, node_xml) # add viz + data = self.add_start_end(data, node_xml) # add start/end + + # find the node id and cast it to the appropriate type + node_id = node_xml.get("id") + if self.node_type is not None: + node_id = self.node_type(node_id) + + # every node should have a label + node_label = node_xml.get("label") + data["label"] = node_label + + # parent node id + node_pid = node_xml.get("pid", node_pid) + if node_pid is not None: + data["pid"] = node_pid + + # check for subnodes, recursive + subnodes = node_xml.find(f"{{{self.NS_GEXF}}}nodes") + if subnodes is not None: + for node_xml in subnodes.findall(f"{{{self.NS_GEXF}}}node"): + self.add_node(G, node_xml, node_attr, node_pid=node_id) + + G.add_node(node_id, **data) + + def add_start_end(self, data, xml): + # start and end times + ttype = self.timeformat + node_start = xml.get("start") + if node_start is not None: + data["start"] = self.python_type[ttype](node_start) + node_end = xml.get("end") + if node_end is not None: + data["end"] = self.python_type[ttype](node_end) + return data + + def add_viz(self, data, node_xml): + # add viz element for node + viz = {} + color = node_xml.find(f"{{{self.NS_VIZ}}}color") + if color is not None: + if self.VERSION == "1.1": + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + } + else: + viz["color"] = { + "r": int(color.get("r")), + "g": int(color.get("g")), + "b": int(color.get("b")), + "a": float(color.get("a", 1)), + } + + size = node_xml.find(f"{{{self.NS_VIZ}}}size") + if size is not None: + viz["size"] = float(size.get("value")) + + thickness = node_xml.find(f"{{{self.NS_VIZ}}}thickness") + if thickness is not None: + viz["thickness"] = float(thickness.get("value")) + + shape = node_xml.find(f"{{{self.NS_VIZ}}}shape") + if shape is not None: + viz["shape"] = shape.get("shape") + if viz["shape"] == "image": + viz["shape"] = shape.get("uri") + + position = node_xml.find(f"{{{self.NS_VIZ}}}position") + if position is not None: + viz["position"] = { + "x": float(position.get("x", 0)), + "y": float(position.get("y", 0)), + "z": float(position.get("z", 0)), + } + + if len(viz) > 0: + data["viz"] = viz + return data + + def add_parents(self, data, node_xml): + parents_element = node_xml.find(f"{{{self.NS_GEXF}}}parents") + if parents_element is not None: + data["parents"] = [] + for p in parents_element.findall(f"{{{self.NS_GEXF}}}parent"): + parent = p.get("for") + data["parents"].append(parent) + return data + + def add_slices(self, data, node_or_edge_xml): + slices_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}slices") + if slices_element is not None: + data["slices"] = [] + for s in slices_element.findall(f"{{{self.NS_GEXF}}}slice"): + start = s.get("start") + end = s.get("end") + data["slices"].append((start, end)) + return data + + def add_spells(self, data, node_or_edge_xml): + spells_element = node_or_edge_xml.find(f"{{{self.NS_GEXF}}}spells") + if spells_element is not None: + 
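+            # spell boundaries are cast below using the graph-level timeformat
+            # (self.timeformat, recorded while parsing the <graph> element)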
data["spells"] = [] + ttype = self.timeformat + for s in spells_element.findall(f"{{{self.NS_GEXF}}}spell"): + start = self.python_type[ttype](s.get("start")) + end = self.python_type[ttype](s.get("end")) + data["spells"].append((start, end)) + return data + + def add_edge(self, G, edge_element, edge_attr): + # add an edge to the graph + + # raise error if we find mixed directed and undirected edges + edge_direction = edge_element.get("type") + if G.is_directed() and edge_direction == "undirected": + raise nx.NetworkXError("Undirected edge found in directed graph.") + if (not G.is_directed()) and edge_direction == "directed": + raise nx.NetworkXError("Directed edge found in undirected graph.") + + # Get source and target and recast type if required + source = edge_element.get("source") + target = edge_element.get("target") + if self.node_type is not None: + source = self.node_type(source) + target = self.node_type(target) + + data = self.decode_attr_elements(edge_attr, edge_element) + data = self.add_start_end(data, edge_element) + + if self.VERSION == "1.1": + data = self.add_slices(data, edge_element) # add slices + else: + data = self.add_spells(data, edge_element) # add spells + + # GEXF stores edge ids as an attribute + # NetworkX uses them as keys in multigraphs + # if networkx_key is not specified as an attribute + edge_id = edge_element.get("id") + if edge_id is not None: + data["id"] = edge_id + + # check if there is a 'multigraph_key' and use that as edge_id + multigraph_key = data.pop("networkx_key", None) + if multigraph_key is not None: + edge_id = multigraph_key + + weight = edge_element.get("weight") + if weight is not None: + data["weight"] = float(weight) + + edge_label = edge_element.get("label") + if edge_label is not None: + data["label"] = edge_label + + if G.has_edge(source, target): + # seen this edge before - this is a multigraph + self.simple_graph = False + G.add_edge(source, target, key=edge_id, **data) + if edge_direction == "mutual": + G.add_edge(target, source, key=edge_id, **data) + + def decode_attr_elements(self, gexf_keys, obj_xml): + # Use the key information to decode the attr XML + attr = {} + # look for outer '' element + attr_element = obj_xml.find(f"{{{self.NS_GEXF}}}attvalues") + if attr_element is not None: + # loop over elements + for a in attr_element.findall(f"{{{self.NS_GEXF}}}attvalue"): + key = a.get("for") # for is required + try: # should be in our gexf_keys dictionary + title = gexf_keys[key]["title"] + except KeyError as err: + raise nx.NetworkXError(f"No attribute defined for={key}.") from err + atype = gexf_keys[key]["type"] + value = a.get("value") + if atype == "boolean": + value = self.convert_bool[value] + else: + value = self.python_type[atype](value) + if gexf_keys[key]["mode"] == "dynamic": + # for dynamic graphs use list of three-tuples + # [(value1,start1,end1), (value2,start2,end2), etc] + ttype = self.timeformat + start = self.python_type[ttype](a.get("start")) + end = self.python_type[ttype](a.get("end")) + if title in attr: + attr[title].append((value, start, end)) + else: + attr[title] = [(value, start, end)] + else: + # for static graphs just assign the value + attr[title] = value + return attr + + def find_gexf_attributes(self, attributes_element): + # Extract all the attributes and defaults + attrs = {} + defaults = {} + mode = attributes_element.get("mode") + for k in attributes_element.findall(f"{{{self.NS_GEXF}}}attribute"): + attr_id = k.get("id") + title = k.get("title") + atype = k.get("type") + attrs[attr_id] = 
{"title": title, "type": atype, "mode": mode} + # check for the 'default' subelement of key element and add + default = k.find(f"{{{self.NS_GEXF}}}default") + if default is not None: + if atype == "boolean": + value = self.convert_bool[default.text] + else: + value = self.python_type[atype](default.text) + defaults[title] = value + return attrs, defaults + + +def relabel_gexf_graph(G): + """Relabel graph using "label" node keyword for node label. + + Parameters + ---------- + G : graph + A NetworkX graph read from GEXF data + + Returns + ------- + H : graph + A NetworkX graph with relabeled nodes + + Raises + ------ + NetworkXError + If node labels are missing or not unique while relabel=True. + + Notes + ----- + This function relabels the nodes in a NetworkX graph with the + "label" attribute. It also handles relabeling the specific GEXF + node attributes "parents", and "pid". + """ + # build mapping of node labels, do some error checking + try: + mapping = [(u, G.nodes[u]["label"]) for u in G] + except KeyError as err: + raise nx.NetworkXError( + "Failed to relabel nodes: missing node labels found. Use relabel=False." + ) from err + x, y = zip(*mapping) + if len(set(y)) != len(G): + raise nx.NetworkXError( + "Failed to relabel nodes: duplicate node labels found. Use relabel=False." + ) + mapping = dict(mapping) + H = nx.relabel_nodes(G, mapping) + # relabel attributes + for n in G: + m = mapping[n] + H.nodes[m]["id"] = n + H.nodes[m].pop("label") + if "pid" in H.nodes[m]: + H.nodes[m]["pid"] = mapping[G.nodes[n]["pid"]] + if "parents" in H.nodes[m]: + H.nodes[m]["parents"] = [mapping[p] for p in G.nodes[n]["parents"]] + return H diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/gml.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/gml.py new file mode 100644 index 0000000..c53496c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/gml.py @@ -0,0 +1,879 @@ +""" +Read graphs in GML format. + +"GML, the Graph Modelling Language, is our proposal for a portable +file format for graphs. GML's key features are portability, simple +syntax, extensibility and flexibility. A GML file consists of a +hierarchical key-value lists. Graphs can be annotated with arbitrary +data structures. The idea for a common file format was born at the +GD'95; this proposal is the outcome of many discussions. GML is the +standard file format in the Graphlet graph editor system. It has been +overtaken and adapted by several other systems for drawing graphs." + +GML files are stored using a 7-bit ASCII encoding with any extended +ASCII characters (iso8859-1) appearing as HTML character entities. +You will need to give some thought into how the exported data should +interact with different languages and even different Python versions. +Re-importing from gml is also a concern. + +Without specifying a `stringizer`/`destringizer`, the code is capable of +writing `int`/`float`/`str`/`dict`/`list` data as required by the GML +specification. For writing other data types, and for reading data other +than `str` you need to explicitly supply a `stringizer`/`destringizer`. + +For additional documentation on the GML file format, please see the +`GML website `_. + +Several example graphs in GML format may be found on Mark Newman's +`Network data page `_. 
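+
+For example, a minimal write/read round trip (the file name here is only
+illustrative)::
+
+    >>> G = nx.path_graph(3)  # doctest: +SKIP
+    >>> nx.write_gml(G, "tiny.gml")  # doctest: +SKIP
+    >>> sorted(nx.read_gml("tiny.gml"))  # doctest: +SKIP
+    ['0', '1', '2']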
+""" + +import html.entities as htmlentitydefs +import re +from ast import literal_eval +from collections import defaultdict +from enum import Enum +from io import StringIO +from typing import Any, NamedTuple + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import open_file + +__all__ = ["read_gml", "parse_gml", "generate_gml", "write_gml"] + + +def escape(text): + """Use XML character references to escape characters. + + Use XML character references for unprintable or non-ASCII + characters, double quotes and ampersands in a string + """ + + def fixup(m): + ch = m.group(0) + return "&#" + str(ord(ch)) + ";" + + text = re.sub('[^ -~]|[&"]', fixup, text) + return text if isinstance(text, str) else str(text) + + +def unescape(text): + """Replace XML character references with the referenced characters""" + + def fixup(m): + text = m.group(0) + if text[1] == "#": + # Character reference + if text[2] == "x": + code = int(text[3:-1], 16) + else: + code = int(text[2:-1]) + else: + # Named entity + try: + code = htmlentitydefs.name2codepoint[text[1:-1]] + except KeyError: + return text # leave unchanged + try: + return chr(code) + except (ValueError, OverflowError): + return text # leave unchanged + + return re.sub("&(?:[0-9A-Za-z]+|#(?:[0-9]+|x[0-9A-Fa-f]+));", fixup, text) + + +def literal_destringizer(rep): + """Convert a Python literal to the value it represents. + + Parameters + ---------- + rep : string + A Python literal. + + Returns + ------- + value : object + The value of the Python literal. + + Raises + ------ + ValueError + If `rep` is not a Python literal. + """ + if isinstance(rep, str): + orig_rep = rep + try: + return literal_eval(rep) + except SyntaxError as err: + raise ValueError(f"{orig_rep!r} is not a valid Python literal") from err + else: + raise ValueError(f"{rep!r} is not a string") + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_gml(path, label="label", destringizer=None): + """Read graph in GML format from `path`. + + Parameters + ---------- + path : file or string + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + label : string, optional + If not None, the parsed nodes will be renamed according to node + attributes indicated by `label`. Default value: 'label'. + + destringizer : callable, optional + A `destringizer` that recovers values stored as strings in GML. If it + cannot convert a string to a value, a `ValueError` is raised. Default + value : None. + + Returns + ------- + G : NetworkX graph + The parsed graph. + + Raises + ------ + NetworkXError + If the input cannot be parsed. + + See Also + -------- + write_gml, parse_gml + literal_destringizer + + Notes + ----- + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
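+
+    When ``label=None``, nodes keep their GML integer ``id`` values instead
+    of being renamed to the ``label`` attribute.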
+ + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_gml(G, "test_path4.gml") + + GML values are interpreted as strings by default: + + >>> H = nx.read_gml("test_path4.gml") + >>> H.nodes + NodeView(('0', '1', '2', '3')) + + When a `destringizer` is provided, GML values are converted to the provided type. + For example, integer nodes can be recovered as shown below: + + >>> J = nx.read_gml("test_path4.gml", destringizer=int) + >>> J.nodes + NodeView((0, 1, 2, 3)) + + """ + + def filter_lines(lines): + for line in lines: + try: + line = line.decode("ascii") + except UnicodeDecodeError as err: + raise NetworkXError("input is not ASCII-encoded") from err + if not isinstance(line, str): + lines = str(lines) + if line and line[-1] == "\n": + line = line[:-1] + yield line + + G = parse_gml_lines(filter_lines(path), label, destringizer) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_gml(lines, label="label", destringizer=None): + """Parse GML graph from a string or iterable. + + Parameters + ---------- + lines : string or iterable of strings + Data in GML format. + + label : string, optional + If not None, the parsed nodes will be renamed according to node + attributes indicated by `label`. Default value: 'label'. + + destringizer : callable, optional + A `destringizer` that recovers values stored as strings in GML. If it + cannot convert a string to a value, a `ValueError` is raised. Default + value : None. + + Returns + ------- + G : NetworkX graph + The parsed graph. + + Raises + ------ + NetworkXError + If the input cannot be parsed. + + See Also + -------- + write_gml, read_gml + + Notes + ----- + This stores nested GML attributes as dictionaries in the NetworkX graph, + node, and edge attribute structures. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
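+
+    Examples
+    --------
+    Parsing a small graph given as a single string:
+
+    >>> G = nx.parse_gml('graph [ node [ id 0 label "a" ] node [ id 1 label "b" ] edge [ source 0 target 1 ] ]')
+    >>> sorted(G.nodes)
+    ['a', 'b']
+    >>> list(G.edges)
+    [('a', 'b')]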
+ """ + + def decode_line(line): + if isinstance(line, bytes): + try: + line.decode("ascii") + except UnicodeDecodeError as err: + raise NetworkXError("input is not ASCII-encoded") from err + if not isinstance(line, str): + line = str(line) + return line + + def filter_lines(lines): + if isinstance(lines, str): + lines = decode_line(lines) + lines = lines.splitlines() + yield from lines + else: + for line in lines: + line = decode_line(line) + if line and line[-1] == "\n": + line = line[:-1] + if line.find("\n") != -1: + raise NetworkXError("input line contains newline") + yield line + + G = parse_gml_lines(filter_lines(lines), label, destringizer) + return G + + +class Pattern(Enum): + """encodes the index of each token-matching pattern in `tokenize`.""" + + KEYS = 0 + REALS = 1 + INTS = 2 + STRINGS = 3 + DICT_START = 4 + DICT_END = 5 + COMMENT_WHITESPACE = 6 + + +class Token(NamedTuple): + category: Pattern + value: Any + line: int + position: int + + +LIST_START_VALUE = "_networkx_list_start" + + +def parse_gml_lines(lines, label, destringizer): + """Parse GML `lines` into a graph.""" + + def tokenize(): + patterns = [ + r"[A-Za-z][0-9A-Za-z_]*\b", # keys + # reals + r"[+-]?(?:[0-9]*\.[0-9]+|[0-9]+\.[0-9]*|INF)(?:[Ee][+-]?[0-9]+)?", + r"[+-]?[0-9]+", # ints + r'".*?"', # strings + r"\[", # dict start + r"\]", # dict end + r"#.*$|\s+", # comments and whitespaces + ] + tokens = re.compile("|".join(f"({pattern})" for pattern in patterns)) + lineno = 0 + multilines = [] # entries spread across multiple lines + for line in lines: + pos = 0 + + # deal with entries spread across multiple lines + # + # should we actually have to deal with escaped "s then do it here + if multilines: + multilines.append(line.strip()) + if line[-1] == '"': # closing multiline entry + # multiline entries will be joined by space. cannot + # reintroduce newlines as this will break the tokenizer + line = " ".join(multilines) + multilines = [] + else: # continued multiline entry + lineno += 1 + continue + else: + if line.count('"') == 1: # opening multiline entry + if line.strip()[0] != '"' and line.strip()[-1] != '"': + # since we expect something like key "value", the " should not be found at ends + # otherwise tokenizer will pick up the formatting mistake. 
+ multilines = [line.rstrip()] + lineno += 1 + continue + + length = len(line) + + while pos < length: + match = tokens.match(line, pos) + if match is None: + m = f"cannot tokenize {line[pos:]} at ({lineno + 1}, {pos + 1})" + raise NetworkXError(m) + for i in range(len(patterns)): + group = match.group(i + 1) + if group is not None: + if i == 0: # keys + value = group.rstrip() + elif i == 1: # reals + value = float(group) + elif i == 2: # ints + value = int(group) + else: + value = group + if i != 6: # comments and whitespaces + yield Token(Pattern(i), value, lineno + 1, pos + 1) + pos += len(group) + break + lineno += 1 + yield Token(None, None, lineno + 1, 1) # EOF + + def unexpected(curr_token, expected): + category, value, lineno, pos = curr_token + value = repr(value) if value is not None else "EOF" + raise NetworkXError(f"expected {expected}, found {value} at ({lineno}, {pos})") + + def consume(curr_token, category, expected): + if curr_token.category == category: + return next(tokens) + unexpected(curr_token, expected) + + def parse_kv(curr_token): + dct = defaultdict(list) + while curr_token.category == Pattern.KEYS: + key = curr_token.value + curr_token = next(tokens) + category = curr_token.category + if category == Pattern.REALS or category == Pattern.INTS: + value = curr_token.value + curr_token = next(tokens) + elif category == Pattern.STRINGS: + value = unescape(curr_token.value[1:-1]) + if destringizer: + try: + value = destringizer(value) + except ValueError: + pass + # Special handling for empty lists and tuples + if value == "()": + value = () + if value == "[]": + value = [] + curr_token = next(tokens) + elif category == Pattern.DICT_START: + curr_token, value = parse_dict(curr_token) + else: + # Allow for string convertible id and label values + if key in ("id", "label", "source", "target"): + try: + # String convert the token value + value = unescape(str(curr_token.value)) + if destringizer: + try: + value = destringizer(value) + except ValueError: + pass + curr_token = next(tokens) + except Exception: + msg = ( + "an int, float, string, '[' or string" + + " convertible ASCII value for node id or label" + ) + unexpected(curr_token, msg) + # Special handling for nan and infinity. Since the gml language + # defines unquoted strings as keys, the numeric and string branches + # are skipped and we end up in this special branch, so we need to + # convert the current token value to a float for NAN and plain INF. + # +/-INF are handled in the pattern for 'reals' in tokenize(). This + # allows labels and values to be nan or infinity, but not keys. 
+ elif curr_token.value in {"NAN", "INF"}: + value = float(curr_token.value) + curr_token = next(tokens) + else: # Otherwise error out + unexpected(curr_token, "an int, float, string or '['") + dct[key].append(value) + + def clean_dict_value(value): + if not isinstance(value, list): + return value + if len(value) == 1: + return value[0] + if value[0] == LIST_START_VALUE: + return value[1:] + return value + + dct = {key: clean_dict_value(value) for key, value in dct.items()} + return curr_token, dct + + def parse_dict(curr_token): + # dict start + curr_token = consume(curr_token, Pattern.DICT_START, "'['") + # dict contents + curr_token, dct = parse_kv(curr_token) + # dict end + curr_token = consume(curr_token, Pattern.DICT_END, "']'") + return curr_token, dct + + def parse_graph(): + curr_token, dct = parse_kv(next(tokens)) + if curr_token.category is not None: # EOF + unexpected(curr_token, "EOF") + if "graph" not in dct: + raise NetworkXError("input contains no graph") + graph = dct["graph"] + if isinstance(graph, list): + raise NetworkXError("input contains more than one graph") + return graph + + tokens = tokenize() + graph = parse_graph() + + directed = graph.pop("directed", False) + multigraph = graph.pop("multigraph", False) + if not multigraph: + G = nx.DiGraph() if directed else nx.Graph() + else: + G = nx.MultiDiGraph() if directed else nx.MultiGraph() + graph_attr = {k: v for k, v in graph.items() if k not in ("node", "edge")} + G.graph.update(graph_attr) + + def pop_attr(dct, category, attr, i): + try: + return dct.pop(attr) + except KeyError as err: + raise NetworkXError(f"{category} #{i} has no {attr!r} attribute") from err + + nodes = graph.get("node", []) + mapping = {} + node_labels = set() + for i, node in enumerate(nodes if isinstance(nodes, list) else [nodes]): + id = pop_attr(node, "node", "id", i) + if id in G: + raise NetworkXError(f"node id {id!r} is duplicated") + if label is not None and label != "id": + node_label = pop_attr(node, "node", label, i) + if node_label in node_labels: + raise NetworkXError(f"node label {node_label!r} is duplicated") + node_labels.add(node_label) + mapping[id] = node_label + G.add_node(id, **node) + + edges = graph.get("edge", []) + for i, edge in enumerate(edges if isinstance(edges, list) else [edges]): + source = pop_attr(edge, "edge", "source", i) + target = pop_attr(edge, "edge", "target", i) + if source not in G: + raise NetworkXError(f"edge #{i} has undefined source {source!r}") + if target not in G: + raise NetworkXError(f"edge #{i} has undefined target {target!r}") + if not multigraph: + if not G.has_edge(source, target): + G.add_edge(source, target, **edge) + else: + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}) is duplicated" + raise nx.NetworkXError(msg) + else: + key = edge.pop("key", None) + if key is not None and G.has_edge(source, target, key): + arrow = "->" if directed else "--" + msg = f"edge #{i} ({source!r}{arrow}{target!r}, {key!r})" + msg2 = 'Hint: If multigraph add "multigraph 1" to file header.' + raise nx.NetworkXError(msg + " is duplicated\n" + msg2) + G.add_edge(source, target, key, **edge) + + if label is not None and label != "id": + G = nx.relabel_nodes(G, mapping) + return G + + +def literal_stringizer(value): + """Convert a `value` to a Python literal in GML representation. + + Parameters + ---------- + value : object + The `value` to be converted to GML representation. + + Returns + ------- + rep : string + A double-quoted Python literal representing value. 
Unprintable + characters are replaced by XML character references. + + Raises + ------ + ValueError + If `value` cannot be converted to GML. + + Notes + ----- + The original value can be recovered using the + :func:`networkx.readwrite.gml.literal_destringizer` function. + """ + + def stringize(value): + if isinstance(value, int | bool) or value is None: + if value is True: # GML uses 1/0 for boolean values. + buf.write(str(1)) + elif value is False: + buf.write(str(0)) + else: + buf.write(str(value)) + elif isinstance(value, str): + text = repr(value) + if text[0] != "u": + try: + value.encode("latin1") + except UnicodeEncodeError: + text = "u" + text + buf.write(text) + elif isinstance(value, float | complex | str | bytes): + buf.write(repr(value)) + elif isinstance(value, list): + buf.write("[") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write("]") + elif isinstance(value, tuple): + if len(value) > 1: + buf.write("(") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write(")") + elif value: + buf.write("(") + stringize(value[0]) + buf.write(",)") + else: + buf.write("()") + elif isinstance(value, dict): + buf.write("{") + first = True + for key, value in value.items(): + if not first: + buf.write(",") + else: + first = False + stringize(key) + buf.write(":") + stringize(value) + buf.write("}") + elif isinstance(value, set): + buf.write("{") + first = True + for item in value: + if not first: + buf.write(",") + else: + first = False + stringize(item) + buf.write("}") + else: + msg = f"{value!r} cannot be converted into a Python literal" + raise ValueError(msg) + + buf = StringIO() + stringize(value) + return buf.getvalue() + + +def generate_gml(G, stringizer=None): + r"""Generate a single entry of the graph `G` in GML format. + + Parameters + ---------- + G : NetworkX graph + The graph to be converted to GML. + + stringizer : callable, optional + A `stringizer` which converts non-int/non-float/non-dict values into + strings. If it cannot convert a value into a string, it should raise a + `ValueError` to indicate that. Default value: None. + + Returns + ------- + lines: generator of strings + Lines of GML data. Newlines are not appended. + + Raises + ------ + NetworkXError + If `stringizer` cannot convert a value into a string, or the value to + convert is not a string while `stringizer` is None. + + See Also + -------- + literal_stringizer + + Notes + ----- + Graph attributes named 'directed', 'multigraph', 'node' or + 'edge', node attributes named 'id' or 'label', edge attributes + named 'source' or 'target' (or 'key' if `G` is a multigraph) + are ignored because these attribute names are used to encode the graph + structure. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. 
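+
+    Values of other types must be convertible to strings by `stringizer`;
+    otherwise `NetworkXError` is raised.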
+ + Examples + -------- + >>> G = nx.Graph() + >>> G.add_node("1") + >>> print("\n".join(nx.generate_gml(G))) + graph [ + node [ + id 0 + label "1" + ] + ] + >>> G = nx.MultiGraph([("a", "b"), ("a", "b")]) + >>> print("\n".join(nx.generate_gml(G))) + graph [ + multigraph 1 + node [ + id 0 + label "a" + ] + node [ + id 1 + label "b" + ] + edge [ + source 0 + target 1 + key 0 + ] + edge [ + source 0 + target 1 + key 1 + ] + ] + """ + valid_keys = re.compile("^[A-Za-z][0-9A-Za-z_]*$") + + def stringize(key, value, ignored_keys, indent, in_list=False): + if not isinstance(key, str): + raise NetworkXError(f"{key!r} is not a string") + if not valid_keys.match(key): + raise NetworkXError(f"{key!r} is not a valid key") + if not isinstance(key, str): + key = str(key) + if key not in ignored_keys: + if isinstance(value, int | bool): + if key == "label": + yield indent + key + ' "' + str(value) + '"' + elif value is True: + # python bool is an instance of int + yield indent + key + " 1" + elif value is False: + yield indent + key + " 0" + # GML only supports signed 32-bit integers + elif value < -(2**31) or value >= 2**31: + yield indent + key + ' "' + str(value) + '"' + else: + yield indent + key + " " + str(value) + elif isinstance(value, float): + text = repr(value).upper() + # GML matches INF to keys, so prepend + to INF. Use repr(float(*)) + # instead of string literal to future proof against changes to repr. + if text == repr(float("inf")).upper(): + text = "+" + text + else: + # GML requires that a real literal contain a decimal point, but + # repr may not output a decimal point when the mantissa is + # integral and hence needs fixing. + epos = text.rfind("E") + if epos != -1 and text.find(".", 0, epos) == -1: + text = text[:epos] + "." + text[epos:] + if key == "label": + yield indent + key + ' "' + text + '"' + else: + yield indent + key + " " + text + elif isinstance(value, dict): + yield indent + key + " [" + next_indent = indent + " " + for key, value in value.items(): + yield from stringize(key, value, (), next_indent) + yield indent + "]" + elif isinstance(value, tuple) and key == "label": + yield indent + key + f' "({",".join(repr(v) for v in value)})"' + elif isinstance(value, list | tuple) and key != "label" and not in_list: + if len(value) == 0: + yield indent + key + " " + f'"{value!r}"' + if len(value) == 1: + yield indent + key + " " + f'"{LIST_START_VALUE}"' + for val in value: + yield from stringize(key, val, (), indent, True) + else: + if stringizer: + try: + value = stringizer(value) + except ValueError as err: + raise NetworkXError( + f"{value!r} cannot be converted into a string" + ) from err + if not isinstance(value, str): + raise NetworkXError(f"{value!r} is not a string") + yield indent + key + ' "' + escape(value) + '"' + + multigraph = G.is_multigraph() + yield "graph [" + + # Output graph attributes + if G.is_directed(): + yield " directed 1" + if multigraph: + yield " multigraph 1" + ignored_keys = {"directed", "multigraph", "node", "edge"} + for attr, value in G.graph.items(): + yield from stringize(attr, value, ignored_keys, " ") + + # Output node data + node_id = dict(zip(G, range(len(G)))) + ignored_keys = {"id", "label"} + for node, attrs in G.nodes.items(): + yield " node [" + yield " id " + str(node_id[node]) + yield from stringize("label", node, (), " ") + for attr, value in attrs.items(): + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" + + # Output edge data + ignored_keys = {"source", "target"} + kwargs = {"data": True} + if multigraph: 
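+        # emit each edge key so parallel edges stay distinguishable on re-read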
+ ignored_keys.add("key") + kwargs["keys"] = True + for e in G.edges(**kwargs): + yield " edge [" + yield " source " + str(node_id[e[0]]) + yield " target " + str(node_id[e[1]]) + if multigraph: + yield from stringize("key", e[2], (), " ") + for attr, value in e[-1].items(): + yield from stringize(attr, value, ignored_keys, " ") + yield " ]" + yield "]" + + +@open_file(1, mode="wb") +def write_gml(G, path, stringizer=None): + """Write a graph `G` in GML format to the file or file handle `path`. + + Parameters + ---------- + G : NetworkX graph + The graph to be converted to GML. + + path : string or file + Filename or file handle to write to. + Filenames ending in .gz or .bz2 will be compressed. + + stringizer : callable, optional + A `stringizer` which converts non-int/non-float/non-dict values into + strings. If it cannot convert a value into a string, it should raise a + `ValueError` to indicate that. Default value: None. + + Raises + ------ + NetworkXError + If `stringizer` cannot convert a value into a string, or the value to + convert is not a string while `stringizer` is None. + + See Also + -------- + read_gml, generate_gml + literal_stringizer + + Notes + ----- + Graph attributes named 'directed', 'multigraph', 'node' or + 'edge', node attributes named 'id' or 'label', edge attributes + named 'source' or 'target' (or 'key' if `G` is a multigraph) + are ignored because these attribute names are used to encode the graph + structure. + + GML files are stored using a 7-bit ASCII encoding with any extended + ASCII characters (iso8859-1) appearing as HTML character entities. + Without specifying a `stringizer`/`destringizer`, the code is capable of + writing `int`/`float`/`str`/`dict`/`list` data as required by the GML + specification. For writing other data types, and for reading data other + than `str` you need to explicitly supply a `stringizer`/`destringizer`. + + Note that while we allow non-standard GML to be read from a file, we make + sure to write GML format. In particular, underscores are not allowed in + attribute names. + For additional documentation on the GML file format, please see the + `GML url `_. + + See the module docstring :mod:`networkx.readwrite.gml` for more details. + + Examples + -------- + >>> G = nx.path_graph(5) + >>> nx.write_gml(G, "test_path5.gml") + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_gml(G, "test_path5.gml.gz") + """ + for line in generate_gml(G, stringizer): + path.write((line + "\n").encode("ascii")) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/graph6.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/graph6.py new file mode 100644 index 0000000..cdc2925 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/graph6.py @@ -0,0 +1,427 @@ +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. +"""Functions for reading and writing graphs in the *graph6* format. + +The *graph6* file format is suitable for small graphs or large dense +graphs. For large sparse graphs, use the *sparse6* format. + +For more information, see the `graph6`_ homepage. + +.. 
_graph6: http://users.cecs.anu.edu.au/~bdm/data/formats.html + +""" + +from itertools import islice + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import not_implemented_for, open_file + +__all__ = ["from_graph6_bytes", "read_graph6", "to_graph6_bytes", "write_graph6"] + + +def _generate_graph6_bytes(G, nodes, header): + """Yield bytes in the graph6 encoding of a graph. + + `G` is an undirected simple graph. `nodes` is the list of nodes for + which the node-induced subgraph will be encoded; if `nodes` is the + list of all nodes in the graph, the entire graph will be + encoded. `header` is a Boolean that specifies whether to generate + the header ``b'>>graph6<<'`` before the remaining data. + + This function generates `bytes` objects in the following order: + + 1. the header (if requested), + 2. the encoding of the number of nodes, + 3. each character, one-at-a-time, in the encoding of the requested + node-induced subgraph, + 4. a newline character. + + This function raises :exc:`ValueError` if the graph is too large for + the graph6 format (that is, greater than ``2 ** 36`` nodes). + + """ + n = len(G) + if n >= 2**36: + raise ValueError( + "graph6 is only defined if number of nodes is less than 2 ** 36" + ) + if header: + yield b">>graph6<<" + for d in n_to_data(n): + yield str.encode(chr(d + 63)) + # This generates the same as `(v in G[u] for u, v in combinations(G, 2))`, + # but in "column-major" order instead of "row-major" order. + bits = (nodes[j] in G[nodes[i]] for j in range(1, n) for i in range(j)) + chunk = list(islice(bits, 6)) + while chunk: + d = sum(b << 5 - i for i, b in enumerate(chunk)) + yield str.encode(chr(d + 63)) + chunk = list(islice(bits, 6)) + yield b"\n" + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_graph6_bytes(bytes_in): + """Read a simple undirected graph in graph6 format from bytes. + + Parameters + ---------- + bytes_in : bytes + Data in graph6 format + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If `bytes_in` is unable to be parsed in graph6 format + + ValueError + If any character ``c`` in bytes_in does not satisfy + ``63 <= ord(c) < 127``. + + Examples + -------- + >>> G = nx.from_graph6_bytes(b"A_") + >>> sorted(G.edges()) + [(0, 1)] + + Notes + ----- + Per the graph6 spec, the header (e.g. ``b'>>graph6<<'``) must not be + followed by a newline character. + + See Also + -------- + read_graph6, write_graph6 + + References + ---------- + .. [1] Graph6 specification + + + """ + + def bits(): + """Returns sequence of individual bits from 6-bit-per-value + list of data values.""" + for d in data: + for i in [5, 4, 3, 2, 1, 0]: + yield (d >> i) & 1 + + # Ignore trailing newline + bytes_in = bytes_in.rstrip(b"\n") + + if bytes_in.startswith(b">>graph6<<"): + bytes_in = bytes_in[10:] + + data = [c - 63 for c in bytes_in] + if any(c > 63 for c in data): + raise ValueError("each input character must be in range(63, 127)") + + n, data = data_to_n(data) + nd = (n * (n - 1) // 2 + 5) // 6 + if len(data) != nd: + raise NetworkXError( + f"Expected {n * (n - 1) // 2} bits but got {len(data) * 6} in graph6" + ) + + G = nx.Graph() + G.add_nodes_from(range(n)) + for (i, j), b in zip(((i, j) for j in range(1, n) for i in range(j)), bits()): + if b: + G.add_edge(i, j) + + return G + + +@not_implemented_for("directed") +@not_implemented_for("multigraph") +def to_graph6_bytes(G, nodes=None, header=True): + """Convert a simple undirected graph to bytes in graph6 format. 
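+
+    The encoding packs the upper triangle of the adjacency matrix six bits
+    at a time into printable ASCII characters (byte values 63-126).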
+
+    Parameters
+    ----------
+    G : Graph (undirected)
+
+    nodes: list or iterable
+       Nodes are labeled 0...n-1 in the order provided.  If None the ordering
+       given by ``G.nodes()`` is used.
+
+    header: bool
+       If True add '>>graph6<<' bytes to head of data.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    ValueError
+        If the graph has at least ``2 ** 36`` nodes; the graph6 format
+        is only defined for graphs of order less than ``2 ** 36``.
+
+    Examples
+    --------
+    >>> nx.to_graph6_bytes(nx.path_graph(2))
+    b'>>graph6<<A_\n'
+
+    """
+    if nodes is not None:
+        G = G.subgraph(nodes)
+    H = nx.convert_node_labels_to_integers(G)
+    nodes = sorted(H.nodes())
+    return b"".join(_generate_graph6_bytes(H, nodes, header))
+
+
+@open_file(0, mode="rb")
+@nx._dispatchable(graphs=None, returns_graph=True)
+def read_graph6(path):
+    """Read simple undirected graphs in graph6 format from path.
+
+    Parameters
+    ----------
+    path : file or string
+       Filename or file handle to read.
+       Filenames ending in .gz or .bz2 will be decompressed.
+
+    Returns
+    -------
+    G : Graph or list of Graphs
+       If the file contains multiple lines then a list of graphs is returned
+
+    Raises
+    ------
+    NetworkXError
+        If the string is unable to be parsed in graph6 format
+
+    Examples
+    --------
+    You can read a graph6 file by giving the path to the file::
+
+        >>> import tempfile
+        >>> with tempfile.NamedTemporaryFile(delete=False) as f:
+        ...     _ = f.write(b">>graph6<<A_\n")
+        ...     _ = f.seek(0)
+        ...     G = nx.read_graph6(f.name)
+        >>> list(G.edges())
+        [(0, 1)]
+
+    You can also read a graph6 file by giving an open file-like object::
+
+        >>> import tempfile
+        >>> with tempfile.NamedTemporaryFile() as f:
+        ...     _ = f.write(b">>graph6<<A_\n")
+        ...     _ = f.seek(0)
+        ...     G = nx.read_graph6(f)
+        >>> list(G.edges())
+        [(0, 1)]
+
+    See Also
+    --------
+    from_graph6_bytes, write_graph6
+
+    References
+    ----------
+    .. [1] Graph6 specification
+           <http://users.cecs.anu.edu.au/~bdm/data/formats.txt>
+
+    """
+    glist = []
+    for line in path:
+        line = line.strip()
+        if not len(line):
+            continue
+        glist.append(from_graph6_bytes(line))
+    if len(glist) == 1:
+        return glist[0]
+    else:
+        return glist
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+@open_file(1, mode="wb")
+def write_graph6(G, path, nodes=None, header=True):
+    """Write a simple undirected graph to a path in graph6 format.
+
+    Parameters
+    ----------
+    G : Graph (undirected)
+
+    path : file or string
+       File or filename to write.
+       Filenames ending in .gz or .bz2 will be compressed.
+
+    nodes: list or iterable
+       Nodes are labeled 0...n-1 in the order provided.  If None the ordering
+       given by ``G.nodes()`` is used.
+
+    header: bool
+       If True add '>>graph6<<' string to head of data
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the graph is directed or is a multigraph.
+
+    ValueError
+        If the graph has at least ``2 ** 36`` nodes; the graph6 format
+        is only defined for graphs of order less than ``2 ** 36``.
+
+    Examples
+    --------
+    You can write a graph6 file by giving the path to a file::
+
+        >>> import tempfile
+        >>> with tempfile.NamedTemporaryFile(delete=False) as f:
+        ...     nx.write_graph6(nx.path_graph(2), f.name)
+        ...     _ = f.seek(0)
+        ...     print(f.read())
+        b'>>graph6<<A_\n'
+
+    """
+    return write_graph6_file(G, path, nodes=nodes, header=header)
+
+
+@not_implemented_for("directed")
+@not_implemented_for("multigraph")
+def write_graph6_file(G, f, nodes=None, header=True):
+    """Write a simple undirected graph to a file-like object in graph6 format.
+
+    Parameters
+    ----------
+    G : Graph (undirected)
+
+    f : file-like object
+       The file to write.
+ + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>graph6<<' string to head of data + + Raises + ------ + NetworkXNotImplemented + If the graph is directed or is a multigraph. + + ValueError + If the graph has at least ``2 ** 36`` nodes; the graph6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + You can write a graph6 file by giving an open file-like object:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile() as f: + ... nx.write_graph6(nx.path_graph(2), f) + ... _ = f.seek(0) + ... print(f.read()) + b'>>graph6< + + """ + if nodes is not None: + G = G.subgraph(nodes) + H = nx.convert_node_labels_to_integers(G) + nodes = sorted(H.nodes()) + for b in _generate_graph6_bytes(H, nodes, header): + f.write(b) + + +def data_to_n(data): + """Read initial one-, four- or eight-unit value from graph6 + integer sequence. + + Return (value, rest of seq.)""" + if data[0] <= 62: + return data[0], data[1:] + if data[1] <= 62: + return (data[1] << 12) + (data[2] << 6) + data[3], data[4:] + return ( + (data[2] << 30) + + (data[3] << 24) + + (data[4] << 18) + + (data[5] << 12) + + (data[6] << 6) + + data[7], + data[8:], + ) + + +def n_to_data(n): + """Convert an integer to one-, four- or eight-unit graph6 sequence. + + This function is undefined if `n` is not in ``range(2 ** 36)``. + + """ + if n <= 62: + return [n] + elif n <= 258047: + return [63, (n >> 12) & 0x3F, (n >> 6) & 0x3F, n & 0x3F] + else: # if n <= 68719476735: + return [ + 63, + 63, + (n >> 30) & 0x3F, + (n >> 24) & 0x3F, + (n >> 18) & 0x3F, + (n >> 12) & 0x3F, + (n >> 6) & 0x3F, + n & 0x3F, + ] diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/graphml.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/graphml.py new file mode 100644 index 0000000..6ca1741 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/graphml.py @@ -0,0 +1,1053 @@ +""" +******* +GraphML +******* +Read and write graphs in GraphML format. + +.. warning:: + + This parser uses the standard xml library present in Python, which is + insecure - see :external+python:mod:`xml` for additional information. + Only parse GraphML files you trust. + +This implementation does not support mixed graphs (directed and unidirected +edges together), hyperedges, nested graphs, or ports. + +"GraphML is a comprehensive and easy-to-use file format for graphs. It +consists of a language core to describe the structural properties of a +graph and a flexible extension mechanism to add application-specific +data. Its main features include support of + + * directed, undirected, and mixed graphs, + * hypergraphs, + * hierarchical graphs, + * graphical representations, + * references to external data, + * application-specific attribute data, and + * light-weight parsers. + +Unlike many other file formats for graphs, GraphML does not use a +custom syntax. Instead, it is based on XML and hence ideally suited as +a common denominator for all kinds of services generating, archiving, +or processing graphs." + +http://graphml.graphdrawing.org/ + +Format +------ +GraphML is an XML format. See +http://graphml.graphdrawing.org/specification.html for the specification and +http://graphml.graphdrawing.org/primer/graphml-primer.html +for examples. 
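+
+A minimal write/read round trip (the file name is only illustrative)::
+
+    >>> G = nx.path_graph(4)  # doctest: +SKIP
+    >>> nx.write_graphml(G, "path.graphml")  # doctest: +SKIP
+    >>> H = nx.read_graphml("path.graphml")  # doctest: +SKIP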
+""" + +import warnings +from collections import defaultdict + +import networkx as nx +from networkx.utils import open_file + +__all__ = [ + "write_graphml", + "read_graphml", + "generate_graphml", + "write_graphml_xml", + "write_graphml_lxml", + "parse_graphml", + "GraphMLWriter", + "GraphMLReader", +] + + +@open_file(1, mode="wb") +def write_graphml_xml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Write G in GraphML XML format to path + + Parameters + ---------- + G : graph + A networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + infer_numeric_types : boolean + Determine if numeric types should be generalized. + For example, if edges have both int and float 'weight' attributes, + we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_graphml(G, "test.graphml") + + Notes + ----- + This implementation does not support mixed graphs (directed + and unidirected edges together) hyperedges, nested graphs, or ports. + """ + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.add_graph_element(G) + writer.dump(path) + + +@open_file(1, mode="wb") +def write_graphml_lxml( + G, + path, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Write G in GraphML XML format to path + + This function uses the LXML framework and should be faster than + the version using the xml library. + + Parameters + ---------- + G : graph + A networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + infer_numeric_types : boolean + Determine if numeric types should be generalized. + For example, if edges have both int and float 'weight' attributes, + we infer in GraphML that both are floats. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_graphml_lxml(G, "fourpath.graphml") + + Notes + ----- + This implementation does not support mixed graphs (directed + and unidirected edges together) hyperedges, nested graphs, or ports. 
+ """ + try: + import lxml.etree as lxmletree + except ImportError: + return write_graphml_xml( + G, + path, + encoding, + prettyprint, + infer_numeric_types, + named_key_ids, + edge_id_from_attribute, + ) + + writer = GraphMLWriterLxml( + path, + graph=G, + encoding=encoding, + prettyprint=prettyprint, + infer_numeric_types=infer_numeric_types, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.dump() + + +def generate_graphml( + G, + encoding="utf-8", + prettyprint=True, + named_key_ids=False, + edge_id_from_attribute=None, +): + """Generate GraphML lines for G + + Parameters + ---------- + G : graph + A networkx graph + encoding : string (optional) + Encoding for text data. + prettyprint : bool (optional) + If True use line breaks and indenting in output XML. + named_key_ids : bool (optional) + If True use attr.name as value for key elements' id attribute. + edge_id_from_attribute : dict key (optional) + If provided, the graphml edge id is set by looking up the corresponding + edge data attribute keyed by this parameter. If `None` or the key does not exist in edge data, + the edge id is set by the edge key if `G` is a MultiGraph, else the edge id is left unset. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> linefeed = chr(10) # linefeed = \n + >>> s = linefeed.join(nx.generate_graphml(G)) + >>> for line in nx.generate_graphml(G): # doctest: +SKIP + ... print(line) + + Notes + ----- + This implementation does not support mixed graphs (directed and unidirected + edges together) hyperedges, nested graphs, or ports. + """ + writer = GraphMLWriter( + encoding=encoding, + prettyprint=prettyprint, + named_key_ids=named_key_ids, + edge_id_from_attribute=edge_id_from_attribute, + ) + writer.add_graph_element(G) + yield from str(writer).splitlines() + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_graphml(path, node_type=str, edge_key_type=int, force_multigraph=False): + """Read graph in GraphML format from path. + + Parameters + ---------- + path : file or string + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + node_type: Python type (default: str) + Convert node ids to this type + + edge_key_type: Python type (default: int) + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. + + Returns + ------- + graph: NetworkX graph + If parallel edges are present or `force_multigraph=True` then + a MultiGraph or MultiDiGraph is returned. Otherwise a Graph/DiGraph. + The returned graph is directed if the file indicates it should be. + + Notes + ----- + Default node and edge attributes are not propagated to each node and edge. + They can be obtained from `G.graph` and applied to node and edge attributes + if desired using something like this: + + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP + >>> for node, data in G.nodes(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP + >>> for u, v, data in G.edges(data=True): # doctest: +SKIP + ... if "color" not in data: + ... 
data["color"] = default_color + + This implementation does not support mixed graphs (directed and unidirected + edges together), hypergraphs, nested graphs, or ports. + + For multigraphs the GraphML edge "id" will be used as the edge + key. If not specified then they "key" attribute will be used. If + there is no "key" attribute a default NetworkX multigraph edge key + will be provided. + + Files with the yEd "yfiles" extension can be read. The type of the node's + shape is preserved in the `shape_type` node attribute. + + yEd compressed files ("file.graphmlz" extension) can be read by renaming + the file to "file.graphml.gz". + + """ + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) + # need to check for multiple graphs + glist = list(reader(path=path)) + if len(glist) == 0: + # If no graph comes back, try looking for an incomplete header + header = b'' + path.seek(0) + old_bytes = path.read() + new_bytes = old_bytes.replace(b"", header) + glist = list(reader(string=new_bytes)) + if len(glist) == 0: + raise nx.NetworkXError("file not successfully read as graphml") + return glist[0] + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_graphml( + graphml_string, node_type=str, edge_key_type=int, force_multigraph=False +): + """Read graph in GraphML format from string. + + Parameters + ---------- + graphml_string : string + String containing graphml information + (e.g., contents of a graphml file). + + node_type: Python type (default: str) + Convert node ids to this type + + edge_key_type: Python type (default: int) + Convert graphml edge ids to this type. Multigraphs use id as edge key. + Non-multigraphs add to edge attribute dict with name "id". + + force_multigraph : bool (default: False) + If True, return a multigraph with edge keys. If False (the default) + return a multigraph when multiedges are in the graph. + + + Returns + ------- + graph: NetworkX graph + If no parallel edges are found a Graph or DiGraph is returned. + Otherwise a MultiGraph or MultiDiGraph is returned. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> linefeed = chr(10) # linefeed = \n + >>> s = linefeed.join(nx.generate_graphml(G)) + >>> H = nx.parse_graphml(s) + + Notes + ----- + Default node and edge attributes are not propagated to each node and edge. + They can be obtained from `G.graph` and applied to node and edge attributes + if desired using something like this: + + >>> default_color = G.graph["node_default"]["color"] # doctest: +SKIP + >>> for node, data in G.nodes(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + >>> default_color = G.graph["edge_default"]["color"] # doctest: +SKIP + >>> for u, v, data in G.edges(data=True): # doctest: +SKIP + ... if "color" not in data: + ... data["color"] = default_color + + This implementation does not support mixed graphs (directed and unidirected + edges together), hypergraphs, nested graphs, or ports. + + For multigraphs the GraphML edge "id" will be used as the edge + key. If not specified then they "key" attribute will be used. If + there is no "key" attribute a default NetworkX multigraph edge key + will be provided. 
+ + """ + reader = GraphMLReader(node_type, edge_key_type, force_multigraph) + # need to check for multiple graphs + glist = list(reader(string=graphml_string)) + if len(glist) == 0: + # If no graph comes back, try looking for an incomplete header + header = '' + new_string = graphml_string.replace("", header) + glist = list(reader(string=new_string)) + if len(glist) == 0: + raise nx.NetworkXError("file not successfully read as graphml") + return glist[0] + + +class GraphML: + NS_GRAPHML = "http://graphml.graphdrawing.org/xmlns" + NS_XSI = "http://www.w3.org/2001/XMLSchema-instance" + # xmlns:y="http://www.yworks.com/xml/graphml" + NS_Y = "http://www.yworks.com/xml/graphml" + SCHEMALOCATION = " ".join( + [ + "http://graphml.graphdrawing.org/xmlns", + "http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd", + ] + ) + + def construct_types(self): + types = [ + (int, "integer"), # for Gephi GraphML bug + (str, "yfiles"), + (str, "string"), + (int, "int"), + (int, "long"), + (float, "float"), + (float, "double"), + (bool, "boolean"), + ] + + # These additions to types allow writing numpy types + try: + import numpy as np + except: + pass + else: + # prepend so that python types are created upon read (last entry wins) + types = [ + (np.float64, "float"), + (np.float32, "float"), + (np.float16, "float"), + (np.int_, "int"), + (np.int8, "int"), + (np.int16, "int"), + (np.int32, "int"), + (np.int64, "int"), + (np.uint8, "int"), + (np.uint16, "int"), + (np.uint32, "int"), + (np.uint64, "int"), + (np.int_, "int"), + (np.intc, "int"), + (np.intp, "int"), + ] + types + + self.xml_type = dict(types) + self.python_type = dict(reversed(a) for a in types) + + # This page says that data types in GraphML follow Java(TM). + # http://graphml.graphdrawing.org/primer/graphml-primer.html#AttributesDefinition + # true and false are the only boolean literals: + # http://en.wikibooks.org/wiki/Java_Programming/Literals#Boolean_Literals + convert_bool = { + # We use data.lower() in actual use. + "true": True, + "false": False, + # Include integer strings for convenience. + "0": False, + 0: False, + "1": True, + 1: True, + } + + def get_xml_type(self, key): + """Wrapper around the xml_type dict that raises a more informative + exception message when a user attempts to use data of a type not + supported by GraphML.""" + try: + return self.xml_type[key] + except KeyError as err: + raise TypeError( + f"GraphML does not support type {key} as data values." 
+ ) from err + + +class GraphMLWriter(GraphML): + def __init__( + self, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, + ): + self.construct_types() + from xml.etree.ElementTree import Element + + self.myElement = Element + + self.infer_numeric_types = infer_numeric_types + self.prettyprint = prettyprint + self.named_key_ids = named_key_ids + self.edge_id_from_attribute = edge_id_from_attribute + self.encoding = encoding + self.xml = self.myElement( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) + self.keys = {} + self.attributes = defaultdict(list) + self.attribute_types = defaultdict(set) + + if graph is not None: + self.add_graph_element(graph) + + def __str__(self): + from xml.etree.ElementTree import tostring + + if self.prettyprint: + self.indent(self.xml) + s = tostring(self.xml).decode(self.encoding) + return s + + def attr_type(self, name, scope, value): + """Infer the attribute type of data named name. Currently this only + supports inference of numeric types. + + If self.infer_numeric_types is false, type is used. Otherwise, pick the + most general of types found across all values with name and scope. This + means edges with data named 'weight' are treated separately from nodes + with data named 'weight'. + """ + if self.infer_numeric_types: + types = self.attribute_types[(name, scope)] + + if len(types) > 1: + types = {self.get_xml_type(t) for t in types} + if "string" in types: + return str + elif "float" in types or "double" in types: + return float + else: + return int + else: + return list(types)[0] + else: + return type(value) + + def get_key(self, name, attr_type, scope, default): + keys_key = (name, attr_type, scope) + try: + return self.keys[keys_key] + except KeyError: + if self.named_key_ids: + new_id = name + else: + new_id = f"d{len(list(self.keys))}" + + self.keys[keys_key] = new_id + key_kwargs = { + "id": new_id, + "for": scope, + "attr.name": name, + "attr.type": attr_type, + } + key_element = self.myElement("key", **key_kwargs) + # add subelement for data default value if present + if default is not None: + default_element = self.myElement("default") + default_element.text = str(default) + key_element.append(default_element) + self.xml.insert(0, key_element) + return new_id + + def add_data(self, name, element_type, value, scope="all", default=None): + """ + Make a data element for an edge or a node. Keep a log of the + type in the keys table. + """ + if element_type not in self.xml_type: + raise nx.NetworkXError( + f"GraphML writer does not support {element_type} as data values." + ) + keyid = self.get_key(name, self.get_xml_type(element_type), scope, default) + data_element = self.myElement("data", key=keyid) + data_element.text = str(value) + return data_element + + def add_attributes(self, scope, xml_obj, data, default): + """Appends attribute data to edges or nodes, and stores type information + to be added later. See add_graph_element. 
+ """ + for k, v in data.items(): + self.attribute_types[(str(k), scope)].add(type(v)) + self.attributes[xml_obj].append([k, v, scope, default.get(k)]) + + def add_nodes(self, G, graph_element): + default = G.graph.get("node_default", {}) + for node, data in G.nodes(data=True): + node_element = self.myElement("node", id=str(node)) + self.add_attributes("node", node_element, data, default) + graph_element.append(node_element) + + def add_edges(self, G, graph_element): + if G.is_multigraph(): + for u, v, key, data in G.edges(data=True, keys=True): + edge_element = self.myElement( + "edge", + source=str(u), + target=str(v), + id=str(data.get(self.edge_id_from_attribute)) + if self.edge_id_from_attribute + and self.edge_id_from_attribute in data + else str(key), + ) + default = G.graph.get("edge_default", {}) + self.add_attributes("edge", edge_element, data, default) + graph_element.append(edge_element) + else: + for u, v, data in G.edges(data=True): + if self.edge_id_from_attribute and self.edge_id_from_attribute in data: + # select attribute to be edge id + edge_element = self.myElement( + "edge", + source=str(u), + target=str(v), + id=str(data.get(self.edge_id_from_attribute)), + ) + else: + # default: no edge id + edge_element = self.myElement("edge", source=str(u), target=str(v)) + default = G.graph.get("edge_default", {}) + self.add_attributes("edge", edge_element, data, default) + graph_element.append(edge_element) + + def add_graph_element(self, G): + """ + Serialize graph G in GraphML to the stream. + """ + if G.is_directed(): + default_edge_type = "directed" + else: + default_edge_type = "undirected" + + graphid = G.graph.pop("id", None) + if graphid is None: + graph_element = self.myElement("graph", edgedefault=default_edge_type) + else: + graph_element = self.myElement( + "graph", edgedefault=default_edge_type, id=graphid + ) + default = {} + data = { + k: v + for (k, v) in G.graph.items() + if k not in ["node_default", "edge_default"] + } + self.add_attributes("graph", graph_element, data, default) + self.add_nodes(G, graph_element) + self.add_edges(G, graph_element) + + # self.attributes contains a mapping from XML Objects to a list of + # data that needs to be added to them. + # We postpone processing in order to do type inference/generalization. + # See self.attr_type + for xml_obj, data in self.attributes.items(): + for k, v, scope, default in data: + xml_obj.append( + self.add_data( + str(k), self.attr_type(k, scope, v), str(v), scope, default + ) + ) + self.xml.append(graph_element) + + def add_graphs(self, graph_list): + """Add many graphs to this GraphML document.""" + for G in graph_list: + self.add_graph_element(G) + + def dump(self, stream): + from xml.etree.ElementTree import ElementTree + + if self.prettyprint: + self.indent(self.xml) + document = ElementTree(self.xml) + document.write(stream, encoding=self.encoding, xml_declaration=True) + + def indent(self, elem, level=0): + # in-place prettyprint formatter + i = "\n" + level * " " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + self.indent(elem, level + 1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +class IncrementalElement: + """Wrapper for _IncrementalWriter providing an Element like interface. 
+ + This wrapper does not intend to be a complete implementation but rather to + deal with those calls used in GraphMLWriter. + """ + + def __init__(self, xml, prettyprint): + self.xml = xml + self.prettyprint = prettyprint + + def append(self, element): + self.xml.write(element, pretty_print=self.prettyprint) + + +class GraphMLWriterLxml(GraphMLWriter): + def __init__( + self, + path, + graph=None, + encoding="utf-8", + prettyprint=True, + infer_numeric_types=False, + named_key_ids=False, + edge_id_from_attribute=None, + ): + self.construct_types() + import lxml.etree as lxmletree + + self.myElement = lxmletree.Element + + self._encoding = encoding + self._prettyprint = prettyprint + self.named_key_ids = named_key_ids + self.edge_id_from_attribute = edge_id_from_attribute + self.infer_numeric_types = infer_numeric_types + + self._xml_base = lxmletree.xmlfile(path, encoding=encoding) + self._xml = self._xml_base.__enter__() + self._xml.write_declaration() + + # We need to have a xml variable that support insertion. This call is + # used for adding the keys to the document. + # We will store those keys in a plain list, and then after the graph + # element is closed we will add them to the main graphml element. + self.xml = [] + self._keys = self.xml + self._graphml = self._xml.element( + "graphml", + { + "xmlns": self.NS_GRAPHML, + "xmlns:xsi": self.NS_XSI, + "xsi:schemaLocation": self.SCHEMALOCATION, + }, + ) + self._graphml.__enter__() + self.keys = {} + self.attribute_types = defaultdict(set) + + if graph is not None: + self.add_graph_element(graph) + + def add_graph_element(self, G): + """ + Serialize graph G in GraphML to the stream. + """ + if G.is_directed(): + default_edge_type = "directed" + else: + default_edge_type = "undirected" + + graphid = G.graph.pop("id", None) + if graphid is None: + graph_element = self._xml.element("graph", edgedefault=default_edge_type) + else: + graph_element = self._xml.element( + "graph", edgedefault=default_edge_type, id=graphid + ) + + # gather attributes types for the whole graph + # to find the most general numeric format needed. + # Then pass through attributes to create key_id for each. 
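+        # Two passes per scope: the first pass records every Python type seen
+        # for each attribute name, and the second calls attr_type()/get_key()
+        # so a single <key> element covers the most general type of the values.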
+ graphdata = { + k: v + for k, v in G.graph.items() + if k not in ("node_default", "edge_default") + } + node_default = G.graph.get("node_default", {}) + edge_default = G.graph.get("edge_default", {}) + # Graph attributes + for k, v in graphdata.items(): + self.attribute_types[(str(k), "graph")].add(type(v)) + for k, v in graphdata.items(): + element_type = self.get_xml_type(self.attr_type(k, "graph", v)) + self.get_key(str(k), element_type, "graph", None) + # Nodes and data + for node, d in G.nodes(data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "node")].add(type(v)) + for node, d in G.nodes(data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "node", v)) + self.get_key(str(k), T, "node", node_default.get(k)) + # Edges and data + if G.is_multigraph(): + for u, v, ekey, d in G.edges(keys=True, data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, ekey, d in G.edges(keys=True, data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "edge", v)) + self.get_key(str(k), T, "edge", edge_default.get(k)) + else: + for u, v, d in G.edges(data=True): + for k, v in d.items(): + self.attribute_types[(str(k), "edge")].add(type(v)) + for u, v, d in G.edges(data=True): + for k, v in d.items(): + T = self.get_xml_type(self.attr_type(k, "edge", v)) + self.get_key(str(k), T, "edge", edge_default.get(k)) + + # Now add attribute keys to the xml file + for key in self.xml: + self._xml.write(key, pretty_print=self._prettyprint) + + # The incremental_writer writes each node/edge as it is created + incremental_writer = IncrementalElement(self._xml, self._prettyprint) + with graph_element: + self.add_attributes("graph", incremental_writer, graphdata, {}) + self.add_nodes(G, incremental_writer) # adds attributes too + self.add_edges(G, incremental_writer) # adds attributes too + + def add_attributes(self, scope, xml_obj, data, default): + """Appends attribute data.""" + for k, v in data.items(): + data_element = self.add_data( + str(k), self.attr_type(str(k), scope, v), str(v), scope, default.get(k) + ) + xml_obj.append(data_element) + + def __str__(self): + return object.__str__(self) + + def dump(self, stream=None): + self._graphml.__exit__(None, None, None) + self._xml_base.__exit__(None, None, None) + + +# default is lxml is present. +write_graphml = write_graphml_lxml + + +class GraphMLReader(GraphML): + """Read a GraphML document. 
Produces NetworkX graph objects.""" + + def __init__(self, node_type=str, edge_key_type=int, force_multigraph=False): + self.construct_types() + self.node_type = node_type + self.edge_key_type = edge_key_type + self.multigraph = force_multigraph # If False, test for multiedges + self.edge_ids = {} # dict mapping (u,v) tuples to edge id attributes + + def __call__(self, path=None, string=None): + from xml.etree.ElementTree import ElementTree, fromstring + + if path is not None: + self.xml = ElementTree(file=path) + elif string is not None: + self.xml = fromstring(string) + else: + raise ValueError("Must specify either 'path' or 'string' as kwarg") + (keys, defaults) = self.find_graphml_keys(self.xml) + for g in self.xml.findall(f"{{{self.NS_GRAPHML}}}graph"): + yield self.make_graph(g, keys, defaults) + + def make_graph(self, graph_xml, graphml_keys, defaults, G=None): + # set default graph type + edgedefault = graph_xml.get("edgedefault", None) + if G is None: + if edgedefault == "directed": + G = nx.MultiDiGraph() + else: + G = nx.MultiGraph() + # set defaults for graph attributes + G.graph["node_default"] = {} + G.graph["edge_default"] = {} + for key_id, value in defaults.items(): + key_for = graphml_keys[key_id]["for"] + name = graphml_keys[key_id]["name"] + python_type = graphml_keys[key_id]["type"] + if key_for == "node": + G.graph["node_default"].update({name: python_type(value)}) + if key_for == "edge": + G.graph["edge_default"].update({name: python_type(value)}) + # hyperedges are not supported + hyperedge = graph_xml.find(f"{{{self.NS_GRAPHML}}}hyperedge") + if hyperedge is not None: + raise nx.NetworkXError("GraphML reader doesn't support hyperedges") + # add nodes + for node_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}node"): + self.add_node(G, node_xml, graphml_keys, defaults) + # add edges + for edge_xml in graph_xml.findall(f"{{{self.NS_GRAPHML}}}edge"): + self.add_edge(G, edge_xml, graphml_keys) + # add graph data + data = self.decode_data_elements(graphml_keys, graph_xml) + G.graph.update(data) + + # switch to Graph or DiGraph if no parallel edges were found + if self.multigraph: + return G + + G = nx.DiGraph(G) if G.is_directed() else nx.Graph(G) + # add explicit edge "id" from file as attribute in NX graph. + nx.set_edge_attributes(G, values=self.edge_ids, name="id") + return G + + def add_node(self, G, node_xml, graphml_keys, defaults): + """Add a node to the graph.""" + # warn on finding unsupported ports tag + ports = node_xml.find(f"{{{self.NS_GRAPHML}}}port") + if ports is not None: + warnings.warn("GraphML port tag not supported.") + # find the node by id and cast it to the appropriate type + node_id = self.node_type(node_xml.get("id")) + # get data/attributes for node + data = self.decode_data_elements(graphml_keys, node_xml) + G.add_node(node_id, **data) + # get child nodes + if node_xml.attrib.get("yfiles.foldertype") == "group": + graph_xml = node_xml.find(f"{{{self.NS_GRAPHML}}}graph") + self.make_graph(graph_xml, graphml_keys, defaults, G) + + def add_edge(self, G, edge_element, graphml_keys): + """Add an edge to the graph.""" + # warn on finding unsupported ports tag + ports = edge_element.find(f"{{{self.NS_GRAPHML}}}port") + if ports is not None: + warnings.warn("GraphML port tag not supported.") + + # raise error if we find mixed directed and undirected edges + directed = edge_element.get("directed") + if G.is_directed() and directed == "false": + msg = "directed=false edge found in directed graph." 
+ raise nx.NetworkXError(msg) + if (not G.is_directed()) and directed == "true": + msg = "directed=true edge found in undirected graph." + raise nx.NetworkXError(msg) + + source = self.node_type(edge_element.get("source")) + target = self.node_type(edge_element.get("target")) + data = self.decode_data_elements(graphml_keys, edge_element) + # GraphML stores edge ids as an attribute + # NetworkX uses them as keys in multigraphs too if no key + # attribute is specified + edge_id = edge_element.get("id") + if edge_id: + # self.edge_ids is used by `make_graph` method for non-multigraphs + self.edge_ids[source, target] = edge_id + try: + edge_id = self.edge_key_type(edge_id) + except ValueError: # Could not convert. + pass + else: + edge_id = data.get("key") + + if G.has_edge(source, target): + # mark this as a multigraph + self.multigraph = True + + # Use add_edges_from to avoid error with add_edge when `'key' in data` + # Note there is only one edge here... + G.add_edges_from([(source, target, edge_id, data)]) + + def decode_data_elements(self, graphml_keys, obj_xml): + """Use the key information to decode the data XML if present.""" + data = {} + for data_element in obj_xml.findall(f"{{{self.NS_GRAPHML}}}data"): + key = data_element.get("key") + try: + data_name = graphml_keys[key]["name"] + data_type = graphml_keys[key]["type"] + except KeyError as err: + raise nx.NetworkXError(f"Bad GraphML data: no key {key}") from err + text = data_element.text + # assume anything with subelements is a yfiles extension + if text is not None and len(list(data_element)) == 0: + if data_type is bool: + # Ignore cases. + # http://docs.oracle.com/javase/6/docs/api/java/lang/ + # Boolean.html#parseBoolean%28java.lang.String%29 + data[data_name] = self.convert_bool[text.lower()] + else: + data[data_name] = data_type(text) + elif len(list(data_element)) > 0: + # Assume yfiles as subelements, try to extract node_label + node_label = None + # set GenericNode's configuration as shape type + gn = data_element.find(f"{{{self.NS_Y}}}GenericNode") + if gn is not None: + data["shape_type"] = gn.get("configuration") + for node_type in ["GenericNode", "ShapeNode", "SVGNode", "ImageNode"]: + pref = f"{{{self.NS_Y}}}{node_type}/{{{self.NS_Y}}}" + geometry = data_element.find(f"{pref}Geometry") + if geometry is not None: + data["x"] = geometry.get("x") + data["y"] = geometry.get("y") + if node_label is None: + node_label = data_element.find(f"{pref}NodeLabel") + shape = data_element.find(f"{pref}Shape") + if shape is not None: + data["shape_type"] = shape.get("type") + if node_label is not None: + data["label"] = node_label.text + + # check all the different types of edges available in yEd. 
+ for edge_type in [ + "PolyLineEdge", + "SplineEdge", + "QuadCurveEdge", + "BezierEdge", + "ArcEdge", + ]: + pref = f"{{{self.NS_Y}}}{edge_type}/{{{self.NS_Y}}}" + edge_label = data_element.find(f"{pref}EdgeLabel") + if edge_label is not None: + break + if edge_label is not None: + data["label"] = edge_label.text + elif text is None: + data[data_name] = "" + return data + + def find_graphml_keys(self, graph_element): + """Extracts all the keys and key defaults from the xml.""" + graphml_keys = {} + graphml_key_defaults = {} + for k in graph_element.findall(f"{{{self.NS_GRAPHML}}}key"): + attr_id = k.get("id") + attr_type = k.get("attr.type") + attr_name = k.get("attr.name") + yfiles_type = k.get("yfiles.type") + if yfiles_type is not None: + attr_name = yfiles_type + attr_type = "yfiles" + if attr_type is None: + attr_type = "string" + warnings.warn(f"No key type for id {attr_id}. Using string") + if attr_name is None: + raise nx.NetworkXError(f"Unknown key for id {attr_id}.") + graphml_keys[attr_id] = { + "name": attr_name, + "type": self.python_type[attr_type], + "for": k.get("for"), + } + # check for "default" sub-element of key element + default = k.find(f"{{{self.NS_GRAPHML}}}default") + if default is not None: + # Handle default values identically to data element values + python_type = graphml_keys[attr_id]["type"] + if python_type is bool: + graphml_key_defaults[attr_id] = self.convert_bool[ + default.text.lower() + ] + else: + graphml_key_defaults[attr_id] = python_type(default.text) + return graphml_keys, graphml_key_defaults diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__init__.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__init__.py new file mode 100644 index 0000000..532c71d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__init__.py @@ -0,0 +1,19 @@ +""" +********* +JSON data +********* +Generate and parse JSON serializable data for NetworkX graphs. 
+ +These formats are suitable for use with the d3.js examples https://d3js.org/ + +The three formats that you can generate with NetworkX are: + + - node-link like in the d3.js example https://bl.ocks.org/mbostock/4062045 + - tree like in the d3.js example https://bl.ocks.org/mbostock/4063550 + - adjacency like in the d3.js example https://bost.ocks.org/mike/miserables/ +""" + +from networkx.readwrite.json_graph.node_link import * +from networkx.readwrite.json_graph.adjacency import * +from networkx.readwrite.json_graph.tree import * +from networkx.readwrite.json_graph.cytoscape import * diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..01ce195 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-312.pyc new file mode 100644 index 0000000..8754f29 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/adjacency.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-312.pyc new file mode 100644 index 0000000..775729b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/cytoscape.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-312.pyc new file mode 100644 index 0000000..ab5b58d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/node_link.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-312.pyc new file mode 100644 index 0000000..e51d702 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/__pycache__/tree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/adjacency.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/adjacency.py new file mode 100644 index 0000000..3b05747 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/adjacency.py @@ -0,0 +1,156 @@ +import networkx as nx + +__all__ = ["adjacency_data", "adjacency_graph"] + +_attrs = {"id": "id", "key": "key"} + + +def adjacency_data(G, attrs=_attrs): + """Returns data in adjacency format that is suitable for JSON serialization + and use in JavaScript documents. + + Parameters + ---------- + G : NetworkX graph + + attrs : dict + A dictionary that contains two keys 'id' and 'key'. The corresponding + values provide the attribute names for storing NetworkX-internal graph + data. The values should be unique. Default value: + :samp:`dict(id='id', key='key')`. 
+ + If some user-defined graph data use these attribute names as data keys, + they may be silently dropped. + + Returns + ------- + data : dict + A dictionary with adjacency formatted data. + + Raises + ------ + NetworkXError + If values in attrs are not unique. + + Examples + -------- + >>> from networkx.readwrite import json_graph + >>> G = nx.Graph([(1, 2)]) + >>> data = json_graph.adjacency_data(G) + + To serialize with json + + >>> import json + >>> s = json.dumps(data) + + Notes + ----- + Graph, node, and link attributes will be written when using this format + but attribute keys must be strings if you want to serialize the resulting + data with JSON. + + The default value of attrs will be changed in a future release of NetworkX. + + See Also + -------- + adjacency_graph, node_link_data, tree_data + """ + multigraph = G.is_multigraph() + id_ = attrs["id"] + # Allow 'key' to be omitted from attrs if the graph is not a multigraph. + key = None if not multigraph else attrs["key"] + if id_ == key: + raise nx.NetworkXError("Attribute names are not unique.") + data = {} + data["directed"] = G.is_directed() + data["multigraph"] = multigraph + data["graph"] = list(G.graph.items()) + data["nodes"] = [] + data["adjacency"] = [] + for n, nbrdict in G.adjacency(): + data["nodes"].append({**G.nodes[n], id_: n}) + adj = [] + if multigraph: + for nbr, keys in nbrdict.items(): + for k, d in keys.items(): + adj.append({**d, id_: nbr, key: k}) + else: + for nbr, d in nbrdict.items(): + adj.append({**d, id_: nbr}) + data["adjacency"].append(adj) + return data + + +@nx._dispatchable(graphs=None, returns_graph=True) +def adjacency_graph(data, directed=False, multigraph=True, attrs=_attrs): + """Returns graph from adjacency data format. + + Parameters + ---------- + data : dict + Adjacency list formatted graph data + + directed : bool + If True, and direction not specified in data, return a directed graph. + + multigraph : bool + If True, and multigraph not specified in data, return a multigraph. + + attrs : dict + A dictionary that contains two keys 'id' and 'key'. The corresponding + values provide the attribute names for storing NetworkX-internal graph + data. The values should be unique. Default value: + :samp:`dict(id='id', key='key')`. + + Returns + ------- + G : NetworkX graph + A NetworkX graph object + + Examples + -------- + >>> from networkx.readwrite import json_graph + >>> G = nx.Graph([(1, 2)]) + >>> data = json_graph.adjacency_data(G) + >>> H = json_graph.adjacency_graph(data) + + Notes + ----- + The default value of attrs will be changed in a future release of NetworkX. + + See Also + -------- + adjacency_graph, node_link_data, tree_data + """ + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) + if multigraph: + graph = nx.MultiGraph() + else: + graph = nx.Graph() + if directed: + graph = graph.to_directed() + id_ = attrs["id"] + # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
+ key = None if not multigraph else attrs["key"] + graph.graph = dict(data.get("graph", [])) + mapping = [] + for d in data["nodes"]: + node_data = d.copy() + node = node_data.pop(id_) + mapping.append(node) + graph.add_node(node) + graph.nodes[node].update(node_data) + for i, d in enumerate(data["adjacency"]): + source = mapping[i] + for tdata in d: + target_data = tdata.copy() + target = target_data.pop(id_) + if not multigraph: + graph.add_edge(source, target) + graph[source][target].update(target_data) + else: + ky = target_data.pop(key, None) + graph.add_edge(source, target, key=ky) + graph[source][target][ky].update(target_data) + return graph diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/cytoscape.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/cytoscape.py new file mode 100644 index 0000000..417e36f --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/cytoscape.py @@ -0,0 +1,190 @@ +import networkx as nx + +__all__ = ["cytoscape_data", "cytoscape_graph"] + + +def cytoscape_data(G, name="name", ident="id"): + """Returns data in Cytoscape JSON format (cyjs). + + Parameters + ---------- + G : NetworkX Graph + The graph to convert to cytoscape format + name : string + A string which is mapped to the 'name' node element in cyjs format. + Must not have the same value as `ident`. + ident : string + A string which is mapped to the 'id' node element in cyjs format. + Must not have the same value as `name`. + + Returns + ------- + data: dict + A dictionary with cyjs formatted data. + + Raises + ------ + NetworkXError + If the values for `name` and `ident` are identical. + + See Also + -------- + cytoscape_graph: convert a dictionary in cyjs format to a graph + + References + ---------- + .. [1] Cytoscape user's manual: + http://manual.cytoscape.org/en/stable/index.html + + Examples + -------- + >>> from pprint import pprint + >>> G = nx.path_graph(2) + >>> cyto_data = nx.cytoscape_data(G) + >>> pprint(cyto_data, sort_dicts=False) + {'data': [], + 'directed': False, + 'multigraph': False, + 'elements': {'nodes': [{'data': {'id': '0', 'value': 0, 'name': '0'}}, + {'data': {'id': '1', 'value': 1, 'name': '1'}}], + 'edges': [{'data': {'source': 0, 'target': 1}}]}} + + The :mod:`json` package can be used to serialize the resulting data + + >>> import io, json + >>> with io.StringIO() as fh: # replace io with `open(...)` to write to disk + ... json.dump(cyto_data, fh) + ... fh.seek(0) # doctest: +SKIP + ... 
print(fh.getvalue()[:64]) # View the first 64 characters + {"data": [], "directed": false, "multigraph": false, "elements": + + """ + if name == ident: + raise nx.NetworkXError("name and ident must be different.") + + jsondata = {"data": list(G.graph.items())} + jsondata["directed"] = G.is_directed() + jsondata["multigraph"] = G.is_multigraph() + jsondata["elements"] = {"nodes": [], "edges": []} + nodes = jsondata["elements"]["nodes"] + edges = jsondata["elements"]["edges"] + + for i, j in G.nodes.items(): + n = {"data": j.copy()} + n["data"]["id"] = j.get(ident) or str(i) + n["data"]["value"] = i + n["data"]["name"] = j.get(name) or str(i) + nodes.append(n) + + if G.is_multigraph(): + for e in G.edges(keys=True): + n = {"data": G.adj[e[0]][e[1]][e[2]].copy()} + n["data"]["source"] = e[0] + n["data"]["target"] = e[1] + n["data"]["key"] = e[2] + edges.append(n) + else: + for e in G.edges(): + n = {"data": G.adj[e[0]][e[1]].copy()} + n["data"]["source"] = e[0] + n["data"]["target"] = e[1] + edges.append(n) + return jsondata + + +@nx._dispatchable(graphs=None, returns_graph=True) +def cytoscape_graph(data, name="name", ident="id"): + """ + Create a NetworkX graph from a dictionary in cytoscape JSON format. + + Parameters + ---------- + data : dict + A dictionary of data conforming to cytoscape JSON format. + name : string + A string which is mapped to the 'name' node element in cyjs format. + Must not have the same value as `ident`. + ident : string + A string which is mapped to the 'id' node element in cyjs format. + Must not have the same value as `name`. + + Returns + ------- + graph : a NetworkX graph instance + The `graph` can be an instance of `Graph`, `DiGraph`, `MultiGraph`, or + `MultiDiGraph` depending on the input data. + + Raises + ------ + NetworkXError + If the `name` and `ident` attributes are identical. + + See Also + -------- + cytoscape_data: convert a NetworkX graph to a dict in cyjs format + + References + ---------- + .. [1] Cytoscape user's manual: + http://manual.cytoscape.org/en/stable/index.html + + Examples + -------- + >>> data_dict = { + ... "data": [], + ... "directed": False, + ... "multigraph": False, + ... "elements": { + ... "nodes": [ + ... {"data": {"id": "0", "value": 0, "name": "0"}}, + ... {"data": {"id": "1", "value": 1, "name": "1"}}, + ... ], + ... "edges": [{"data": {"source": 0, "target": 1}}], + ... }, + ... 
} + >>> G = nx.cytoscape_graph(data_dict) + >>> G.name + '' + >>> G.nodes() + NodeView((0, 1)) + >>> G.nodes(data=True)[0] + {'id': '0', 'value': 0, 'name': '0'} + >>> G.edges(data=True) + EdgeDataView([(0, 1, {'source': 0, 'target': 1})]) + """ + if name == ident: + raise nx.NetworkXError("name and ident must be different.") + + multigraph = data.get("multigraph") + directed = data.get("directed") + if multigraph: + graph = nx.MultiGraph() + else: + graph = nx.Graph() + if directed: + graph = graph.to_directed() + graph.graph = dict(data.get("data")) + for d in data["elements"]["nodes"]: + node_data = d["data"].copy() + node = d["data"]["value"] + + if d["data"].get(name): + node_data[name] = d["data"].get(name) + if d["data"].get(ident): + node_data[ident] = d["data"].get(ident) + + graph.add_node(node) + graph.nodes[node].update(node_data) + + for d in data["elements"]["edges"]: + edge_data = d["data"].copy() + sour = d["data"]["source"] + targ = d["data"]["target"] + if multigraph: + key = d["data"].get("key", 0) + graph.add_edge(sour, targ, key=key) + graph.edges[sour, targ, key].update(edge_data) + else: + graph.add_edge(sour, targ) + graph.edges[sour, targ].update(edge_data) + return graph diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/node_link.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/node_link.py new file mode 100644 index 0000000..88bfbcd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/node_link.py @@ -0,0 +1,333 @@ +import warnings +from itertools import count + +import networkx as nx + +__all__ = ["node_link_data", "node_link_graph"] + + +def _to_tuple(x): + """Converts lists to tuples, including nested lists. + + All other non-list inputs are passed through unmodified. This function is + intended to be used to convert potentially nested lists from json files + into valid nodes. + + Examples + -------- + >>> _to_tuple([1, 2, [3, 4]]) + (1, 2, (3, 4)) + """ + if not isinstance(x, tuple | list): + return x + return tuple(map(_to_tuple, x)) + + +def node_link_data( + G, + *, + source="source", + target="target", + name="id", + key="key", + edges=None, + nodes="nodes", + link=None, +): + """Returns data in node-link format that is suitable for JSON serialization + and use in JavaScript documents. + + Parameters + ---------- + G : NetworkX graph + source : string + A string that provides the 'source' attribute name for storing NetworkX-internal graph data. + target : string + A string that provides the 'target' attribute name for storing NetworkX-internal graph data. + name : string + A string that provides the 'name' attribute name for storing NetworkX-internal graph data. + key : string + A string that provides the 'key' attribute name for storing NetworkX-internal graph data. + edges : string + A string that provides the 'edges' attribute name for storing NetworkX-internal graph data. + nodes : string + A string that provides the 'nodes' attribute name for storing NetworkX-internal graph data. + link : string + .. deprecated:: 3.4 + + The `link` argument is deprecated and will be removed in version `3.6`. + Use the `edges` keyword instead. + + A string that provides the 'edges' attribute name for storing NetworkX-internal graph data. + + Returns + ------- + data : dict + A dictionary with node-link formatted data. + + Raises + ------ + NetworkXError + If the values of 'source', 'target' and 'key' are not unique. 
+ + Examples + -------- + >>> from pprint import pprint + >>> G = nx.Graph([("A", "B")]) + >>> data1 = nx.node_link_data(G, edges="edges") + >>> pprint(data1) + {'directed': False, + 'edges': [{'source': 'A', 'target': 'B'}], + 'graph': {}, + 'multigraph': False, + 'nodes': [{'id': 'A'}, {'id': 'B'}]} + + To serialize with JSON + + >>> import json + >>> s1 = json.dumps(data1) + >>> pprint(s1) + ('{"directed": false, "multigraph": false, "graph": {}, "nodes": [{"id": "A"}, ' + '{"id": "B"}], "edges": [{"source": "A", "target": "B"}]}') + + + A graph can also be serialized by passing `node_link_data` as an encoder function. + + >>> s1 = json.dumps(G, default=nx.node_link_data) + >>> pprint(s1) + ('{"directed": false, "multigraph": false, "graph": {}, "nodes": [{"id": "A"}, ' + '{"id": "B"}], "links": [{"source": "A", "target": "B"}]}') + + The attribute names for storing NetworkX-internal graph data can + be specified as keyword options. + + >>> H = nx.gn_graph(2) + >>> data2 = nx.node_link_data( + ... H, edges="links", source="from", target="to", nodes="vertices" + ... ) + >>> pprint(data2) + {'directed': True, + 'graph': {}, + 'links': [{'from': 1, 'to': 0}], + 'multigraph': False, + 'vertices': [{'id': 0}, {'id': 1}]} + + Notes + ----- + Graph, node, and link attributes are stored in this format. Note that + attribute keys will be converted to strings in order to comply with JSON. + + Attribute 'key' is only used for multigraphs. + + To use `node_link_data` in conjunction with `node_link_graph`, + the keyword names for the attributes must match. + + See Also + -------- + node_link_graph, adjacency_data, tree_data + """ + # TODO: Remove between the lines when `link` deprecation expires + # ------------------------------------------------------------- + if link is not None: + warnings.warn( + "Keyword argument 'link' is deprecated; use 'edges' instead", + DeprecationWarning, + stacklevel=2, + ) + if edges is not None: + raise ValueError( + "Both 'edges' and 'link' are specified. Use 'edges', 'link' will be remove in a future release" + ) + else: + edges = link + else: + if edges is None: + warnings.warn( + ( + '\nThe default value will be `edges="edges" in NetworkX 3.6.\n\n' + "To make this warning go away, explicitly set the edges kwarg, e.g.:\n\n" + ' nx.node_link_data(G, edges="links") to preserve current behavior, or\n' + ' nx.node_link_data(G, edges="edges") for forward compatibility.' + ), + FutureWarning, + ) + edges = "links" + # ------------------------------------------------------------ + + multigraph = G.is_multigraph() + + # Allow 'key' to be omitted from attrs if the graph is not a multigraph. + key = None if not multigraph else key + if len({source, target, key}) < 3: + raise nx.NetworkXError("Attribute names are not unique.") + data = { + "directed": G.is_directed(), + "multigraph": multigraph, + "graph": G.graph, + nodes: [{**G.nodes[n], name: n} for n in G], + } + if multigraph: + data[edges] = [ + {**d, source: u, target: v, key: k} + for u, v, k, d in G.edges(keys=True, data=True) + ] + else: + data[edges] = [{**d, source: u, target: v} for u, v, d in G.edges(data=True)] + return data + + +@nx._dispatchable(graphs=None, returns_graph=True) +def node_link_graph( + data, + directed=False, + multigraph=True, + *, + source="source", + target="target", + name="id", + key="key", + edges=None, + nodes="nodes", + link=None, +): + """Returns graph from node-link data format. + + Useful for de-serialization from JSON. 
+ + Parameters + ---------- + data : dict + node-link formatted graph data + + directed : bool + If True, and direction not specified in data, return a directed graph. + + multigraph : bool + If True, and multigraph not specified in data, return a multigraph. + + source : string + A string that provides the 'source' attribute name for storing NetworkX-internal graph data. + target : string + A string that provides the 'target' attribute name for storing NetworkX-internal graph data. + name : string + A string that provides the 'name' attribute name for storing NetworkX-internal graph data. + key : string + A string that provides the 'key' attribute name for storing NetworkX-internal graph data. + edges : string + A string that provides the 'edges' attribute name for storing NetworkX-internal graph data. + nodes : string + A string that provides the 'nodes' attribute name for storing NetworkX-internal graph data. + link : string + .. deprecated:: 3.4 + + The `link` argument is deprecated and will be removed in version `3.6`. + Use the `edges` keyword instead. + + A string that provides the 'edges' attribute name for storing NetworkX-internal graph data. + + Returns + ------- + G : NetworkX graph + A NetworkX graph object + + Examples + -------- + + Create data in node-link format by converting a graph. + + >>> from pprint import pprint + >>> G = nx.Graph([("A", "B")]) + >>> data = nx.node_link_data(G, edges="edges") + >>> pprint(data) + {'directed': False, + 'edges': [{'source': 'A', 'target': 'B'}], + 'graph': {}, + 'multigraph': False, + 'nodes': [{'id': 'A'}, {'id': 'B'}]} + + Revert data in node-link format to a graph. + + >>> H = nx.node_link_graph(data, edges="edges") + >>> print(H.edges) + [('A', 'B')] + + To serialize and deserialize a graph with JSON, + + >>> import json + >>> d = json.dumps(nx.node_link_data(G, edges="edges")) + >>> H = nx.node_link_graph(json.loads(d), edges="edges") + >>> print(G.edges, H.edges) + [('A', 'B')] [('A', 'B')] + + + Notes + ----- + Attribute 'key' is only used for multigraphs. + + To use `node_link_data` in conjunction with `node_link_graph`, + the keyword names for the attributes must match. + + See Also + -------- + node_link_data, adjacency_data, tree_data + """ + # TODO: Remove between the lines when `link` deprecation expires + # ------------------------------------------------------------- + if link is not None: + warnings.warn( + "Keyword argument 'link' is deprecated; use 'edges' instead", + DeprecationWarning, + stacklevel=2, + ) + if edges is not None: + raise ValueError( + "Both 'edges' and 'link' are specified. Use 'edges', 'link' will be remove in a future release" + ) + else: + edges = link + else: + if edges is None: + warnings.warn( + ( + '\nThe default value will be changed to `edges="edges" in NetworkX 3.6.\n\n' + "To make this warning go away, explicitly set the edges kwarg, e.g.:\n\n" + ' nx.node_link_graph(data, edges="links") to preserve current behavior, or\n' + ' nx.node_link_graph(data, edges="edges") for forward compatibility.' + ), + FutureWarning, + ) + edges = "links" + # ------------------------------------------------------------- + + multigraph = data.get("multigraph", multigraph) + directed = data.get("directed", directed) + if multigraph: + graph = nx.MultiGraph() + else: + graph = nx.Graph() + if directed: + graph = graph.to_directed() + + # Allow 'key' to be omitted from attrs if the graph is not a multigraph. 
+ key = None if not multigraph else key + graph.graph = data.get("graph", {}) + c = count() + for d in data[nodes]: + node = _to_tuple(d.get(name, next(c))) + nodedata = {str(k): v for k, v in d.items() if k != name} + graph.add_node(node, **nodedata) + for d in data[edges]: + src = tuple(d[source]) if isinstance(d[source], list) else d[source] + tgt = tuple(d[target]) if isinstance(d[target], list) else d[target] + if not multigraph: + edgedata = {str(k): v for k, v in d.items() if k != source and k != target} + graph.add_edge(src, tgt, **edgedata) + else: + ky = d.get(key, None) + edgedata = { + str(k): v + for k, v in d.items() + if k != source and k != target and k != key + } + graph.add_edge(src, tgt, ky, **edgedata) + return graph diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..c8ff487 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_adjacency.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_adjacency.cpython-312.pyc new file mode 100644 index 0000000..bf6547b Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_adjacency.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_cytoscape.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_cytoscape.cpython-312.pyc new file mode 100644 index 0000000..22f8eed Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_cytoscape.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_node_link.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_node_link.cpython-312.pyc new file mode 100644 index 0000000..52fa37c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_node_link.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_tree.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_tree.cpython-312.pyc new file mode 100644 index 0000000..62ed601 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/__pycache__/test_tree.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_adjacency.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_adjacency.py new file mode 100644 index 0000000..3750638 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_adjacency.py @@ -0,0 +1,78 @@ +import copy +import json + +import pytest + +import networkx as nx 
+from networkx.readwrite.json_graph import adjacency_data, adjacency_graph +from networkx.utils import graphs_equal + + +class TestAdjacency: + def test_graph(self): + G = nx.path_graph(4) + H = adjacency_graph(adjacency_data(G)) + assert graphs_equal(G, H) + + def test_graph_attributes(self): + G = nx.path_graph(4) + G.add_node(1, color="red") + G.add_edge(1, 2, width=7) + G.graph["foo"] = "bar" + G.graph[1] = "one" + + H = adjacency_graph(adjacency_data(G)) + assert graphs_equal(G, H) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + d = json.dumps(adjacency_data(G)) + H = adjacency_graph(json.loads(d)) + assert graphs_equal(G, H) + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + def test_digraph(self): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + H = adjacency_graph(adjacency_data(G)) + assert H.is_directed() + assert graphs_equal(G, H) + + def test_multidigraph(self): + G = nx.MultiDiGraph() + nx.add_path(G, [1, 2, 3]) + H = adjacency_graph(adjacency_data(G)) + assert H.is_directed() + assert H.is_multigraph() + assert graphs_equal(G, H) + + def test_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") + H = adjacency_graph(adjacency_data(G)) + assert graphs_equal(G, H) + assert H[1][2]["second"]["color"] == "blue" + + def test_input_data_is_not_modified_when_building_graph(self): + G = nx.path_graph(4) + input_data = adjacency_data(G) + orig_data = copy.deepcopy(input_data) + # Ensure input is unmodified by deserialisation + assert graphs_equal(G, adjacency_graph(input_data)) + assert input_data == orig_data + + def test_adjacency_form_json_serialisable(self): + G = nx.path_graph(4) + H = adjacency_graph(json.loads(json.dumps(adjacency_data(G)))) + assert graphs_equal(G, H) + + def test_exception(self): + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + attrs = {"id": "node", "key": "node"} + adjacency_data(G, attrs) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_cytoscape.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_cytoscape.py new file mode 100644 index 0000000..5d47f21 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_cytoscape.py @@ -0,0 +1,78 @@ +import copy +import json + +import pytest + +import networkx as nx +from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph + + +def test_graph(): + G = nx.path_graph(4) + H = cytoscape_graph(cytoscape_data(G)) + assert nx.is_isomorphic(G, H) + + +def test_input_data_is_not_modified_when_building_graph(): + G = nx.path_graph(4) + input_data = cytoscape_data(G) + orig_data = copy.deepcopy(input_data) + # Ensure input is unmodified by cytoscape_graph (gh-4173) + cytoscape_graph(input_data) + assert input_data == orig_data + + +def test_graph_attributes(): + G = nx.path_graph(4) + G.add_node(1, color="red") + G.add_edge(1, 2, width=7) + G.graph["foo"] = "bar" + G.graph[1] = "one" + G.add_node(3, name="node", id="123") + + H = cytoscape_graph(cytoscape_data(G)) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" + + d = json.dumps(cytoscape_data(G)) + H = cytoscape_graph(json.loads(d)) + assert H.graph["foo"] == "bar" + assert H.graph[1] == "one" + assert 
H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + assert H.nodes[3]["name"] == "node" + assert H.nodes[3]["id"] == "123" + + +def test_digraph(): + G = nx.DiGraph() + nx.add_path(G, [1, 2, 3]) + H = cytoscape_graph(cytoscape_data(G)) + assert H.is_directed() + assert nx.is_isomorphic(G, H) + + +def test_multidigraph(): + G = nx.MultiDiGraph() + nx.add_path(G, [1, 2, 3]) + H = cytoscape_graph(cytoscape_data(G)) + assert H.is_directed() + assert H.is_multigraph() + + +def test_multigraph(): + G = nx.MultiGraph() + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") + H = cytoscape_graph(cytoscape_data(G)) + assert nx.is_isomorphic(G, H) + assert H[1][2]["second"]["color"] == "blue" + + +def test_exception(): + with pytest.raises(nx.NetworkXError): + G = nx.MultiDiGraph() + cytoscape_data(G, name="foo", ident="foo") diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_node_link.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_node_link.py new file mode 100644 index 0000000..f903f60 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_node_link.py @@ -0,0 +1,175 @@ +import json + +import pytest + +import networkx as nx +from networkx.readwrite.json_graph import node_link_data, node_link_graph + + +def test_node_link_edges_default_future_warning(): + "Test FutureWarning is raised when `edges=None` in node_link_data and node_link_graph" + G = nx.Graph([(1, 2)]) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + data = nx.node_link_data(G) # edges=None, the default + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = nx.node_link_graph(data) # edges=None, the default + + +def test_node_link_deprecated_link_param(): + G = nx.Graph([(1, 2)]) + with pytest.warns(DeprecationWarning, match="Keyword argument 'link'"): + data = nx.node_link_data(G, link="links") + with pytest.warns(DeprecationWarning, match="Keyword argument 'link'"): + H = nx.node_link_graph(data, link="links") + + +class TestNodeLink: + # TODO: To be removed when signature change complete + def test_custom_attrs_dep(self): + G = nx.path_graph(4) + G.add_node(1, color="red") + G.add_edge(1, 2, width=7) + G.graph[1] = "one" + G.graph["foo"] = "bar" + + attrs = { + "source": "c_source", + "target": "c_target", + "name": "c_id", + "key": "c_key", + "link": "c_links", + } + + H = node_link_graph(node_link_data(G, **attrs), multigraph=False, **attrs) + assert nx.is_isomorphic(G, H) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + # provide only a partial dictionary of keywords. 
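The Cytoscape tests above follow the same pattern; a minimal round-trip sketch using the cytoscape_data/cytoscape_graph pair they exercise:

    import networkx as nx
    from networkx.readwrite.json_graph import cytoscape_data, cytoscape_graph

    # Round-trip through the Cytoscape JSON format; node and edge
    # attributes travel inside each element's "data" dict.
    G = nx.path_graph(3)
    G.add_edge(1, 2, width=7)
    H = cytoscape_graph(cytoscape_data(G))
    assert nx.is_isomorphic(G, H)
    assert H[1][2]["width"] == 7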
+ # This is similar to an example in the doc string + attrs = { + "link": "c_links", + "source": "c_source", + "target": "c_target", + } + H = node_link_graph(node_link_data(G, **attrs), multigraph=False, **attrs) + assert nx.is_isomorphic(G, H) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + def test_exception_dep(self): + G = nx.MultiDiGraph() + with pytest.raises(nx.NetworkXError): + with pytest.warns(FutureWarning, match="\nThe default value will be"): + node_link_data(G, name="node", source="node", target="node", key="node") + + def test_graph(self): + G = nx.path_graph(4) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(node_link_data(G)) + assert nx.is_isomorphic(G, H) + + def test_graph_attributes(self): + G = nx.path_graph(4) + G.add_node(1, color="red") + G.add_edge(1, 2, width=7) + G.graph[1] = "one" + G.graph["foo"] = "bar" + + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(node_link_data(G)) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + with pytest.warns(FutureWarning, match="\nThe default value will be"): + d = json.dumps(node_link_data(G)) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(json.loads(d)) + assert H.graph["foo"] == "bar" + assert H.graph["1"] == "one" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 + + def test_digraph(self): + G = nx.DiGraph() + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(node_link_data(G)) + assert H.is_directed() + + def test_multigraph(self): + G = nx.MultiGraph() + G.add_edge(1, 2, key="first") + G.add_edge(1, 2, key="second", color="blue") + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(node_link_data(G)) + assert nx.is_isomorphic(G, H) + assert H[1][2]["second"]["color"] == "blue" + + def test_graph_with_tuple_nodes(self): + G = nx.Graph() + G.add_edge((0, 0), (1, 0), color=[255, 255, 0]) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + d = node_link_data(G) + dumped_d = json.dumps(d) + dd = json.loads(dumped_d) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(dd) + assert H.nodes[(0, 0)] == G.nodes[(0, 0)] + assert H[(0, 0)][(1, 0)]["color"] == [255, 255, 0] + + def test_unicode_keys(self): + q = "qualité" + G = nx.Graph() + G.add_node(1, **{q: q}) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + s = node_link_data(G) + output = json.dumps(s, ensure_ascii=False) + data = json.loads(output) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = node_link_graph(data) + assert H.nodes[1][q] == q + + def test_exception(self): + G = nx.MultiDiGraph() + attrs = {"name": "node", "source": "node", "target": "node", "key": "node"} + with pytest.raises(nx.NetworkXError): + with pytest.warns(FutureWarning, match="\nThe default value will be"): + node_link_data(G, **attrs) + + def test_string_ids(self): + q = "qualité" + G = nx.DiGraph() + G.add_node("A") + G.add_node(q) + G.add_edge("A", q) + with pytest.warns(FutureWarning, match="\nThe default value will be"): + data = node_link_data(G) + assert data["links"][0]["source"] == "A" + assert data["links"][0]["target"] == q + with pytest.warns(FutureWarning, match="\nThe default value will be"): + H = 
node_link_graph(data) + assert nx.is_isomorphic(G, H) + + def test_custom_attrs(self): + G = nx.path_graph(4) + G.add_node(1, color="red") + G.add_edge(1, 2, width=7) + G.graph[1] = "one" + G.graph["foo"] = "bar" + + attrs = { + "source": "c_source", + "target": "c_target", + "name": "c_id", + "key": "c_key", + "link": "c_links", + } + + H = node_link_graph(node_link_data(G, **attrs), multigraph=False, **attrs) + assert nx.is_isomorphic(G, H) + assert H.graph["foo"] == "bar" + assert H.nodes[1]["color"] == "red" + assert H[1][2]["width"] == 7 diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_tree.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_tree.py new file mode 100644 index 0000000..643a14d --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tests/test_tree.py @@ -0,0 +1,48 @@ +import json + +import pytest + +import networkx as nx +from networkx.readwrite.json_graph import tree_data, tree_graph + + +def test_graph(): + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3], color="red") + G.add_edge(1, 2, foo=7) + G.add_edge(1, 3, foo=10) + G.add_edge(3, 4, foo=10) + H = tree_graph(tree_data(G, 1)) + assert nx.is_isomorphic(G, H) + + +def test_graph_attributes(): + G = nx.DiGraph() + G.add_nodes_from([1, 2, 3], color="red") + G.add_edge(1, 2, foo=7) + G.add_edge(1, 3, foo=10) + G.add_edge(3, 4, foo=10) + H = tree_graph(tree_data(G, 1)) + assert H.nodes[1]["color"] == "red" + + d = json.dumps(tree_data(G, 1)) + H = tree_graph(json.loads(d)) + assert H.nodes[1]["color"] == "red" + + +def test_exceptions(): + with pytest.raises(TypeError, match="is not a tree."): + G = nx.complete_graph(3) + tree_data(G, 0) + with pytest.raises(TypeError, match="is not directed."): + G = nx.path_graph(3) + tree_data(G, 0) + with pytest.raises(TypeError, match="is not weakly connected."): + G = nx.path_graph(3, create_using=nx.DiGraph) + G.add_edge(2, 0) + G.add_node(3) + tree_data(G, 0) + with pytest.raises(nx.NetworkXError, match="must be different."): + G = nx.MultiDiGraph() + G.add_node(0) + tree_data(G, 0, ident="node", children="node") diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tree.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tree.py new file mode 100644 index 0000000..22b07b0 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/json_graph/tree.py @@ -0,0 +1,137 @@ +from itertools import chain + +import networkx as nx + +__all__ = ["tree_data", "tree_graph"] + + +def tree_data(G, root, ident="id", children="children"): + """Returns data in tree format that is suitable for JSON serialization + and use in JavaScript documents. + + Parameters + ---------- + G : NetworkX graph + G must be an oriented tree + + root : node + The root of the tree + + ident : string + Attribute name for storing NetworkX-internal graph data. `ident` must + have a different value than `children`. The default is 'id'. + + children : string + Attribute name for storing NetworkX-internal graph data. `children` + must have a different value than `ident`. The default is 'children'. + + Returns + ------- + data : dict + A dictionary with node-link formatted data. + + Raises + ------ + NetworkXError + If `children` and `ident` attributes are identical. 
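The FutureWarning assertions above track the pending change of the default edge key in node_link_data/node_link_graph; passing `edges` explicitly avoids the warning (assuming this vendored version accepts the keyword, as the warning text implies):

    import networkx as nx

    G = nx.Graph([(1, 2)])
    # Passing edges= explicitly opts in to a key name and silences the
    # FutureWarning about the changing default (assumed available here,
    # per the warning text asserted in the tests above).
    data = nx.node_link_data(G, edges="edges")
    H = nx.node_link_graph(data, edges="edges")
    assert nx.is_isomorphic(G, H)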
+ + Examples + -------- + >>> from networkx.readwrite import json_graph + >>> G = nx.DiGraph([(1, 2)]) + >>> data = json_graph.tree_data(G, root=1) + + To serialize with json + + >>> import json + >>> s = json.dumps(data) + + Notes + ----- + Node attributes are stored in this format but keys + for attributes must be strings if you want to serialize with JSON. + + Graph and edge attributes are not stored. + + See Also + -------- + tree_graph, node_link_data, adjacency_data + """ + if G.number_of_nodes() != G.number_of_edges() + 1: + raise TypeError("G is not a tree.") + if not G.is_directed(): + raise TypeError("G is not directed.") + if not nx.is_weakly_connected(G): + raise TypeError("G is not weakly connected.") + + if ident == children: + raise nx.NetworkXError("The values for `id` and `children` must be different.") + + def add_children(n, G): + nbrs = G[n] + if len(nbrs) == 0: + return [] + children_ = [] + for child in nbrs: + d = {**G.nodes[child], ident: child} + c = add_children(child, G) + if c: + d[children] = c + children_.append(d) + return children_ + + return {**G.nodes[root], ident: root, children: add_children(root, G)} + + +@nx._dispatchable(graphs=None, returns_graph=True) +def tree_graph(data, ident="id", children="children"): + """Returns graph from tree data format. + + Parameters + ---------- + data : dict + Tree formatted graph data + + ident : string + Attribute name for storing NetworkX-internal graph data. `ident` must + have a different value than `children`. The default is 'id'. + + children : string + Attribute name for storing NetworkX-internal graph data. `children` + must have a different value than `ident`. The default is 'children'. + + Returns + ------- + G : NetworkX DiGraph + + Examples + -------- + >>> from networkx.readwrite import json_graph + >>> G = nx.DiGraph([(1, 2)]) + >>> data = json_graph.tree_data(G, root=1) + >>> H = json_graph.tree_graph(data) + + See Also + -------- + tree_data, node_link_data, adjacency_data + """ + graph = nx.DiGraph() + + def add_children(parent, children_): + for data in children_: + child = data[ident] + graph.add_edge(parent, child) + grandchildren = data.get(children, []) + if grandchildren: + add_children(child, grandchildren) + nodedata = { + str(k): v for k, v in data.items() if k != ident and k != children + } + graph.add_node(child, **nodedata) + + root = data[ident] + children_ = data.get(children, []) + nodedata = {str(k): v for k, v in data.items() if k != ident and k != children} + graph.add_node(root, **nodedata) + add_children(root, children_) + return graph diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/leda.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/leda.py new file mode 100644 index 0000000..8d88a67 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/leda.py @@ -0,0 +1,108 @@ +""" +Read graphs in LEDA format. + +LEDA is a C++ class library for efficient data types and algorithms. + +Format +------ +See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + +""" +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. + +__all__ = ["read_leda", "parse_leda"] + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.utils import open_file + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_leda(path, encoding="UTF-8"): + """Read graph in LEDA format from path. 
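For reference, the nested dict produced by tree_data (and consumed by tree_graph above) carries one `id` per node and an optional `children` list; a minimal sketch:

    import networkx as nx
    from networkx.readwrite import json_graph

    G = nx.DiGraph([(1, 2), (1, 3)])
    data = json_graph.tree_data(G, root=1)
    # data == {"id": 1, "children": [{"id": 2}, {"id": 3}]}
    # Leaves omit the "children" key entirely.
    H = json_graph.tree_graph(data)
    assert nx.is_isomorphic(G, H)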
+ + Parameters + ---------- + path : file or string + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + Returns + ------- + G : NetworkX graph + + Examples + -------- + >>> G = nx.read_leda("file.leda") # doctest: +SKIP + + References + ---------- + .. [1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + """ + lines = (line.decode(encoding) for line in path) + G = parse_leda(lines) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_leda(lines): + """Read graph in LEDA format from string or iterable. + + Parameters + ---------- + lines : string or iterable + Data in LEDA format. + + Returns + ------- + G : NetworkX graph + + Examples + -------- + >>> G = nx.parse_leda(string) # doctest: +SKIP + + References + ---------- + .. [1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html + """ + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter( + [ + line.rstrip("\n") + for line in lines + if not (line.startswith(("#", "\n")) or line == "") + ] + ) + for i in range(3): + next(lines) + # Graph + du = int(next(lines)) # -1=directed, -2=undirected + if du == -1: + G = nx.DiGraph() + else: + G = nx.Graph() + + # Nodes + n = int(next(lines)) # number of nodes + node = {} + for i in range(1, n + 1): # LEDA counts from 1 to n + symbol = next(lines).rstrip().strip("|{}| ") + if symbol == "": + symbol = str(i) # use int if no label - could be trouble + node[i] = symbol + + G.add_nodes_from([s for i, s in node.items()]) + + # Edges + m = int(next(lines)) # number of edges + for i in range(m): + try: + s, t, reversal, label = next(lines).split() + except BaseException as err: + raise NetworkXError(f"Too few fields in LEDA.GRAPH edge {i + 1}") from err + # BEWARE: no handling of reversal edges + G.add_edge(node[int(s)], node[int(t)], label=label[2:-2]) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/multiline_adjlist.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/multiline_adjlist.py new file mode 100644 index 0000000..f5b0b1c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/multiline_adjlist.py @@ -0,0 +1,393 @@ +""" +************************* +Multi-line Adjacency List +************************* +Read and write NetworkX graphs as multi-line adjacency lists. + +The multi-line adjacency list format is useful for graphs with +nodes that can be meaningfully represented as strings. With this format +simple edge data can be stored but node or graph data is not. + +Format +------ +The first label in a line is the source node label followed by the node degree +d. The next d lines are target node labels and optional edge data. +That pattern repeats for all nodes in the graph. + +The graph with edges a-b, a-c, d-e can be represented as the following +adjacency list (anything following the # in a line is a comment):: + + # example.multiline-adjlist + a 2 + b + c + d 1 + e +""" + +__all__ = [ + "generate_multiline_adjlist", + "write_multiline_adjlist", + "parse_multiline_adjlist", + "read_multiline_adjlist", +] + +import networkx as nx +from networkx.utils import open_file + + +def generate_multiline_adjlist(G, delimiter=" "): + """Generate a single line of the graph G in multiline adjacency list format. 
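parse_leda, defined above, also accepts a raw LEDA.GRAPH string; a minimal sketch with made-up labels, following the header/node/edge layout the parser expects:

    import networkx as nx

    # Three header lines, then -1 (directed), the node count, one
    # |{label}| line per node, the edge count, and
    # "source target reversal |{label}|" edge lines.
    leda = """LEDA.GRAPH
    string
    int
    -1
    3
    |{a}|
    |{b}|
    |{c}|
    2
    1 2 0 |{ab}|
    2 3 0 |{bc}|
    """
    G = nx.parse_leda(leda)
    assert sorted(G.edges(data="label")) == [("a", "b", "ab"), ("b", "c", "bc")]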
+ + Parameters + ---------- + G : NetworkX graph + + delimiter : string, optional + Separator for node labels + + Returns + ------- + lines : string + Lines of data in multiline adjlist format. + + Examples + -------- + >>> G = nx.lollipop_graph(4, 3) + >>> for line in nx.generate_multiline_adjlist(G): + ... print(line) + 0 3 + 1 {} + 2 {} + 3 {} + 1 2 + 2 {} + 3 {} + 2 1 + 3 {} + 3 1 + 4 {} + 4 1 + 5 {} + 5 1 + 6 {} + 6 0 + + See Also + -------- + write_multiline_adjlist, read_multiline_adjlist + """ + if G.is_directed(): + if G.is_multigraph(): + for s, nbrs in G.adjacency(): + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + for key, data in datadict.items() + ] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + else: # directed single edges + for s, nbrs in G.adjacency(): + deg = len(nbrs) + yield str(s) + delimiter + str(deg) + for u, d in nbrs.items(): + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + else: # undirected + if G.is_multigraph(): + seen = set() # helper dict used to avoid duplicate edges + for s, nbrs in G.adjacency(): + nbr_edges = [ + (u, data) + for u, datadict in nbrs.items() + if u not in seen + for key, data in datadict.items() + ] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + seen.add(s) + else: # undirected single edges + seen = set() # helper dict used to avoid duplicate edges + for s, nbrs in G.adjacency(): + nbr_edges = [(u, d) for u, d in nbrs.items() if u not in seen] + deg = len(nbr_edges) + yield str(s) + delimiter + str(deg) + for u, d in nbr_edges: + if d is None: + yield str(u) + else: + yield str(u) + delimiter + str(d) + seen.add(s) + + +@open_file(1, mode="wb") +def write_multiline_adjlist(G, path, delimiter=" ", comments="#", encoding="utf-8"): + """Write the graph G in multiline adjacency list format to path + + Parameters + ---------- + G : NetworkX graph + + path : string or file + Filename or file handle to write to. + Filenames ending in .gz or .bz2 will be compressed. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels + + encoding : string, optional + Text encoding. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.multi_adjlist") + + The path can be a file handle or a string with the name of the file. If a + file handle is provided, it has to be opened in 'wb' mode. + + >>> fh = open("test.multi_adjlist2", "wb") + >>> nx.write_multiline_adjlist(G, fh) + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_multiline_adjlist(G, "test.multi_adjlist.gz") + + See Also + -------- + read_multiline_adjlist + """ + import sys + import time + + pargs = comments + " ".join(sys.argv) + header = ( + f"{pargs}\n" + + comments + + f" GMT {time.asctime(time.gmtime())}\n" + + comments + + f" {G.name}\n" + ) + path.write(header.encode(encoding)) + + for multiline in generate_multiline_adjlist(G, delimiter): + multiline += "\n" + path.write(multiline.encode(encoding)) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_multiline_adjlist( + lines, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None +): + """Parse lines of a multiline adjacency list representation of a graph. 
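Pairing the generator above with parse_multiline_adjlist (defined next) gives an in-memory round trip; a minimal sketch:

    import networkx as nx
    from networkx.utils import graphs_equal

    G = nx.path_graph(3)
    # Yields "0 1" (node, degree) followed by one "<nbr> {...}" data
    # line per neighbor, then the next node's block, and so on.
    lines = list(nx.generate_multiline_adjlist(G))
    H = nx.parse_multiline_adjlist(iter(lines), nodetype=int)
    assert graphs_equal(G, H)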
+ + Parameters + ---------- + lines : list or iterator of strings + Input data in multiline adjlist format + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + edgetype : Python type, optional + Convert edges to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + The graph corresponding to the lines in multiline adjacency list format. + + Examples + -------- + >>> lines = [ + ... "1 2", + ... "2 {'weight':3, 'name': 'Frodo'}", + ... "3 {}", + ... "2 1", + ... "5 {'weight':6, 'name': 'Saruman'}", + ... ] + >>> G = nx.parse_multiline_adjlist(iter(lines), nodetype=int) + >>> list(G) + [1, 2, 3, 5] + + """ + from ast import literal_eval + + G = nx.empty_graph(0, create_using) + for line in lines: + p = line.find(comments) + if p >= 0: + line = line[:p] + if not line: + continue + try: + (u, deg) = line.rstrip("\n").split(delimiter) + deg = int(deg) + except BaseException as err: + raise TypeError(f"Failed to read node and degree on line ({line})") from err + if nodetype is not None: + try: + u = nodetype(u) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({u}) to type {nodetype}" + ) from err + G.add_node(u) + for i in range(deg): + while True: + try: + line = next(lines) + except StopIteration as err: + msg = f"Failed to find neighbor for node ({u})" + raise TypeError(msg) from err + p = line.find(comments) + if p >= 0: + line = line[:p] + if line: + break + vlist = line.rstrip("\n").split(delimiter) + numb = len(vlist) + if numb < 1: + continue # isolated node + v = vlist.pop(0) + data = "".join(vlist) + if nodetype is not None: + try: + v = nodetype(v) + except BaseException as err: + raise TypeError( + f"Failed to convert node ({v}) to type {nodetype}" + ) from err + if edgetype is not None: + try: + edgedata = {"weight": edgetype(data)} + except BaseException as err: + raise TypeError( + f"Failed to convert edge data ({data}) to type {edgetype}" + ) from err + else: + try: # try to evaluate + edgedata = literal_eval(data) + except: + edgedata = {} + G.add_edge(u, v, **edgedata) + + return G + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_multiline_adjlist( + path, + comments="#", + delimiter=None, + create_using=None, + nodetype=None, + edgetype=None, + encoding="utf-8", +): + """Read graph in multi-line adjacency list format from path. + + Parameters + ---------- + path : string or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + create_using : NetworkX graph constructor, optional (default=nx.Graph) + Graph type to create. If graph instance, then cleared before populated. + + nodetype : Python type, optional + Convert nodes to this type. + + edgetype : Python type, optional + Convert edge data to this type. + + comments : string, optional + Marker for comment lines + + delimiter : string, optional + Separator for node labels. The default is whitespace. + + Returns + ------- + G: NetworkX graph + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_multiline_adjlist(G, "test.multi_adjlistP4") + >>> G = nx.read_multiline_adjlist("test.multi_adjlistP4") + + The path can be a file or a string with the name of the file. 
If a + file is provided, it has to be opened in 'rb' mode. + + >>> fh = open("test.multi_adjlistP4", "rb") + >>> G = nx.read_multiline_adjlist(fh) + + Filenames ending in .gz or .bz2 will be compressed. + + >>> nx.write_multiline_adjlist(G, "test.multi_adjlistP4.gz") + >>> G = nx.read_multiline_adjlist("test.multi_adjlistP4.gz") + + The optional nodetype is a function to convert node strings to nodetype. + + For example + + >>> G = nx.read_multiline_adjlist("test.multi_adjlistP4", nodetype=int) + + will attempt to convert all nodes to integer type. + + The optional edgetype is a function to convert edge data strings to + edgetype. + + >>> G = nx.read_multiline_adjlist("test.multi_adjlistP4") + + The optional create_using parameter is a NetworkX graph container. + The default is Graph(), an undirected graph. To read the data as + a directed graph use + + >>> G = nx.read_multiline_adjlist("test.multi_adjlistP4", create_using=nx.DiGraph) + + Notes + ----- + This format does not store graph, node, or edge data. + + See Also + -------- + write_multiline_adjlist + """ + lines = (line.decode(encoding) for line in path) + return parse_multiline_adjlist( + lines, + comments=comments, + delimiter=delimiter, + create_using=create_using, + nodetype=nodetype, + edgetype=edgetype, + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/p2g.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/p2g.py new file mode 100644 index 0000000..4bde362 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/p2g.py @@ -0,0 +1,113 @@ +""" +This module provides the following: read and write of p2g format +used in metabolic pathway studies. + +See: + +for a description. + +The summary is included here: + +A file that describes a uniquely labeled graph (with extension ".gr") +format looks like the following: + + +name +3 4 +a +1 2 +b + +c +0 2 + +"name" is simply a description of what the graph corresponds to. The +second line displays the number of nodes and number of edges, +respectively. This sample graph contains three nodes labeled "a", "b", +and "c". The rest of the graph contains two lines for each node. The +first line for a node contains the node label. After the declaration +of the node label, the out-edges of that node in the graph are +provided. For instance, "a" is linked to nodes 1 and 2, which are +labeled "b" and "c", while the node labeled "b" has no outgoing +edges. Observe that node labeled "c" has an outgoing edge to +itself. Indeed, self-loops are allowed. Node index starts from 0. + +""" + +import networkx as nx +from networkx.utils import open_file + + +@open_file(1, mode="w") +def write_p2g(G, path, encoding="utf-8"): + """Write NetworkX graph in p2g format. + + Notes + ----- + This format is meant to be used with directed graphs with + possible self loops. + """ + path.write((f"{G.name}\n").encode(encoding)) + path.write((f"{G.order()} {G.size()}\n").encode(encoding)) + nodes = list(G) + # make dictionary mapping nodes to integers + nodenumber = dict(zip(nodes, range(len(nodes)))) + for n in nodes: + path.write((f"{n}\n").encode(encoding)) + for nbr in G.neighbors(n): + path.write((f"{nodenumber[nbr]} ").encode(encoding)) + path.write("\n".encode(encoding)) + + +@open_file(0, mode="r") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_p2g(path, encoding="utf-8"): + """Read graph in p2g format from path. + + Parameters + ---------- + path : string or file + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed.
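A buffer-based sketch of write_p2g output follows; the direct import path is an assumption, since the module shown here does not appear to re-export these helpers at the top level:

    import io

    import networkx as nx
    from networkx.readwrite.p2g import write_p2g  # assumed import path

    G = nx.DiGraph(name="example graph")
    G.add_edges_from([("a", "b"), ("a", "c"), ("c", "c")])
    buf = io.BytesIO()
    write_p2g(G, buf)
    # Prints the name line, the "nodes edges" line, then one label line
    # and one line of out-edge indices per node.
    print(buf.getvalue().decode())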
+ + Returns + ------- + MultiDiGraph + + Notes + ----- + If you want a DiGraph (with no self loops allowed and no edge data) + use D=nx.DiGraph(read_p2g(path)) + """ + lines = (line.decode(encoding) for line in path) + G = parse_p2g(lines) + return G + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_p2g(lines): + """Parse p2g format graph from string or iterable. + + Returns + ------- + MultiDiGraph + """ + description = next(lines).strip() + # are multiedges (parallel edges) allowed? + G = nx.MultiDiGraph(name=description, selfloops=True) + nnodes, nedges = map(int, next(lines).split()) + nodelabel = {} + nbrs = {} + # loop over the nodes keeping track of node labels and out neighbors + # defer adding edges until all node labels are known + for i in range(nnodes): + n = next(lines).strip() + nodelabel[i] = n + G.add_node(n) + nbrs[n] = map(int, next(lines).split()) + # now we know all of the node labels so we can add the edges + # with the correct labels + for n in G: + for nbr in nbrs[n]: + G.add_edge(n, nodelabel[nbr]) + return G diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/pajek.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/pajek.py new file mode 100644 index 0000000..2cab6b9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/pajek.py @@ -0,0 +1,286 @@ +""" +***** +Pajek +***** +Read graphs in Pajek format. + +This implementation handles directed and undirected graphs including +those with self loops and parallel edges. + +Format +------ +See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm +for format information. + +""" + +import warnings + +import networkx as nx +from networkx.utils import open_file + +__all__ = ["read_pajek", "parse_pajek", "generate_pajek", "write_pajek"] + + +def generate_pajek(G): + """Generate lines in Pajek graph format. + + Parameters + ---------- + G : graph + A Networkx graph + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + if G.name == "": + name = "NetworkX" + else: + name = G.name + # Apparently many Pajek format readers can't process this line + # So we'll leave it out for now. + # yield '*network %s'%name + + # write nodes with attributes + yield f"*vertices {G.order()}" + nodes = list(G) + # make dictionary mapping nodes to integers + nodenumber = dict(zip(nodes, range(1, len(nodes) + 1))) + for n in nodes: + # copy node attributes and pop mandatory attributes + # to avoid duplication. + na = G.nodes.get(n, {}).copy() + x = na.pop("x", 0.0) + y = na.pop("y", 0.0) + try: + id = int(na.pop("id", nodenumber[n])) + except ValueError as err: + err.args += ( + ( + "Pajek format requires 'id' to be an int()." + " Refer to the 'Relabeling nodes' section." + ), + ) + raise + nodenumber[n] = id + shape = na.pop("shape", "ellipse") + s = " ".join(map(make_qstr, (id, n, x, y, shape))) + # only optional attributes are left in na. + for k, v in na.items(): + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Node attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." 
+ ) + yield s + + # write edges with attributes + if G.is_directed(): + yield "*arcs" + else: + yield "*edges" + for u, v, edgedata in G.edges(data=True): + d = edgedata.copy() + value = d.pop("weight", 1.0) # use 1 as default edge value + s = " ".join(map(make_qstr, (nodenumber[u], nodenumber[v], value))) + for k, v in d.items(): + if isinstance(v, str) and v.strip() != "": + s += f" {make_qstr(k)} {make_qstr(v)}" + else: + warnings.warn( + f"Edge attribute {k} is not processed. {('Empty attribute' if isinstance(v, str) else 'Non-string attribute')}." + ) + yield s + + +@open_file(1, mode="wb") +def write_pajek(G, path, encoding="UTF-8"): + """Write graph in Pajek format to path. + + Parameters + ---------- + G : graph + A Networkx graph + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_pajek(G, "test.netP4") + + Warnings + -------- + Optional node attributes and edge attributes must be non-empty strings. + Otherwise it will not be written into the file. You will need to + convert those attributes to strings if you want to keep them. + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + for line in generate_pajek(G): + line += "\n" + path.write(line.encode(encoding)) + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_pajek(path, encoding="UTF-8"): + """Read graph in Pajek format from path. + + Parameters + ---------- + path : file or string + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + Returns + ------- + G : NetworkX MultiGraph or MultiDiGraph. + + Examples + -------- + >>> G = nx.path_graph(4) + >>> nx.write_pajek(G, "test.net") + >>> G = nx.read_pajek("test.net") + + To create a Graph instead of a MultiGraph use + + >>> G1 = nx.Graph(G) + + References + ---------- + See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm + for format information. + """ + lines = (line.decode(encoding) for line in path) + return parse_pajek(lines) + + +@nx._dispatchable(graphs=None, returns_graph=True) +def parse_pajek(lines): + """Parse Pajek format graph from string or iterable. + + Parameters + ---------- + lines : string or iterable + Data in Pajek format. + + Returns + ------- + G : NetworkX graph + + See Also + -------- + read_pajek + + """ + import shlex + + # multigraph=False + if isinstance(lines, str): + lines = iter(lines.split("\n")) + lines = iter([line.rstrip("\n") for line in lines]) + G = nx.MultiDiGraph() # are multiedges allowed in Pajek? 
assume yes + labels = [] # in the order of the file, needed for matrix + while lines: + try: + l = next(lines) + except: # EOF + break + if l.lower().startswith("*network"): + try: + label, name = l.split(None, 1) + except ValueError: + # Line was not of the form: *network NAME + pass + else: + G.graph["name"] = name + elif l.lower().startswith("*vertices"): + nodelabels = {} + l, nnodes = l.split() + for i in range(int(nnodes)): + l = next(lines) + try: + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] + except AttributeError: + splitline = shlex.split(str(l)) + id, label = splitline[0:2] + labels.append(label) + G.add_node(label) + nodelabels[id] = label + G.nodes[label]["id"] = id + try: + x, y, shape = splitline[2:5] + G.nodes[label].update( + {"x": float(x), "y": float(y), "shape": shape} + ) + except: + pass + extra_attr = zip(splitline[5::2], splitline[6::2]) + G.nodes[label].update(extra_attr) + elif l.lower().startswith("*edges") or l.lower().startswith("*arcs"): + if l.lower().startswith("*edge"): + # switch from multidigraph to multigraph + G = nx.MultiGraph(G) + if l.lower().startswith("*arcs"): + # switch to directed with multiple arcs for each existing edge + G = G.to_directed() + for l in lines: + try: + splitline = [ + x.decode("utf-8") for x in shlex.split(str(l).encode("utf-8")) + ] + except AttributeError: + splitline = shlex.split(str(l)) + + if len(splitline) < 2: + continue + ui, vi = splitline[0:2] + u = nodelabels.get(ui, ui) + v = nodelabels.get(vi, vi) + # parse the data attached to this edge and put in a dictionary + edge_data = {} + try: + # there should always be a single value on the edge? + w = splitline[2:3] + edge_data.update({"weight": float(w[0])}) + except: + pass + # if there isn't, just assign a 1 + # edge_data.update({'value':1}) + extra_attr = zip(splitline[3::2], splitline[4::2]) + edge_data.update(extra_attr) + # if G.has_edge(u,v): + # multigraph=True + G.add_edge(u, v, **edge_data) + elif l.lower().startswith("*matrix"): + G = nx.DiGraph(G) + adj_list = ( + (labels[row], labels[col], {"weight": int(data)}) + for (row, line) in enumerate(lines) + for (col, data) in enumerate(line.split()) + if int(data) != 0 + ) + G.add_edges_from(adj_list) + + return G + + +def make_qstr(t): + """Returns the string representation of t. + Add outer double-quotes if the string has a space. + """ + if not isinstance(t, str): + t = str(t) + if " " in t: + t = f'"{t}"' + return t diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/sparse6.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/sparse6.py new file mode 100644 index 0000000..b82ae5c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/sparse6.py @@ -0,0 +1,379 @@ +# Original author: D. Eppstein, UC Irvine, August 12, 2003. +# The original code at https://www.ics.uci.edu/~eppstein/PADS/ is public domain. +"""Functions for reading and writing graphs in the *sparse6* format. + +The *sparse6* file format is a space-efficient format for large sparse +graphs. For small graphs or large dense graphs, use the *graph6* file +format. + +For more information, see the `sparse6`_ homepage. + +.. 
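generate_pajek and parse_pajek, both shown above, round-trip a graph through the format; note that parsed node labels come back as strings. A minimal sketch:

    import networkx as nx

    G = nx.path_graph(3)
    pajek = "\n".join(nx.generate_pajek(G))
    H = nx.parse_pajek(pajek)
    # Pajek stores labels as strings, so the parsed nodes are "0", "1", "2".
    assert sorted(H.nodes) == ["0", "1", "2"]
    assert H.number_of_edges() == 2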
_sparse6: https://users.cecs.anu.edu.au/~bdm/data/formats.html + +""" + +import networkx as nx +from networkx.exception import NetworkXError +from networkx.readwrite.graph6 import data_to_n, n_to_data +from networkx.utils import not_implemented_for, open_file + +__all__ = ["from_sparse6_bytes", "read_sparse6", "to_sparse6_bytes", "write_sparse6"] + + +def _generate_sparse6_bytes(G, nodes, header): + """Yield bytes in the sparse6 encoding of a graph. + + `G` is an undirected simple graph. `nodes` is the list of nodes for + which the node-induced subgraph will be encoded; if `nodes` is the + list of all nodes in the graph, the entire graph will be + encoded. `header` is a Boolean that specifies whether to generate + the header ``b'>>sparse6<<'`` before the remaining data. + + This function generates `bytes` objects in the following order: + + 1. the header (if requested), + 2. the encoding of the number of nodes, + 3. each character, one-at-a-time, in the encoding of the requested + node-induced subgraph, + 4. a newline character. + + This function raises :exc:`ValueError` if the graph is too large for + the graph6 format (that is, greater than ``2 ** 36`` nodes). + + """ + n = len(G) + if n >= 2**36: + raise ValueError( + "sparse6 is only defined if number of nodes is less than 2 ** 36" + ) + if header: + yield b">>sparse6<<" + yield b":" + for d in n_to_data(n): + yield str.encode(chr(d + 63)) + + k = 1 + while 1 << k < n: + k += 1 + + def enc(x): + """Big endian k-bit encoding of x""" + return [1 if (x & 1 << (k - 1 - i)) else 0 for i in range(k)] + + edges = sorted((max(u, v), min(u, v)) for u, v in G.edges()) + bits = [] + curv = 0 + for v, u in edges: + if v == curv: # current vertex edge + bits.append(0) + bits.extend(enc(u)) + elif v == curv + 1: # next vertex edge + curv += 1 + bits.append(1) + bits.extend(enc(u)) + else: # skip to vertex v and then add edge to u + curv = v + bits.append(1) + bits.extend(enc(v)) + bits.append(0) + bits.extend(enc(u)) + if k < 6 and n == (1 << k) and ((-len(bits)) % 6) >= k and curv < (n - 1): + # Padding special case: small k, n=2^k, + # more than k bits of padding needed, + # current vertex is not (n-1) -- + # appending 1111... would add a loop on (n-1) + bits.append(0) + bits.extend([1] * ((-len(bits)) % 6)) + else: + bits.extend([1] * ((-len(bits)) % 6)) + + data = [ + (bits[i + 0] << 5) + + (bits[i + 1] << 4) + + (bits[i + 2] << 3) + + (bits[i + 3] << 2) + + (bits[i + 4] << 1) + + (bits[i + 5] << 0) + for i in range(0, len(bits), 6) + ] + + for d in data: + yield str.encode(chr(d + 63)) + yield b"\n" + + +@nx._dispatchable(graphs=None, returns_graph=True) +def from_sparse6_bytes(string): + """Read an undirected graph in sparse6 format from string. + + Parameters + ---------- + string : string + Data in sparse6 format + + Returns + ------- + G : Graph + + Raises + ------ + NetworkXError + If the string is unable to be parsed in sparse6 format + + Examples + -------- + >>> G = nx.from_sparse6_bytes(b":A_") + >>> sorted(G.edges()) + [(0, 1), (0, 1), (0, 1)] + + See Also + -------- + read_sparse6, write_sparse6 + + References + ---------- + .. 
[1] Sparse6 specification + + + """ + if string.startswith(b">>sparse6<<"): + string = string[11:] + if not string.startswith(b":"): + raise NetworkXError("Expected leading colon in sparse6") + + chars = [c - 63 for c in string[1:]] + n, data = data_to_n(chars) + k = 1 + while 1 << k < n: + k += 1 + + def parseData(): + """Returns stream of pairs b[i], x[i] for sparse6 format.""" + chunks = iter(data) + d = None # partial data word + dLen = 0 # how many unparsed bits are left in d + + while 1: + if dLen < 1: + try: + d = next(chunks) + except StopIteration: + return + dLen = 6 + dLen -= 1 + b = (d >> dLen) & 1 # grab top remaining bit + + x = d & ((1 << dLen) - 1) # partially built up value of x + xLen = dLen # how many bits included so far in x + while xLen < k: # now grab full chunks until we have enough + try: + d = next(chunks) + except StopIteration: + return + dLen = 6 + x = (x << 6) + d + xLen += 6 + x = x >> (xLen - k) # shift back the extra bits + dLen = xLen - k + yield b, x + + v = 0 + + G = nx.MultiGraph() + G.add_nodes_from(range(n)) + + multigraph = False + for b, x in parseData(): + if b == 1: + v += 1 + # padding with ones can cause overlarge number here + if x >= n or v >= n: + break + elif x > v: + v = x + else: + if G.has_edge(x, v): + multigraph = True + G.add_edge(x, v) + if not multigraph: + G = nx.Graph(G) + return G + + +def to_sparse6_bytes(G, nodes=None, header=True): + """Convert an undirected graph to bytes in sparse6 format. + + Parameters + ---------- + G : Graph (undirected) + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by ``G.nodes()`` is used. + + header: bool + If True add '>>sparse6<<' bytes to head of data. + + Raises + ------ + NetworkXNotImplemented + If the graph is directed. + + ValueError + If the graph has at least ``2 ** 36`` nodes; the sparse6 format + is only defined for graphs of order less than ``2 ** 36``. + + Examples + -------- + >>> nx.to_sparse6_bytes(nx.path_graph(2)) + b'>>sparse6<<:An\\n' + + See Also + -------- + to_sparse6_bytes, read_sparse6, write_sparse6_bytes + + Notes + ----- + The returned bytes end with a newline character. + + The format does not support edge or node labels. + + References + ---------- + .. [1] Graph6 specification + + + """ + if nodes is not None: + G = G.subgraph(nodes) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + return b"".join(_generate_sparse6_bytes(G, nodes, header)) + + +@open_file(0, mode="rb") +@nx._dispatchable(graphs=None, returns_graph=True) +def read_sparse6(path): + """Read an undirected graph in sparse6 format from path. + + Parameters + ---------- + path : file or string + Filename or file handle to read. + Filenames ending in .gz or .bz2 will be decompressed. + + Returns + ------- + G : Graph/Multigraph or list of Graphs/MultiGraphs + If the file contains multiple lines then a list of graphs is returned + + Raises + ------ + NetworkXError + If the string is unable to be parsed in sparse6 format + + Examples + -------- + You can read a sparse6 file by giving the path to the file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... _ = f.write(b">>sparse6<<:An\\n") + ... _ = f.seek(0) + ... G = nx.read_sparse6(f.name) + >>> list(G.edges()) + [(0, 1)] + + You can also read a sparse6 file by giving an open file-like object:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile() as f: + ... _ = f.write(b">>sparse6<<:An\\n") + ... _ = f.seek(0) + ... 
G = nx.read_sparse6(f) + >>> list(G.edges()) + [(0, 1)] + + See Also + -------- + read_sparse6, from_sparse6_bytes + + References + ---------- + .. [1] Sparse6 specification + + + """ + glist = [] + for line in path: + line = line.strip() + if not len(line): + continue + glist.append(from_sparse6_bytes(line)) + if len(glist) == 1: + return glist[0] + else: + return glist + + +@not_implemented_for("directed") +@open_file(1, mode="wb") +def write_sparse6(G, path, nodes=None, header=True): + """Write graph G to given path in sparse6 format. + + Parameters + ---------- + G : Graph (undirected) + + path : file or string + File or filename to write. + Filenames ending in .gz or .bz2 will be compressed. + + nodes: list or iterable + Nodes are labeled 0...n-1 in the order provided. If None the ordering + given by G.nodes() is used. + + header: bool + If True add '>>sparse6<<' string to head of data + + Raises + ------ + NetworkXError + If the graph is directed + + Examples + -------- + You can write a sparse6 file by giving the path to the file:: + + >>> import tempfile + >>> with tempfile.NamedTemporaryFile(delete=False) as f: + ... nx.write_sparse6(nx.path_graph(2), f.name) + ... print(f.read()) + b'>>sparse6<<:An\\n' + + You can also write a sparse6 file by giving an open file-like object:: + + >>> with tempfile.NamedTemporaryFile() as f: + ... nx.write_sparse6(nx.path_graph(2), f) + ... _ = f.seek(0) + ... print(f.read()) + b'>>sparse6<<:An\\n' + + See Also + -------- + read_sparse6, from_sparse6_bytes + + Notes + ----- + The format does not support edge or node labels. + + References + ---------- + .. [1] Sparse6 specification + + + """ + if nodes is not None: + G = G.subgraph(nodes) + G = nx.convert_node_labels_to_integers(G, ordering="sorted") + for b in _generate_sparse6_bytes(G, nodes, header): + path.write(b) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..f6c606a Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_adjlist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_adjlist.cpython-312.pyc new file mode 100644 index 0000000..6fd6c93 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_adjlist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_edgelist.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_edgelist.cpython-312.pyc new file mode 100644 index 0000000..0d72b0f Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_edgelist.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gexf.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gexf.cpython-312.pyc new file mode 100644 index 0000000..84c43c3 Binary files /dev/null and 
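The byte-level encoder above pairs with from_sparse6_bytes for an in-memory round trip; a minimal sketch (the trailing newline is stripped to match the bare-string form used in the docstrings):

    import networkx as nx
    from networkx.utils import graphs_equal

    G = nx.path_graph(2)
    data = nx.to_sparse6_bytes(G)  # b'>>sparse6<<:An\n'
    # The optional '>>sparse6<<' header is accepted; strip the trailing
    # newline so the payload matches the b':An' style shown above.
    H = nx.from_sparse6_bytes(data.strip())
    assert graphs_equal(G, H)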
b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gexf.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gml.cpython-312.pyc new file mode 100644 index 0000000..1e2489c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_gml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graph6.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graph6.cpython-312.pyc new file mode 100644 index 0000000..a557663 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graph6.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graphml.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graphml.cpython-312.pyc new file mode 100644 index 0000000..7f34fd8 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_graphml.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_leda.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_leda.cpython-312.pyc new file mode 100644 index 0000000..3f7cae6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_leda.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_p2g.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_p2g.cpython-312.pyc new file mode 100644 index 0000000..af4c2ab Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_p2g.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_pajek.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_pajek.cpython-312.pyc new file mode 100644 index 0000000..af6a082 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_pajek.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_sparse6.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_sparse6.cpython-312.pyc new file mode 100644 index 0000000..e016a64 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_sparse6.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_text.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_text.cpython-312.pyc new file mode 100644 index 0000000..45676f2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/__pycache__/test_text.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_adjlist.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_adjlist.py new file mode 100644 index 0000000..f2218eb --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_adjlist.py @@ -0,0 
+1,262 @@ +""" +Unit tests for adjlist. +""" + +import io + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + + +class TestAdjlist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) + + def test_read_multiline_adjlist_1(self): + # Unit test for https://networkx.lanl.gov/trac/ticket/252 + s = b"""# comment line +1 2 +# comment line +2 +3 +""" + bytesIO = io.BytesIO(s) + G = nx.read_multiline_adjlist(bytesIO) + adj = {"1": {"3": {}, "2": {}}, "3": {"1": {}}, "2": {"1": {}}} + assert graphs_equal(G, nx.Graph(adj)) + + def test_unicode(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + + fname = tmp_path / "adjlist.txt" + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist(fname) + assert graphs_equal(G, H) + + def test_latin1_err(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + fname = tmp_path / "adjlist.txt" + with pytest.raises(UnicodeEncodeError): + nx.write_multiline_adjlist(G, fname, encoding="latin-1") + + def test_latin1(self, tmp_path): + G = nx.Graph() + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) + fname = tmp_path / "adjlist.txt" + nx.write_multiline_adjlist(G, fname, encoding="latin-1") + H = nx.read_multiline_adjlist(fname, encoding="latin-1") + assert graphs_equal(G, H) + + def test_parse_adjlist(self): + lines = ["1 2 5", "2 3 4", "3 5", "4", "5"] + nx.parse_adjlist(lines, nodetype=int) # smoke test + with pytest.raises(TypeError): + nx.parse_adjlist(lines, nodetype="int") + lines = ["1 2 5", "2 b", "c"] + with pytest.raises(TypeError): + nx.parse_adjlist(lines, nodetype=int) + + def test_adjlist_graph(self, tmp_path): + G = self.G + fname = tmp_path / "adjlist.txt" + nx.write_adjlist(G, fname) + H = nx.read_adjlist(fname) + H2 = nx.read_adjlist(fname) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_adjlist_digraph(self, tmp_path): + G = self.DG + fname = tmp_path / "adjlist.txt" + nx.write_adjlist(G, fname) + H = nx.read_adjlist(fname, create_using=nx.DiGraph()) + H2 = nx.read_adjlist(fname, create_using=nx.DiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_adjlist_integers(self, tmp_path): + fname = tmp_path / "adjlist.txt" + G = nx.convert_node_labels_to_integers(self.G) + nx.write_adjlist(G, fname) + H = nx.read_adjlist(fname, nodetype=int) + H2 = nx.read_adjlist(fname, nodetype=int) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_adjlist_multigraph(self, tmp_path): + G = self.XG + fname = tmp_path / "adjlist.txt" + nx.write_adjlist(G, fname) + H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = nx.read_adjlist(fname, 
nodetype=int, create_using=nx.MultiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_adjlist_multidigraph(self, tmp_path): + G = self.XDG + fname = tmp_path / "adjlist.txt" + nx.write_adjlist(G, fname) + H = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + H2 = nx.read_adjlist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_adjlist_delimiter(self): + fh = io.BytesIO() + G = nx.path_graph(3) + nx.write_adjlist(G, fh, delimiter=":") + fh.seek(0) + H = nx.read_adjlist(fh, nodetype=int, delimiter=":") + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + +class TestMultilineAdjlist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.DG.remove_edge("b", "a") + cls.DG.remove_edge("b", "c") + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) + + def test_parse_multiline_adjlist(self): + lines = [ + "1 2", + "b {'weight':3, 'name': 'Frodo'}", + "c {}", + "d 1", + "e {'weight':6, 'name': 'Saruman'}", + ] + nx.parse_multiline_adjlist(iter(lines)) # smoke test + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + nx.parse_multiline_adjlist(iter(lines), edgetype=str) # smoke test + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + lines = ["1 a"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) + lines = ["a 2"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines), nodetype=int) + lines = ["1 2"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) + lines = ["1 2", "2 {}"] + with pytest.raises(TypeError): + nx.parse_multiline_adjlist(iter(lines)) + + def test_multiline_adjlist_graph(self, tmp_path): + G = self.G + fname = tmp_path / "adjlist.txt" + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist(fname) + H2 = nx.read_multiline_adjlist(fname) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_multiline_adjlist_digraph(self, tmp_path): + G = self.DG + fname = tmp_path / "adjlist.txt" + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph()) + H2 = nx.read_multiline_adjlist(fname, create_using=nx.DiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_multiline_adjlist_integers(self, tmp_path): + fname = tmp_path / "adjlist.txt" + G = nx.convert_node_labels_to_integers(self.G) + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist(fname, nodetype=int) + H2 = nx.read_multiline_adjlist(fname, nodetype=int) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_multiline_adjlist_multigraph(self, tmp_path): + G = self.XG + fname = tmp_path 
/ "adjlist.txt" + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiGraph() + ) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_multiline_adjlist_multidigraph(self, tmp_path): + G = self.XDG + fname = tmp_path / "adjlist.txt" + nx.write_multiline_adjlist(G, fname) + H = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiDiGraph() + ) + H2 = nx.read_multiline_adjlist( + fname, nodetype=int, create_using=nx.MultiDiGraph() + ) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_multiline_adjlist_delimiter(self): + fh = io.BytesIO() + G = nx.path_graph(3) + nx.write_multiline_adjlist(G, fh, delimiter=":") + fh.seek(0) + H = nx.read_multiline_adjlist(fh, nodetype=int, delimiter=":") + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + +@pytest.mark.parametrize( + ("lines", "delim"), + ( + (["1 2 5", "2 3 4", "3 5", "4", "5"], None), # No extra whitespace + (["1\t2\t5", "2\t3\t4", "3\t5", "4", "5"], "\t"), # tab-delimited + ( + ["1\t2\t5", "2\t3\t4", "3\t5\t", "4\t", "5"], + "\t", + ), # tab-delimited, extra delims + ( + ["1\t2\t5", "2\t3\t4", "3\t5\t\t\n", "4\t", "5"], + "\t", + ), # extra delim+newlines + ), +) +def test_adjlist_rstrip_parsing(lines, delim): + """Regression test related to gh-7465""" + expected = nx.Graph([(1, 2), (1, 5), (2, 3), (2, 4), (3, 5)]) + nx.utils.graphs_equal(nx.parse_adjlist(lines, delimiter=delim), expected) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_edgelist.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_edgelist.py new file mode 100644 index 0000000..fe58b3b --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_edgelist.py @@ -0,0 +1,314 @@ +""" +Unit tests for edgelists. 
+""" + +import io +import textwrap + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + +edges_no_data = textwrap.dedent( + """ + # comment line + 1 2 + # comment line + 2 3 + """ +) + + +edges_with_values = textwrap.dedent( + """ + # comment line + 1 2 2.0 + # comment line + 2 3 3.0 + """ +) + + +edges_with_weight = textwrap.dedent( + """ + # comment line + 1 2 {'weight':2.0} + # comment line + 2 3 {'weight':3.0} + """ +) + + +edges_with_multiple_attrs = textwrap.dedent( + """ + # comment line + 1 2 {'weight':2.0, 'color':'green'} + # comment line + 2 3 {'weight':3.0, 'color':'red'} + """ +) + + +edges_with_multiple_attrs_csv = textwrap.dedent( + """ + # comment line + 1, 2, {'weight':2.0, 'color':'green'} + # comment line + 2, 3, {'weight':3.0, 'color':'red'} + """ +) + + +_expected_edges_weights = [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})] +_expected_edges_multiattr = [ + (1, 2, {"weight": 2.0, "color": "green"}), + (2, 3, {"weight": 3.0, "color": "red"}), +] + + +@pytest.mark.parametrize( + ("data", "extra_kwargs"), + ( + (edges_no_data, {}), + (edges_with_values, {}), + (edges_with_weight, {}), + (edges_with_multiple_attrs, {}), + (edges_with_multiple_attrs_csv, {"delimiter": ","}), + ), +) +def test_read_edgelist_no_data(data, extra_kwargs): + bytesIO = io.BytesIO(data.encode("utf-8")) + G = nx.read_edgelist(bytesIO, nodetype=int, data=False, **extra_kwargs) + assert edges_equal(G.edges(), [(1, 2), (2, 3)]) + + +def test_read_weighted_edgelist(): + bytesIO = io.BytesIO(edges_with_values.encode("utf-8")) + G = nx.read_weighted_edgelist(bytesIO, nodetype=int) + assert edges_equal(G.edges(data=True), _expected_edges_weights) + + +@pytest.mark.parametrize( + ("data", "extra_kwargs", "expected"), + ( + (edges_with_weight, {}, _expected_edges_weights), + (edges_with_multiple_attrs, {}, _expected_edges_multiattr), + (edges_with_multiple_attrs_csv, {"delimiter": ","}, _expected_edges_multiattr), + ), +) +def test_read_edgelist_with_data(data, extra_kwargs, expected): + bytesIO = io.BytesIO(data.encode("utf-8")) + G = nx.read_edgelist(bytesIO, nodetype=int, **extra_kwargs) + assert edges_equal(G.edges(data=True), expected) + + +@pytest.fixture +def example_graph(): + G = nx.Graph() + G.add_weighted_edges_from([(1, 2, 3.0), (2, 3, 27.0), (3, 4, 3.0)]) + return G + + +def test_parse_edgelist_no_data(example_graph): + G = example_graph + H = nx.parse_edgelist(["1 2", "2 3", "3 4"], nodetype=int) + assert nodes_equal(G.nodes, H.nodes) + assert edges_equal(G.edges, H.edges) + + +def test_parse_edgelist_with_data_dict(example_graph): + G = example_graph + H = nx.parse_edgelist( + ["1 2 {'weight': 3}", "2 3 {'weight': 27}", "3 4 {'weight': 3.0}"], nodetype=int + ) + assert nodes_equal(G.nodes, H.nodes) + assert edges_equal(G.edges(data=True), H.edges(data=True)) + + +def test_parse_edgelist_with_data_list(example_graph): + G = example_graph + H = nx.parse_edgelist( + ["1 2 3", "2 3 27", "3 4 3.0"], nodetype=int, data=(("weight", float),) + ) + assert nodes_equal(G.nodes, H.nodes) + assert edges_equal(G.edges(data=True), H.edges(data=True)) + + +def test_parse_edgelist(): + # ignore lines with less than 2 nodes + lines = ["1;2", "2 3", "3 4"] + G = nx.parse_edgelist(lines, nodetype=int) + assert list(G.edges()) == [(2, 3), (3, 4)] + # unknown nodetype + with pytest.raises(TypeError, match="Failed to convert nodes"): + lines = ["1 2", "2 3", "3 4"] + nx.parse_edgelist(lines, nodetype="nope") + # lines have invalid edge format + with 
pytest.raises(TypeError, match="Failed to convert edge data"): + lines = ["1 2 3", "2 3", "3 4"] + nx.parse_edgelist(lines, nodetype=int) + # edge data and data_keys not the same length + with pytest.raises(IndexError, match="not the same length"): + lines = ["1 2 3", "2 3 27", "3 4 3.0"] + nx.parse_edgelist( + lines, nodetype=int, data=(("weight", float), ("capacity", int)) + ) + # edge data can't be converted to edge type + with pytest.raises(TypeError, match="Failed to convert"): + lines = ["1 2 't1'", "2 3 't3'", "3 4 't3'"] + nx.parse_edgelist(lines, nodetype=int, data=(("weight", float),)) + + +def test_comments_None(): + edgelist = ["node#1 node#2", "node#2 node#3"] + # comments=None supported to ignore all comment characters + G = nx.parse_edgelist(edgelist, comments=None) + H = nx.Graph([e.split(" ") for e in edgelist]) + assert edges_equal(G.edges, H.edges) + + +class TestEdgelist: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + cls.XG = nx.MultiGraph() + cls.XG.add_weighted_edges_from([(1, 2, 5), (1, 2, 5), (1, 2, 1), (3, 3, 42)]) + cls.XDG = nx.MultiDiGraph(cls.XG) + + def test_write_edgelist_1(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + nx.write_edgelist(G, fh, data=False) + fh.seek(0) + assert fh.read() == b"1 2\n2 3\n" + + def test_write_edgelist_2(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edges_from([(1, 2), (2, 3)]) + nx.write_edgelist(G, fh, data=True) + fh.seek(0) + assert fh.read() == b"1 2 {}\n2 3 {}\n" + + def test_write_edgelist_3(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edge(1, 2, weight=2.0) + G.add_edge(2, 3, weight=3.0) + nx.write_edgelist(G, fh, data=True) + fh.seek(0) + assert fh.read() == b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n" + + def test_write_edgelist_4(self): + fh = io.BytesIO() + G = nx.Graph() + G.add_edge(1, 2, weight=2.0) + G.add_edge(2, 3, weight=3.0) + nx.write_edgelist(G, fh, data=[("weight")]) + fh.seek(0) + assert fh.read() == b"1 2 2.0\n2 3 3.0\n" + + def test_unicode(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname) + assert graphs_equal(G, H) + + def test_latin1_issue(self, tmp_path): + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", **{name2: 3}) + fname = tmp_path / "el.txt" + with pytest.raises(UnicodeEncodeError): + nx.write_edgelist(G, fname, encoding="latin-1") + + def test_latin1(self, tmp_path): + G = nx.Graph() + name1 = "Bj" + chr(246) + "rk" + name2 = chr(220) + "ber" + G.add_edge(name1, "Radiohead", **{name2: 3}) + fname = tmp_path / "el.txt" + + nx.write_edgelist(G, fname, encoding="latin-1") + H = nx.read_edgelist(fname, encoding="latin-1") + assert graphs_equal(G, H) + + def test_edgelist_graph(self, tmp_path): + G = self.G + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname) + H2 = nx.read_edgelist(fname) + assert H is not H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_digraph(self, tmp_path): 
+ G = self.DG + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname, create_using=nx.DiGraph()) + H2 = nx.read_edgelist(fname, create_using=nx.DiGraph()) + assert H is not H2 # they should be different graphs + G.remove_node("g") # isolated nodes are not written in edgelist + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_integers(self, tmp_path): + G = nx.convert_node_labels_to_integers(self.G) + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname, nodetype=int) + # isolated nodes are not written in edgelist + G.remove_nodes_from(list(nx.isolates(G))) + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_multigraph(self, tmp_path): + G = self.XG + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) + H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + def test_edgelist_multidigraph(self, tmp_path): + G = self.XDG + fname = tmp_path / "el.txt" + nx.write_edgelist(G, fname) + H = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + H2 = nx.read_edgelist(fname, nodetype=int, create_using=nx.MultiDiGraph()) + assert H is not H2 # they should be different graphs + assert nodes_equal(list(H), list(G)) + assert edges_equal(list(H.edges()), list(G.edges())) + + +def test_edgelist_consistent_strip_handling(): + """See gh-7462 + + Input when printed looks like:: + + 1 2 3 + 2 3 + 3 4 3.0 + + Note the trailing \\t after the `3` in the second row, indicating an empty + data value. + """ + s = io.StringIO("1\t2\t3\n2\t3\t\n3\t4\t3.0") + G = nx.parse_edgelist(s, delimiter="\t", nodetype=int, data=[("value", str)]) + assert sorted(G.edges(data="value")) == [(1, 2, "3"), (2, 3, ""), (3, 4, "3.0")] diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gexf.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gexf.py new file mode 100644 index 0000000..d69a5ef --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gexf.py @@ -0,0 +1,579 @@ +import io +import time + +import pytest + +import networkx as nx + + +@pytest.mark.parametrize("time_attr", ("start", "end")) +@pytest.mark.parametrize("dyn_attr", ("static", "dynamic")) +def test_dynamic_graph_has_timeformat(time_attr, dyn_attr, tmp_path): + """Ensure that graphs which have a 'start' or 'stop' attribute get a + 'timeformat' attribute upon parsing. 
See gh-7914.""" + G = nx.MultiGraph(mode=dyn_attr) + G.add_node(0) + G.nodes[0][time_attr] = 1 + # Write out + fname = tmp_path / "foo.gexf" + nx.write_gexf(G, fname) + # Check that timeformat is added to saved data + with open(fname) as fh: + assert 'timeformat="long"' in fh.read() + # Round-trip + H = nx.read_gexf(fname) + # If any node has a "start" or "end" attr, it is considered dynamic + # regardless of the graph "mode" attr + assert H.graph["mode"] == "dynamic" + assert nx.utils.nodes_equal(G.edges, H.edges) + + +class TestGEXF: + @classmethod + def setup_class(cls): + cls.simple_directed_data = """ + + + + + + + + + + + +""" + cls.simple_directed_graph = nx.DiGraph() + cls.simple_directed_graph.add_node("0", label="Hello") + cls.simple_directed_graph.add_node("1", label="World") + cls.simple_directed_graph.add_edge("0", "1", id="0") + + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) + + cls.attribute_data = """\ + + + Gephi.org + A Web network + + + + + + + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + cls.attribute_graph = nx.DiGraph() + cls.attribute_graph.graph["node_default"] = {"frog": True} + cls.attribute_graph.add_node( + "0", label="Gephi", url="https://gephi.org", indegree=1, frog=False + ) + cls.attribute_graph.add_node( + "1", label="Webatlas", url="http://webatlas.fr", indegree=2, frog=False + ) + cls.attribute_graph.add_node( + "2", label="RTGI", url="http://rtgi.fr", indegree=1, frog=True + ) + cls.attribute_graph.add_node( + "3", + label="BarabasiLab", + url="http://barabasilab.com", + indegree=1, + frog=True, + ) + cls.attribute_graph.add_edge("0", "1", id="0", label="foo") + cls.attribute_graph.add_edge("0", "2", id="1") + cls.attribute_graph.add_edge("1", "0", id="2") + cls.attribute_graph.add_edge("2", "1", id="3") + cls.attribute_graph.add_edge("0", "3", id="4") + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) + + cls.simple_undirected_data = """ + + + + + + + + + + + +""" + cls.simple_undirected_graph = nx.Graph() + cls.simple_undirected_graph.add_node("0", label="Hello") + cls.simple_undirected_graph.add_node("1", label="World") + cls.simple_undirected_graph.add_edge("0", "1", id="0") + + cls.simple_undirected_fh = io.BytesIO( + cls.simple_undirected_data.encode("UTF-8") + ) + + def test_read_simple_directed_graphml(self): + G = self.simple_directed_graph + H = nx.read_gexf(self.simple_directed_fh) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) + self.simple_directed_fh.seek(0) + + def test_write_read_simple_directed_graphml(self): + G = self.simple_directed_graph + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) + self.simple_directed_fh.seek(0) + + def test_read_simple_undirected_graphml(self): + G = self.simple_undirected_graph + H = nx.read_gexf(self.simple_undirected_fh) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + self.simple_undirected_fh.seek(0) + + def test_read_attribute_graphml(self): + G = self.attribute_graph + H = nx.read_gexf(self.attribute_fh) + assert sorted(G.nodes(True)) == sorted(H.nodes(data=True)) + ge = sorted(G.edges(data=True)) + he = 
sorted(H.edges(data=True)) + for a, b in zip(ge, he): + assert a == b + self.attribute_fh.seek(0) + + def test_directed_edge_in_undirected(self): + s = """ + + + + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) + + def test_undirected_edge_in_directed(self): + s = """ + + + + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) + + def test_key_raises(self): + s = """ + + + + + + + + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_gexf, fh) + + def test_relabel(self): + s = """ + + + + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_gexf(fh, relabel=True) + assert sorted(G.nodes()) == ["Hello", "Word"] + + def test_default_attribute(self): + G = nx.Graph() + G.add_node(1, label="1", color="green") + nx.add_path(G, [0, 1, 2, 3]) + G.add_edge(1, 2, foo=3) + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"foo": 7} + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + # Reading a gexf graph always sets mode attribute to either + # 'static' or 'dynamic'. Remove the mode attribute from the + # read graph for the sake of comparing remaining attributes. + del H.graph["mode"] + assert G.graph == H.graph + + def test_serialize_ints_to_strings(self): + G = nx.Graph() + G.add_node(1, id=7, label=77) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert list(H) == [7] + assert H.nodes[7]["label"] == "77" + + def test_write_with_node_attributes(self): + # Addresses #673. 
+ G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (2, 3)]) + for i in range(4): + G.nodes[i]["id"] = i + G.nodes[i]["label"] = i + G.nodes[i]["pid"] = i + G.nodes[i]["start"] = i + G.nodes[i]["end"] = i + 1 + + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + +""" + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained + + def test_edge_id_construct(self): + G = nx.Graph() + G.add_edges_from([(0, 1, {"id": 0}), (1, 2, {"id": 2}), (2, 3)]) + + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + +""" + + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained + + def test_numpy_type(self): + np = pytest.importorskip("numpy") + G = nx.path_graph(4) + nx.set_node_attributes(G, {n: n for n in np.arange(4)}, "number") + G[0][1]["edge-number"] = np.float64(1.1) + + expected = f""" + + NetworkX {nx.__version__} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + obtained = "\n".join(nx.generate_gexf(G)) + assert expected == obtained + + def test_bool(self): + G = nx.Graph() + G.add_node(1, testattr=True) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert H.nodes[1]["testattr"] + + # Test for NaN, INF and -INF + def test_specials(self): + from math import isnan + + inf, nan = float("inf"), float("nan") + G = nx.Graph() + G.add_node(1, testattr=inf, strdata="inf", key="a") + G.add_node(2, testattr=nan, strdata="nan", key="b") + G.add_node(3, testattr=-inf, strdata="-inf", key="c") + + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + filetext = fh.read() + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + + assert b"INF" in filetext + assert b"NaN" in filetext + assert b"-INF" in filetext + + assert H.nodes[1]["testattr"] == inf + assert isnan(H.nodes[2]["testattr"]) + assert H.nodes[3]["testattr"] == -inf + + assert H.nodes[1]["strdata"] == "inf" + assert H.nodes[2]["strdata"] == "nan" + assert H.nodes[3]["strdata"] == "-inf" + + assert H.nodes[1]["networkx_key"] == "a" + assert H.nodes[2]["networkx_key"] == "b" + assert H.nodes[3]["networkx_key"] == "c" + + def test_simple_list(self): + G = nx.Graph() + list_value = [(1, 2, 3), (9, 1, 2)] + G.add_node(1, key=list_value) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert H.nodes[1]["networkx_key"] == list_value + + def test_dynamic_mode(self): + G = nx.Graph() + G.add_node(1, label="1", color="green") + G.graph["mode"] = "dynamic" + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_multigraph_with_missing_attributes(self): + G = nx.MultiGraph() + G.add_node(0, label="1", color="green") + G.add_node(1, label="2", color="green") + G.add_edge(0, 1, id="0", weight=3, type="undirected", start=0, end=1) + G.add_edge(0, 1, id="1", label="foo", start=0, end=1) + G.add_edge(0, 1) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_missing_viz_attributes(self): + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, 
"g": 0, "b": 256} + G.nodes[0]["viz"]["shape"] = "http://random.url" + G.nodes[0]["viz"]["thickness"] = 2 + fh = io.BytesIO() + nx.write_gexf(G, fh, version="1.1draft") + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + # Test missing alpha value for version >draft1.1 - set default alpha value + # to 1.0 instead of `None` when writing for better general compatibility + fh = io.BytesIO() + # G.nodes[0]["viz"]["color"] does not have an alpha value explicitly defined + # so the default is used instead + nx.write_gexf(G, fh, version="1.2draft") + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert H.nodes[0]["viz"]["color"]["a"] == 1.0 + + # Second graph for the other branch + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["viz"] = {"size": 54} + G.nodes[0]["viz"]["position"] = {"x": 0, "y": 1, "z": 0} + G.nodes[0]["viz"]["color"] = {"r": 0, "g": 0, "b": 256, "a": 0.5} + G.nodes[0]["viz"]["shape"] = "ftp://random.url" + G.nodes[0]["viz"]["thickness"] = 2 + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_slice_and_spell(self): + # Test spell first, so version = 1.2 + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["spells"] = [(1, 2)] + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + G = nx.Graph() + G.add_node(0, label="1", color="green") + G.nodes[0]["slices"] = [(1, 2)] + fh = io.BytesIO() + nx.write_gexf(G, fh, version="1.1draft") + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) + + def test_add_parent(self): + G = nx.Graph() + G.add_node(0, label="1", color="green", parents=[1, 2]) + fh = io.BytesIO() + nx.write_gexf(G, fh) + fh.seek(0) + H = nx.read_gexf(fh, node_type=int) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(sorted(e) for e in G.edges()) == sorted( + sorted(e) for e in H.edges() + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gml.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gml.py new file mode 100644 index 0000000..df25097 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_gml.py @@ -0,0 +1,744 @@ +import codecs +import io +import math +from ast import literal_eval +from contextlib import contextmanager +from textwrap import dedent + +import pytest + +import networkx as nx +from networkx.readwrite.gml import literal_destringizer, literal_stringizer + + +class TestGraph: + @classmethod + def setup_class(cls): + cls.simple_data = """Creator "me" +Version "xx" +graph [ + comment "This is a sample graph" + directed 1 + IsPlanar 1 + pos [ x 0 y 1 ] + node [ + id 1 + label "Node 1" + pos [ x 1 y 1 ] + ] + node [ + id 2 + pos [ x 1 y 2 ] + label "Node 2" + ] + node [ + id 3 + label "Node 3" + pos [ x 1 y 3 ] + ] + edge [ + source 1 + target 2 + label "Edge from node 1 to node 2" + color [line "blue" thickness 3] + + ] + edge [ + source 2 + target 3 + label "Edge from 
node 2 to node 3" + ] + edge [ + source 3 + target 1 + label "Edge from node 3 to node 1" + ] +] +""" + + def test_parse_gml_cytoscape_bug(self): + # example from issue #321, originally #324 in trac + cytoscape_example = """ +Creator "Cytoscape" +Version 1.0 +graph [ + node [ + root_index -3 + id -3 + graphics [ + x -96.0 + y -67.0 + w 40.0 + h 40.0 + fill "#ff9999" + type "ellipse" + outline "#666666" + outline_width 1.5 + ] + label "node2" + ] + node [ + root_index -2 + id -2 + graphics [ + x 63.0 + y 37.0 + w 40.0 + h 40.0 + fill "#ff9999" + type "ellipse" + outline "#666666" + outline_width 1.5 + ] + label "node1" + ] + node [ + root_index -1 + id -1 + graphics [ + x -31.0 + y -17.0 + w 40.0 + h 40.0 + fill "#ff9999" + type "ellipse" + outline "#666666" + outline_width 1.5 + ] + label "node0" + ] + edge [ + root_index -2 + target -2 + source -1 + graphics [ + width 1.5 + fill "#0000ff" + type "line" + Line [ + ] + source_arrow 0 + target_arrow 3 + ] + label "DirectedEdge" + ] + edge [ + root_index -1 + target -1 + source -3 + graphics [ + width 1.5 + fill "#0000ff" + type "line" + Line [ + ] + source_arrow 0 + target_arrow 3 + ] + label "DirectedEdge" + ] +] +""" + nx.parse_gml(cytoscape_example) + + def test_parse_gml(self): + G = nx.parse_gml(self.simple_data, label="label") + assert sorted(G.nodes()) == ["Node 1", "Node 2", "Node 3"] + assert sorted(G.edges()) == [ + ("Node 1", "Node 2"), + ("Node 2", "Node 3"), + ("Node 3", "Node 1"), + ] + + assert sorted(G.edges(data=True)) == [ + ( + "Node 1", + "Node 2", + { + "color": {"line": "blue", "thickness": 3}, + "label": "Edge from node 1 to node 2", + }, + ), + ("Node 2", "Node 3", {"label": "Edge from node 2 to node 3"}), + ("Node 3", "Node 1", {"label": "Edge from node 3 to node 1"}), + ] + + def test_read_gml(self, tmp_path): + fname = tmp_path / "test.gml" + with open(fname, "w") as fh: + fh.write(self.simple_data) + Gin = nx.read_gml(fname, label="label") + G = nx.parse_gml(self.simple_data, label="label") + assert sorted(G.nodes(data=True)) == sorted(Gin.nodes(data=True)) + assert sorted(G.edges(data=True)) == sorted(Gin.edges(data=True)) + + def test_labels_are_strings(self): + # GML requires labels to be strings (i.e., in quotes) + answer = """graph [ + node [ + id 0 + label "1203" + ] +]""" + G = nx.Graph() + G.add_node(1203) + data = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer)) + assert data == answer + + def test_relabel_duplicate(self): + data = """ +graph +[ + label "" + directed 1 + node + [ + id 0 + label "same" + ] + node + [ + id 1 + label "same" + ] +] +""" + fh = io.BytesIO(data.encode("UTF-8")) + fh.seek(0) + pytest.raises(nx.NetworkXError, nx.read_gml, fh, label="label") + + @pytest.mark.parametrize("stringizer", (None, literal_stringizer)) + def test_tuplelabels(self, stringizer): + # https://github.com/networkx/networkx/pull/1048 + # Writing tuple labels to GML failed. + G = nx.Graph() + G.add_edge((0, 1), (1, 0)) + data = "\n".join(nx.generate_gml(G, stringizer=stringizer)) + answer = """graph [ + node [ + id 0 + label "(0,1)" + ] + node [ + id 1 + label "(1,0)" + ] + edge [ + source 0 + target 1 + ] +]""" + assert data == answer + + def test_quotes(self, tmp_path): + # https://github.com/networkx/networkx/issues/1061 + # Encoding quotes as HTML entities. 
+ G = nx.path_graph(1) + G.name = "path_graph(1)" + attr = 'This is "quoted" and this is a copyright: ' + chr(169) + G.nodes[0]["demo"] = attr + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") + answer = """graph [ + name "path_graph(1)" + node [ + id 0 + label "0" + demo "This is &#34;quoted&#34; and this is a copyright: &#169;" + ] +]""" + assert data == answer + + def test_unicode_node(self, tmp_path): + node = "node" + chr(169) + G = nx.Graph() + G.add_node(node) + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") + answer = """graph [ + node [ + id 0 + label "node&#169;" + ] +]""" + assert data == answer + + def test_float_label(self, tmp_path): + node = 1.0 + G = nx.Graph() + G.add_node(node) + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") + answer = """graph [ + node [ + id 0 + label "1.0" + ] +]""" + assert data == answer + + def test_special_float_label(self, tmp_path): + special_floats = [float("nan"), float("+inf"), float("-inf")] + try: + import numpy as np + + special_floats += [np.nan, np.inf, np.inf * -1] + except ImportError: + special_floats += special_floats + + G = nx.cycle_graph(len(special_floats)) + attrs = dict(enumerate(special_floats)) + nx.set_node_attributes(G, attrs, "nodefloat") + edges = list(G.edges) + attrs = {edges[i]: value for i, value in enumerate(special_floats)} + nx.set_edge_attributes(G, attrs, "edgefloat") + + with open(tmp_path / "test.gml", "w+b") as fobj: + nx.write_gml(G, fobj) + fobj.seek(0) + # Should be bytes in 2.x and 3.x + data = fobj.read().strip().decode("ascii") + answer = """graph [ + node [ + id 0 + label "0" + nodefloat NAN + ] + node [ + id 1 + label "1" + nodefloat +INF + ] + node [ + id 2 + label "2" + nodefloat -INF + ] + node [ + id 3 + label "3" + nodefloat NAN + ] + node [ + id 4 + label "4" + nodefloat +INF + ] + node [ + id 5 + label "5" + nodefloat -INF + ] + edge [ + source 0 + target 1 + edgefloat NAN + ] + edge [ + source 0 + target 5 + edgefloat +INF + ] + edge [ + source 1 + target 2 + edgefloat -INF + ] + edge [ + source 2 + target 3 + edgefloat NAN + ] + edge [ + source 3 + target 4 + edgefloat +INF + ] + edge [ + source 4 + target 5 + edgefloat -INF + ] +]""" + assert data == answer + + fobj.seek(0) + graph = nx.read_gml(fobj) + for indx, value in enumerate(special_floats): + node_value = graph.nodes[str(indx)]["nodefloat"] + if math.isnan(value): + assert math.isnan(node_value) + else: + assert node_value == value + + edge = edges[indx] + string_edge = (str(edge[0]), str(edge[1])) + edge_value = graph.edges[string_edge]["edgefloat"] + if math.isnan(value): + assert math.isnan(edge_value) + else: + assert edge_value == value + + def test_name(self): + G = nx.parse_gml('graph [ name "x" node [ id 0 label "x" ] ]') + assert "x" == G.graph["name"] + G = nx.parse_gml('graph [ node [ id 0 label "x" ] ]') + assert "" == G.name + assert "name" not in G.graph + + def test_graph_types(self): + for directed in [None, False, True]: + for multigraph in [None, False, True]: + gml = "graph [" + if directed is not None: + gml += " directed " + str(int(directed)) + if multigraph is not None: + gml += " multigraph " + str(int(multigraph)) + gml += ' node [ id 0 label "0" ]' + gml += " edge [
source 0 target 0 ]" + gml += " ]" + G = nx.parse_gml(gml) + assert bool(directed) == G.is_directed() + assert bool(multigraph) == G.is_multigraph() + gml = "graph [\n" + if directed is True: + gml += " directed 1\n" + if multigraph is True: + gml += " multigraph 1\n" + gml += """ node [ + id 0 + label "0" + ] + edge [ + source 0 + target 0 +""" + if multigraph: + gml += " key 0\n" + gml += " ]\n]" + assert gml == "\n".join(nx.generate_gml(G)) + + def test_data_types(self): + data = [ + True, + False, + 10**20, + -2e33, + "'", + '"&&&""', + [{(b"\xfd",): "\x7f", chr(0x4444): (1, 2)}, (2, "3")], + ] + data.append(chr(0x14444)) + data.append(literal_eval("{2.3j, 1 - 2.3j, ()}")) + G = nx.Graph() + G.name = data + G.graph["data"] = data + G.add_node(0, int=-1, data={"data": data}) + G.add_edge(0, 0, float=-2.5, data=data) + gml = "\n".join(nx.generate_gml(G, stringizer=literal_stringizer)) + G = nx.parse_gml(gml, destringizer=literal_destringizer) + assert data == G.name + assert {"name": data, "data": data} == G.graph + assert list(G.nodes(data=True)) == [(0, {"int": -1, "data": {"data": data}})] + assert list(G.edges(data=True)) == [(0, 0, {"float": -2.5, "data": data})] + G = nx.Graph() + G.graph["data"] = "frozenset([1, 2, 3])" + G = nx.parse_gml(nx.generate_gml(G), destringizer=literal_eval) + assert G.graph["data"] == "frozenset([1, 2, 3])" + + def test_escape_unescape(self): + gml = """graph [ + name "&"䑄��&unknown;" +]""" + G = nx.parse_gml(gml) + assert ( + '&"\x0f' + chr(0x4444) + "��&unknown;" + == G.name + ) + gml = "\n".join(nx.generate_gml(G)) + alnu = "#1234567890;&#x1234567890abcdef" + answer = ( + """graph [ + name "&"䑄&""" + + alnu + + """;&unknown;" +]""" + ) + assert answer == gml + + def test_exceptions(self, tmp_path): + pytest.raises(ValueError, literal_destringizer, "(") + pytest.raises(ValueError, literal_destringizer, "frozenset([1, 2, 3])") + pytest.raises(ValueError, literal_destringizer, literal_destringizer) + pytest.raises(ValueError, literal_stringizer, frozenset([1, 2, 3])) + pytest.raises(ValueError, literal_stringizer, literal_stringizer) + with open(tmp_path / "test.gml", "w+b") as f: + f.write(codecs.BOM_UTF8 + b"graph[]") + f.seek(0) + pytest.raises(nx.NetworkXError, nx.read_gml, f) + + def assert_parse_error(gml): + pytest.raises(nx.NetworkXError, nx.parse_gml, gml) + + assert_parse_error(["graph [\n\n", "]"]) + assert_parse_error("") + assert_parse_error('Creator ""') + assert_parse_error("0") + assert_parse_error("graph ]") + assert_parse_error("graph [ 1 ]") + assert_parse_error("graph [ 1.E+2 ]") + assert_parse_error('graph [ "A" ]') + assert_parse_error("graph [ ] graph ]") + assert_parse_error("graph [ ] graph [ ]") + assert_parse_error("graph [ data [1, 2, 3] ]") + assert_parse_error("graph [ node [ ] ]") + assert_parse_error("graph [ node [ id 0 ] ]") + nx.parse_gml('graph [ node [ id "a" ] ]', label="id") + assert_parse_error("graph [ node [ id 0 label 0 ] node [ id 0 label 1 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] node [ id 1 label 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 0 ] ]") + nx.parse_gml("graph [edge [ source 0 target 0 ] node [ id 0 label 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 1 target 0 ] ]") + assert_parse_error("graph [ node [ id 0 label 0 ] edge [ source 0 target 1 ] ]") + assert_parse_error( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ 
source 1 target 0 ] ]" + ) + nx.parse_gml( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ source 1 target 0 ] " + "directed 1 ]" + ) + nx.parse_gml( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 ] edge [ source 0 target 1 ]" + "multigraph 1 ]" + ) + nx.parse_gml( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 ]" + "multigraph 1 ]" + ) + assert_parse_error( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 0 target 1 key 0 ]" + "multigraph 1 ]" + ) + nx.parse_gml( + "graph [ node [ id 0 label 0 ] node [ id 1 label 1 ] " + "edge [ source 0 target 1 key 0 ] edge [ source 1 target 0 key 0 ]" + "directed 1 multigraph 1 ]" + ) + + # Tests for string convertible alphanumeric id and label values + nx.parse_gml("graph [edge [ source a target a ] node [ id a label b ] ]") + nx.parse_gml( + "graph [ node [ id n42 label 0 ] node [ id x43 label 1 ]" + "edge [ source n42 target x43 key 0 ]" + "edge [ source x43 target n42 key 0 ]" + "directed 1 multigraph 1 ]" + ) + assert_parse_error( + "graph [edge [ source '\u4200' target '\u4200' ] " + + "node [ id '\u4200' label b ] ]" + ) + + def assert_generate_error(*args, **kwargs): + pytest.raises( + nx.NetworkXError, lambda: list(nx.generate_gml(*args, **kwargs)) + ) + + G = nx.Graph() + G.graph[3] = 3 + assert_generate_error(G) + G = nx.Graph() + G.graph["3"] = 3 + assert_generate_error(G) + G = nx.Graph() + G.graph["data"] = frozenset([1, 2, 3]) + assert_generate_error(G, stringizer=literal_stringizer) + + def test_label_kwarg(self): + G = nx.parse_gml(self.simple_data, label="id") + assert sorted(G.nodes) == [1, 2, 3] + labels = [G.nodes[n]["label"] for n in sorted(G.nodes)] + assert labels == ["Node 1", "Node 2", "Node 3"] + + G = nx.parse_gml(self.simple_data, label=None) + assert sorted(G.nodes) == [1, 2, 3] + labels = [G.nodes[n]["label"] for n in sorted(G.nodes)] + assert labels == ["Node 1", "Node 2", "Node 3"] + + def test_outofrange_integers(self, tmp_path): + # GML restricts integers to 32 signed bits. 
+ # Check that we honor this restriction on export + G = nx.Graph() + # Test export for numbers that barely fit or don't fit into 32 bits, + # and 3 numbers in the middle + numbers = { + "toosmall": (-(2**31)) - 1, + "small": -(2**31), + "med1": -4, + "med2": 0, + "med3": 17, + "big": (2**31) - 1, + "toobig": 2**31, + } + G.add_node("Node", **numbers) + + fname = tmp_path / "test.gml" + nx.write_gml(G, fname) + # Check that the export wrote the nonfitting numbers as strings + G2 = nx.read_gml(fname) + for attr, value in G2.nodes["Node"].items(): + if attr == "toosmall" or attr == "toobig": + assert isinstance(value, str) + else: + assert isinstance(value, int) + + def test_multiline(self): + # example from issue #6836 + multiline_example = """ +graph +[ + node + [ + id 0 + label "multiline node" + label2 "multiline1 + multiline2 + multiline3" + alt_name "id 0" + ] +] +""" + G = nx.parse_gml(multiline_example) + assert G.nodes["multiline node"] == { + "label2": "multiline1 multiline2 multiline3", + "alt_name": "id 0", + } + + +@contextmanager +def byte_file(): + _file_handle = io.BytesIO() + yield _file_handle + _file_handle.seek(0) + + +class TestPropertyLists: + def test_writing_graph_with_multi_element_property_list(self): + g = nx.Graph() + g.add_node("n1", properties=["element", 0, 1, 2.5, True, False]) + with byte_file() as f: + nx.write_gml(g, f) + result = f.read().decode() + + assert result == dedent( + """\ + graph [ + node [ + id 0 + label "n1" + properties "element" + properties 0 + properties 1 + properties 2.5 + properties 1 + properties 0 + ] + ] + """ + ) + + def test_writing_graph_with_one_element_property_list(self): + g = nx.Graph() + g.add_node("n1", properties=["element"]) + with byte_file() as f: + nx.write_gml(g, f) + result = f.read().decode() + + assert result == dedent( + """\ + graph [ + node [ + id 0 + label "n1" + properties "_networkx_list_start" + properties "element" + ] + ] + """ + ) + + def test_reading_graph_with_list_property(self): + with byte_file() as f: + f.write( + dedent( + """ + graph [ + node [ + id 0 + label "n1" + properties "element" + properties 0 + properties 1 + properties 2.5 + ] + ] + """ + ).encode("ascii") + ) + f.seek(0) + graph = nx.read_gml(f) + assert graph.nodes(data=True)["n1"] == {"properties": ["element", 0, 1, 2.5]} + + def test_reading_graph_with_single_element_list_property(self): + with byte_file() as f: + f.write( + dedent( + """ + graph [ + node [ + id 0 + label "n1" + properties "_networkx_list_start" + properties "element" + ] + ] + """ + ).encode("ascii") + ) + f.seek(0) + graph = nx.read_gml(f) + assert graph.nodes(data=True)["n1"] == {"properties": ["element"]} + + +@pytest.mark.parametrize("coll", ([], ())) +def test_stringize_empty_list_tuple(coll): + G = nx.path_graph(2) + G.nodes[0]["test"] = coll # test serializing an empty collection + f = io.BytesIO() + nx.write_gml(G, f) # Smoke test - should not raise + f.seek(0) + H = nx.read_gml(f) + assert H.nodes["0"]["test"] == coll # Check empty list round-trips properly + # Check full round-tripping. Note that nodes are loaded as strings by + # default, so there needs to be some remapping prior to comparison + H = nx.relabel_nodes(H, {"0": 0, "1": 1}) + assert nx.utils.graphs_equal(G, H) + # Same as above, but use destringizer for node remapping. 
Should have no + # effect on node attr + f.seek(0) + H = nx.read_gml(f, destringizer=int) + assert nx.utils.graphs_equal(G, H) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_graph6.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_graph6.py new file mode 100644 index 0000000..d680c21 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_graph6.py @@ -0,0 +1,181 @@ +from io import BytesIO + +import pytest + +import networkx as nx +import networkx.readwrite.graph6 as g6 +from networkx.utils import edges_equal, nodes_equal + + +def test_from_graph6_invariant_to_trailing_newline(): + """See gh-7557""" + G = nx.from_graph6_bytes(b">>graph6<>graph6<>graph6<<\nP~~~~~~~~~~~~~~~~~~~~~~{") + + +class TestGraph6Utils: + def test_n_data_n_conversion(self): + for i in [0, 1, 42, 62, 63, 64, 258047, 258048, 7744773, 68719476735]: + assert g6.data_to_n(g6.n_to_data(i))[0] == i + assert g6.data_to_n(g6.n_to_data(i))[1] == [] + assert g6.data_to_n(g6.n_to_data(i) + [42, 43])[1] == [42, 43] + + +class TestFromGraph6Bytes: + def test_from_graph6_bytes(self): + data = b"DF{" + G = nx.from_graph6_bytes(data) + assert nodes_equal(G.nodes(), [0, 1, 2, 3, 4]) + assert edges_equal( + G.edges(), [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)] + ) + + def test_read_equals_from_bytes(self): + data = b"DF{" + G = nx.from_graph6_bytes(data) + fh = BytesIO(data) + Gin = nx.read_graph6(fh) + assert nodes_equal(G.nodes(), Gin.nodes()) + assert edges_equal(G.edges(), Gin.edges()) + + +class TestReadGraph6: + def test_read_many_graph6(self): + """Test for reading many graphs from a file into a list.""" + data = b"DF{\nD`{\nDqK\nD~{\n" + fh = BytesIO(data) + glist = nx.read_graph6(fh) + assert len(glist) == 4 + for G in glist: + assert sorted(G) == list(range(5)) + + +class TestWriteGraph6: + """Unit tests for writing a graph to a file in graph6 format.""" + + def test_null_graph(self): + result = BytesIO() + nx.write_graph6(nx.null_graph(), result) + assert result.getvalue() == b">>graph6<>graph6<<@\n" + + def test_complete_graph(self): + result = BytesIO() + nx.write_graph6(nx.complete_graph(4), result) + assert result.getvalue() == b">>graph6<>graph6<>graph6<>graph6<>graph6<<@\n" + + def test_complete_graph(self): + assert g6.to_graph6_bytes(nx.complete_graph(4)) == b">>graph6<>graph6< + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + cls.simple_directed_graph = nx.DiGraph() + cls.simple_directed_graph.add_node("n10") + cls.simple_directed_graph.add_edge("n0", "n2", id="foo") + cls.simple_directed_graph.add_edge("n0", "n2") + cls.simple_directed_graph.add_edges_from( + [ + ("n1", "n2"), + ("n2", "n3"), + ("n3", "n5"), + ("n3", "n4"), + ("n4", "n6"), + ("n6", "n5"), + ("n5", "n7"), + ("n6", "n8"), + ("n8", "n7"), + ("n8", "n9"), + ] + ) + cls.simple_directed_fh = io.BytesIO(cls.simple_directed_data.encode("UTF-8")) + + cls.attribute_data = """ + + + yellow + + + + + green + + + + blue + + + red + + + + turquoise + + + 1.0 + + + 1.0 + + + 2.0 + + + + + + 1.1 + + + +""" + cls.attribute_graph = nx.DiGraph(id="G") + cls.attribute_graph.graph["node_default"] = {"color": "yellow"} + cls.attribute_graph.add_node("n0", color="green") + cls.attribute_graph.add_node("n2", color="blue") + cls.attribute_graph.add_node("n3", color="red") + cls.attribute_graph.add_node("n4") + cls.attribute_graph.add_node("n5", color="turquoise") + cls.attribute_graph.add_edge("n0", "n2", id="e0", weight=1.0) + cls.attribute_graph.add_edge("n0", "n1", 
id="e1", weight=1.0) + cls.attribute_graph.add_edge("n1", "n3", id="e2", weight=2.0) + cls.attribute_graph.add_edge("n3", "n2", id="e3") + cls.attribute_graph.add_edge("n2", "n4", id="e4") + cls.attribute_graph.add_edge("n3", "n5", id="e5") + cls.attribute_graph.add_edge("n5", "n4", id="e6", weight=1.1) + cls.attribute_fh = io.BytesIO(cls.attribute_data.encode("UTF-8")) + + cls.node_attribute_default_data = """ + + false + 0 + 0 + 0.0 + 0.0 + Foo + + + + + + + """ + cls.node_attribute_default_graph = nx.DiGraph(id="G") + cls.node_attribute_default_graph.graph["node_default"] = { + "boolean_attribute": False, + "int_attribute": 0, + "long_attribute": 0, + "float_attribute": 0.0, + "double_attribute": 0.0, + "string_attribute": "Foo", + } + cls.node_attribute_default_graph.add_node("n0") + cls.node_attribute_default_graph.add_node("n1") + cls.node_attribute_default_graph.add_edge("n0", "n1", id="e0") + cls.node_attribute_default_fh = io.BytesIO( + cls.node_attribute_default_data.encode("UTF-8") + ) + + cls.attribute_named_key_ids_data = """ + + + + + + + val1 + val2 + + + val_one + val2 + + + edge_value + + + +""" + cls.attribute_named_key_ids_graph = nx.DiGraph() + cls.attribute_named_key_ids_graph.add_node("0", prop1="val1", prop2="val2") + cls.attribute_named_key_ids_graph.add_node("1", prop1="val_one", prop2="val2") + cls.attribute_named_key_ids_graph.add_edge("0", "1", edge_prop="edge_value") + fh = io.BytesIO(cls.attribute_named_key_ids_data.encode("UTF-8")) + cls.attribute_named_key_ids_fh = fh + + cls.attribute_numeric_type_data = """ + + + + + + 1 + + + 2.0 + + + 1 + + + k + + + 1.0 + + + +""" + cls.attribute_numeric_type_graph = nx.DiGraph() + cls.attribute_numeric_type_graph.add_node("n0", weight=1) + cls.attribute_numeric_type_graph.add_node("n1", weight=2.0) + cls.attribute_numeric_type_graph.add_edge("n0", "n1", weight=1) + cls.attribute_numeric_type_graph.add_edge("n1", "n1", weight=1.0) + fh = io.BytesIO(cls.attribute_numeric_type_data.encode("UTF-8")) + cls.attribute_numeric_type_fh = fh + + cls.simple_undirected_data = """ + + + + + + + + + + +""" + # + cls.simple_undirected_graph = nx.Graph() + cls.simple_undirected_graph.add_node("n10") + cls.simple_undirected_graph.add_edge("n0", "n2", id="foo") + cls.simple_undirected_graph.add_edges_from([("n1", "n2"), ("n2", "n3")]) + fh = io.BytesIO(cls.simple_undirected_data.encode("UTF-8")) + cls.simple_undirected_fh = fh + + cls.undirected_multigraph_data = """ + + + + + + + + + + +""" + cls.undirected_multigraph = nx.MultiGraph() + cls.undirected_multigraph.add_node("n10") + cls.undirected_multigraph.add_edge("n0", "n2", id="e0") + cls.undirected_multigraph.add_edge("n1", "n2", id="e1") + cls.undirected_multigraph.add_edge("n2", "n1", id="e2") + fh = io.BytesIO(cls.undirected_multigraph_data.encode("UTF-8")) + cls.undirected_multigraph_fh = fh + + cls.undirected_multigraph_no_multiedge_data = """ + + + + + + + + + + +""" + cls.undirected_multigraph_no_multiedge = nx.MultiGraph() + cls.undirected_multigraph_no_multiedge.add_node("n10") + cls.undirected_multigraph_no_multiedge.add_edge("n0", "n2", id="e0") + cls.undirected_multigraph_no_multiedge.add_edge("n1", "n2", id="e1") + cls.undirected_multigraph_no_multiedge.add_edge("n2", "n3", id="e2") + fh = io.BytesIO(cls.undirected_multigraph_no_multiedge_data.encode("UTF-8")) + cls.undirected_multigraph_no_multiedge_fh = fh + + cls.multigraph_only_ids_for_multiedges_data = """ + + + + + + + + + + +""" + cls.multigraph_only_ids_for_multiedges = nx.MultiGraph() + 
cls.multigraph_only_ids_for_multiedges.add_node("n10") + cls.multigraph_only_ids_for_multiedges.add_edge("n0", "n2") + cls.multigraph_only_ids_for_multiedges.add_edge("n1", "n2", id="e1") + cls.multigraph_only_ids_for_multiedges.add_edge("n2", "n1", id="e2") + fh = io.BytesIO(cls.multigraph_only_ids_for_multiedges_data.encode("UTF-8")) + cls.multigraph_only_ids_for_multiedges_fh = fh + + +class TestReadGraphML(BaseGraphML): + def test_read_simple_directed_graphml(self): + G = self.simple_directed_graph + H = nx.read_graphml(self.simple_directed_fh) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) + self.simple_directed_fh.seek(0) + + PG = nx.parse_graphml(self.simple_directed_data) + assert sorted(G.nodes()) == sorted(PG.nodes()) + assert sorted(G.edges()) == sorted(PG.edges()) + assert sorted(G.edges(data=True)) == sorted(PG.edges(data=True)) + + def test_read_simple_undirected_graphml(self): + G = self.simple_undirected_graph + H = nx.read_graphml(self.simple_undirected_fh) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + self.simple_undirected_fh.seek(0) + + PG = nx.parse_graphml(self.simple_undirected_data) + assert nodes_equal(G.nodes(), PG.nodes()) + assert edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_graphml(self): + G = self.undirected_multigraph + H = nx.read_graphml(self.undirected_multigraph_fh) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + self.undirected_multigraph_fh.seek(0) + + PG = nx.parse_graphml(self.undirected_multigraph_data) + assert nodes_equal(G.nodes(), PG.nodes()) + assert edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_no_multiedge_graphml(self): + G = self.undirected_multigraph_no_multiedge + H = nx.read_graphml(self.undirected_multigraph_no_multiedge_fh) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + self.undirected_multigraph_no_multiedge_fh.seek(0) + + PG = nx.parse_graphml(self.undirected_multigraph_no_multiedge_data) + assert nodes_equal(G.nodes(), PG.nodes()) + assert edges_equal(G.edges(), PG.edges()) + + def test_read_undirected_multigraph_only_ids_for_multiedges_graphml(self): + G = self.multigraph_only_ids_for_multiedges + H = nx.read_graphml(self.multigraph_only_ids_for_multiedges_fh) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + self.multigraph_only_ids_for_multiedges_fh.seek(0) + + PG = nx.parse_graphml(self.multigraph_only_ids_for_multiedges_data) + assert nodes_equal(G.nodes(), PG.nodes()) + assert edges_equal(G.edges(), PG.edges()) + + def test_read_attribute_graphml(self): + G = self.attribute_graph + H = nx.read_graphml(self.attribute_fh) + assert nodes_equal(G.nodes(True), sorted(H.nodes(data=True))) + ge = sorted(G.edges(data=True)) + he = sorted(H.edges(data=True)) + for a, b in zip(ge, he): + assert a == b + self.attribute_fh.seek(0) + + PG = nx.parse_graphml(self.attribute_data) + assert sorted(G.nodes(True)) == sorted(PG.nodes(data=True)) + ge = sorted(G.edges(data=True)) + he = sorted(PG.edges(data=True)) + for a, b in zip(ge, he): + assert a == b + + def test_node_default_attribute_graphml(self): + G = self.node_attribute_default_graph + H = nx.read_graphml(self.node_attribute_default_fh) + assert G.graph["node_default"] == H.graph["node_default"] + + def test_directed_edge_in_undirected(self): + s = """ + + 
+ + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) + + def test_undirected_edge_in_directed(self): + s = """ + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) + + def test_key_raise(self): + s = """ + + + yellow + + + + + green + + + + blue + + + 1.0 + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) + + def test_hyperedge_raise(self): + s = """ + + + yellow + + + + + green + + + + blue + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, s) + + def test_multigraph_keys(self): + # Test that reading multigraphs uses edge id attributes as keys + s = """ + + + + + + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_graphml(fh) + expected = [("n0", "n1", "e0"), ("n0", "n1", "e1")] + assert sorted(G.edges(keys=True)) == expected + fh.seek(0) + H = nx.parse_graphml(s) + assert sorted(H.edges(keys=True)) == expected + + def test_preserve_multi_edge_data(self): + """ + Test that data and keys of edges are preserved on consequent + write and reads + """ + G = nx.MultiGraph() + G.add_node(1) + G.add_node(2) + G.add_edges_from( + [ + # edges with no data, no keys: + (1, 2), + # edges with only data: + (1, 2, {"key": "data_key1"}), + (1, 2, {"id": "data_id2"}), + (1, 2, {"key": "data_key3", "id": "data_id3"}), + # edges with both data and keys: + (1, 2, 103, {"key": "data_key4"}), + (1, 2, 104, {"id": "data_id5"}), + (1, 2, 105, {"key": "data_key6", "id": "data_id7"}), + ] + ) + fh = io.BytesIO() + nx.write_graphml(G, fh) + fh.seek(0) + H = nx.read_graphml(fh, node_type=int) + assert edges_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) + assert G._adj == H._adj + + Gadj = { + str(node): { + str(nbr): {str(ekey): dd for ekey, dd in key_dict.items()} + for nbr, key_dict in nbr_dict.items() + } + for node, nbr_dict in G._adj.items() + } + fh.seek(0) + HH = nx.read_graphml(fh, node_type=str, edge_key_type=str) + assert Gadj == HH._adj + + fh.seek(0) + string_fh = fh.read() + HH = nx.parse_graphml(string_fh, node_type=str, edge_key_type=str) + assert Gadj == HH._adj + + def test_yfiles_extension(self): + data = """ + + + + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + 2 + + + + + + + + + + + + 3 + + + + + + + + + + + + + + + + + + + + +""" + fh = io.BytesIO(data.encode("UTF-8")) + G = nx.read_graphml(fh, force_multigraph=True) + assert list(G.edges()) == [("n0", "n1")] + assert G.has_edge("n0", "n1", key="e0") + assert G.nodes["n0"]["label"] == "1" + assert G.nodes["n1"]["label"] == "2" + assert G.nodes["n2"]["label"] == "3" + assert G.nodes["n0"]["shape_type"] == "rectangle" + assert G.nodes["n1"]["shape_type"] == "rectangle" + assert G.nodes["n2"]["shape_type"] == "com.yworks.flowchart.terminator" + assert G.nodes["n2"]["description"] == "description\nline1\nline2" + fh.seek(0) + G = nx.read_graphml(fh) + assert list(G.edges()) == [("n0", "n1")] + assert G["n0"]["n1"]["id"] == "e0" + assert G.nodes["n0"]["label"] == "1" + assert G.nodes["n1"]["label"] == "2" + assert G.nodes["n2"]["label"] == "3" + assert G.nodes["n0"]["shape_type"] == "rectangle" + assert G.nodes["n1"]["shape_type"] == "rectangle" + assert 
G.nodes["n2"]["shape_type"] == "com.yworks.flowchart.terminator" + assert G.nodes["n2"]["description"] == "description\nline1\nline2" + + H = nx.parse_graphml(data, force_multigraph=True) + assert list(H.edges()) == [("n0", "n1")] + assert H.has_edge("n0", "n1", key="e0") + assert H.nodes["n0"]["label"] == "1" + assert H.nodes["n1"]["label"] == "2" + assert H.nodes["n2"]["label"] == "3" + + H = nx.parse_graphml(data) + assert list(H.edges()) == [("n0", "n1")] + assert H["n0"]["n1"]["id"] == "e0" + assert H.nodes["n0"]["label"] == "1" + assert H.nodes["n1"]["label"] == "2" + assert H.nodes["n2"]["label"] == "3" + + def test_bool(self): + s = """ + + + false + + + + true + + + + false + + + FaLsE + + + True + + + 0 + + + 1 + + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_graphml(fh) + H = nx.parse_graphml(s) + for graph in [G, H]: + assert graph.nodes["n0"]["test"] + assert not graph.nodes["n2"]["test"] + assert not graph.nodes["n3"]["test"] + assert graph.nodes["n4"]["test"] + assert not graph.nodes["n5"]["test"] + assert graph.nodes["n6"]["test"] + + def test_graphml_header_line(self): + good = """ + + + false + + + + true + + + +""" + bad = """ + + + false + + + + true + + + +""" + ugly = """ + + + false + + + + true + + + +""" + for s in (good, bad): + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_graphml(fh) + H = nx.parse_graphml(s) + for graph in [G, H]: + assert graph.nodes["n0"]["test"] + + fh = io.BytesIO(ugly.encode("UTF-8")) + pytest.raises(nx.NetworkXError, nx.read_graphml, fh) + pytest.raises(nx.NetworkXError, nx.parse_graphml, ugly) + + def test_read_attributes_with_groups(self): + data = """\ + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2 + + + + + + + + + + + + + + + + + + + + + + Group 3 + + + + + + + + + + Folder 3 + + + + + + + + + + + + + + + + + + + + + Group 1 + + + + + + + + + + Folder 1 + + + + + + + + + + + + + + + + + + 1 + + + + + + + + + + + + + + + + + + + 3 + + + + + + + + + + + + + + + + + + + + + + + + Group 2 + + + + + + + + + + Folder 2 + + + + + + + + + + + + + + + + + + 5 + + + + + + + + + + + + + + + + + + + 6 + + + + + + + + + + + + + + + + + + + + + + + 9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +""" + # verify that nodes / attributes are correctly read when part of a group + fh = io.BytesIO(data.encode("UTF-8")) + G = nx.read_graphml(fh) + data = [x for _, x in G.nodes(data=True)] + assert len(data) == 9 + for node_data in data: + assert node_data["CustomProperty"] != "" + + def test_long_attribute_type(self): + # test that graphs with attr.type="long" (as produced by botch and + # dose3) can be parsed + s = """ + + + + + 4284 + + +""" + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_graphml(fh) + expected = [("n1", {"cudfversion": 4284})] + assert sorted(G.nodes(data=True)) == expected + fh.seek(0) + H = nx.parse_graphml(s) + assert sorted(H.nodes(data=True)) == expected + + +class TestWriteGraphML(BaseGraphML): + writer = staticmethod(nx.write_graphml_lxml) + + @classmethod + def setup_class(cls): + BaseGraphML.setup_class() + _ = pytest.importorskip("lxml.etree") + + def test_write_interface(self): + try: + import lxml.etree + + assert nx.write_graphml == nx.write_graphml_lxml + except ImportError: + assert nx.write_graphml == nx.write_graphml_xml + + def test_write_read_simple_directed_graphml(self): + G = self.simple_directed_graph + G.graph["hi"] = "there" + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh) + 
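        # The write/read round trip should preserve nodes, edges, and edge data;
+        # the graph-level attribute "hi" set above is serialized too, though
+        # only node/edge equality is asserted below.
+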
assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) + self.simple_directed_fh.seek(0) + + def test_GraphMLWriter_add_graphs(self): + gmlw = GraphMLWriter() + G = self.simple_directed_graph + H = G.copy() + gmlw.add_graphs([G, H]) + + def test_write_read_simple_no_prettyprint(self): + G = self.simple_directed_graph + G.graph["hi"] = "there" + G.graph["id"] = "1" + fh = io.BytesIO() + self.writer(G, fh, prettyprint=False) + fh.seek(0) + H = nx.read_graphml(fh) + assert sorted(G.nodes()) == sorted(H.nodes()) + assert sorted(G.edges()) == sorted(H.edges()) + assert sorted(G.edges(data=True)) == sorted(H.edges(data=True)) + self.simple_directed_fh.seek(0) + + def test_write_read_attribute_named_key_ids_graphml(self): + from xml.etree.ElementTree import parse + + G = self.attribute_named_key_ids_graph + fh = io.BytesIO() + self.writer(G, fh, named_key_ids=True) + fh.seek(0) + H = nx.read_graphml(fh) + fh.seek(0) + + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + assert edges_equal(G.edges(data=True), H.edges(data=True)) + self.attribute_named_key_ids_fh.seek(0) + + xml = parse(fh) + # Children are the key elements, and the graph element + children = list(xml.getroot()) + assert len(children) == 4 + + keys = [child.items() for child in children[:3]] + + assert len(keys) == 3 + assert ("id", "edge_prop") in keys[0] + assert ("attr.name", "edge_prop") in keys[0] + assert ("id", "prop2") in keys[1] + assert ("attr.name", "prop2") in keys[1] + assert ("id", "prop1") in keys[2] + assert ("attr.name", "prop1") in keys[2] + + # Confirm the read graph nodes/edge are identical when compared to + # default writing behavior. + default_behavior_fh = io.BytesIO() + nx.write_graphml(G, default_behavior_fh) + default_behavior_fh.seek(0) + H = nx.read_graphml(default_behavior_fh) + + named_key_ids_behavior_fh = io.BytesIO() + nx.write_graphml(G, named_key_ids_behavior_fh, named_key_ids=True) + named_key_ids_behavior_fh.seek(0) + J = nx.read_graphml(named_key_ids_behavior_fh) + + assert all(n1 == n2 for (n1, n2) in zip(H.nodes, J.nodes)) + assert all(e1 == e2 for (e1, e2) in zip(H.edges, J.edges)) + + def test_write_read_attribute_numeric_type_graphml(self): + from xml.etree.ElementTree import parse + + G = self.attribute_numeric_type_graph + fh = io.BytesIO() + self.writer(G, fh, infer_numeric_types=True) + fh.seek(0) + H = nx.read_graphml(fh) + fh.seek(0) + + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + assert edges_equal(G.edges(data=True), H.edges(data=True)) + self.attribute_numeric_type_fh.seek(0) + + xml = parse(fh) + # Children are the key elements, and the graph element + children = list(xml.getroot()) + assert len(children) == 3 + + keys = [child.items() for child in children[:2]] + + assert len(keys) == 2 + assert ("attr.type", "double") in keys[0] + assert ("attr.type", "double") in keys[1] + + def test_more_multigraph_keys(self, tmp_path): + """Writing keys as edge id attributes means keys become strings. + The original keys are stored as data, so read them back in + if `str(key) == edge_id` + This allows the adjacency to remain the same. 
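+
+        For example, an edge stored under key `2` is written with `id="2"`;
+        on read `str(2) == "2"` matches the id, so the original integer key
+        is restored and `G._adj == H._adj` holds.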
+ """ + G = nx.MultiGraph() + G.add_edges_from([("a", "b", 2), ("a", "b", 3)]) + fname = tmp_path / "test.graphml" + self.writer(G, fname) + H = nx.read_graphml(fname) + assert H.is_multigraph() + assert edges_equal(G.edges(keys=True), H.edges(keys=True)) + assert G._adj == H._adj + + def test_default_attribute(self): + G = nx.Graph(name="Fred") + G.add_node(1, label=1, color="green") + nx.add_path(G, [0, 1, 2, 3]) + G.add_edge(1, 2, weight=3) + G.graph["node_default"] = {"color": "yellow"} + G.graph["edge_default"] = {"weight": 7} + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh, node_type=int) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + assert G.graph == H.graph + + def test_mixed_type_attributes(self): + G = nx.MultiGraph() + G.add_node("n0", special=False) + G.add_node("n1", special=0) + G.add_edge("n0", "n1", special=False) + G.add_edge("n0", "n1", special=0) + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh) + assert not H.nodes["n0"]["special"] + assert H.nodes["n1"]["special"] == 0 + assert not H.edges["n0", "n1", 0]["special"] + assert H.edges["n0", "n1", 1]["special"] == 0 + + def test_str_number_mixed_type_attributes(self): + G = nx.MultiGraph() + G.add_node("n0", special="hello") + G.add_node("n1", special=0) + G.add_edge("n0", "n1", special="hello") + G.add_edge("n0", "n1", special=0) + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh) + assert H.nodes["n0"]["special"] == "hello" + assert H.nodes["n1"]["special"] == 0 + assert H.edges["n0", "n1", 0]["special"] == "hello" + assert H.edges["n0", "n1", 1]["special"] == 0 + + def test_mixed_int_type_number_attributes(self): + np = pytest.importorskip("numpy") + G = nx.MultiGraph() + G.add_node("n0", special=np.int64(0)) + G.add_node("n1", special=1) + G.add_edge("n0", "n1", special=np.int64(2)) + G.add_edge("n0", "n1", special=3) + fh = io.BytesIO() + self.writer(G, fh) + fh.seek(0) + H = nx.read_graphml(fh) + assert H.nodes["n0"]["special"] == 0 + assert H.nodes["n1"]["special"] == 1 + assert H.edges["n0", "n1", 0]["special"] == 2 + assert H.edges["n0", "n1", 1]["special"] == 3 + + def test_multigraph_to_graph(self, tmp_path): + # test converting multigraph to graph if no parallel edges found + G = nx.MultiGraph() + G.add_edges_from([("a", "b", 2), ("b", "c", 3)]) # no multiedges + fname = tmp_path / "test.graphml" + self.writer(G, fname) + H = nx.read_graphml(fname) + assert not H.is_multigraph() + H = nx.read_graphml(fname, force_multigraph=True) + assert H.is_multigraph() + + # add a multiedge + G.add_edge("a", "b", "e-id") + fname = tmp_path / "test.graphml" + self.writer(G, fname) + H = nx.read_graphml(fname) + assert H.is_multigraph() + H = nx.read_graphml(fname, force_multigraph=True) + assert H.is_multigraph() + + def test_write_generate_edge_id_from_attribute(self, tmp_path): + from xml.etree.ElementTree import parse + + G = nx.Graph() + G.add_edges_from([("a", "b"), ("b", "c"), ("a", "c")]) + edge_attributes = {e: str(e) for e in G.edges} + nx.set_edge_attributes(G, edge_attributes, "eid") + fname = tmp_path / "test.graphml" + # set edge_id_from_attribute e.g. "eid" for write_graphml() + self.writer(G, fname, edge_id_from_attribute="eid") + # set edge_id_from_attribute e.g. 
"eid" for generate_graphml() + generator = nx.generate_graphml(G, edge_id_from_attribute="eid") + + H = nx.read_graphml(fname) + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + # NetworkX adds explicit edge "id" from file as attribute + nx.set_edge_attributes(G, edge_attributes, "id") + assert edges_equal(G.edges(data=True), H.edges(data=True)) + + tree = parse(fname) + children = list(tree.getroot()) + assert len(children) == 2 + edge_ids = [ + edge.attrib["id"] + for edge in tree.getroot().findall( + ".//{http://graphml.graphdrawing.org/xmlns}edge" + ) + ] + # verify edge id value is equal to specified attribute value + assert sorted(edge_ids) == sorted(edge_attributes.values()) + + # check graphml generated from generate_graphml() + data = "".join(generator) + J = nx.parse_graphml(data) + assert sorted(G.nodes()) == sorted(J.nodes()) + assert sorted(G.edges()) == sorted(J.edges()) + # NetworkX adds explicit edge "id" from file as attribute + nx.set_edge_attributes(G, edge_attributes, "id") + assert edges_equal(G.edges(data=True), J.edges(data=True)) + + def test_multigraph_write_generate_edge_id_from_attribute(self, tmp_path): + from xml.etree.ElementTree import parse + + G = nx.MultiGraph() + G.add_edges_from([("a", "b"), ("b", "c"), ("a", "c"), ("a", "b")]) + edge_attributes = {e: str(e) for e in G.edges} + nx.set_edge_attributes(G, edge_attributes, "eid") + fname = tmp_path / "test.graphml" + # set edge_id_from_attribute e.g. "eid" for write_graphml() + self.writer(G, fname, edge_id_from_attribute="eid") + # set edge_id_from_attribute e.g. "eid" for generate_graphml() + generator = nx.generate_graphml(G, edge_id_from_attribute="eid") + + H = nx.read_graphml(fname) + assert H.is_multigraph() + H = nx.read_graphml(fname, force_multigraph=True) + assert H.is_multigraph() + + assert nodes_equal(G.nodes(), H.nodes()) + assert edges_equal(G.edges(), H.edges()) + assert sorted(data.get("eid") for u, v, data in H.edges(data=True)) == sorted( + edge_attributes.values() + ) + # NetworkX uses edge_ids as keys in multigraphs if no key + assert sorted(key for u, v, key in H.edges(keys=True)) == sorted( + edge_attributes.values() + ) + + tree = parse(fname) + children = list(tree.getroot()) + assert len(children) == 2 + edge_ids = [ + edge.attrib["id"] + for edge in tree.getroot().findall( + ".//{http://graphml.graphdrawing.org/xmlns}edge" + ) + ] + # verify edge id value is equal to specified attribute value + assert sorted(edge_ids) == sorted(edge_attributes.values()) + + # check graphml generated from generate_graphml() + graphml_data = "".join(generator) + J = nx.parse_graphml(graphml_data) + assert J.is_multigraph() + + assert nodes_equal(G.nodes(), J.nodes()) + assert edges_equal(G.edges(), J.edges()) + assert sorted(data.get("eid") for u, v, data in J.edges(data=True)) == sorted( + edge_attributes.values() + ) + # NetworkX uses edge_ids as keys in multigraphs if no key + assert sorted(key for u, v, key in J.edges(keys=True)) == sorted( + edge_attributes.values() + ) + + def test_numpy_float64(self, tmp_path): + np = pytest.importorskip("numpy") + wt = np.float64(3.4) + G = nx.Graph([(1, 2, {"weight": wt})]) + fname = tmp_path / "test.graphml" + self.writer(G, fname) + H = nx.read_graphml(fname, node_type=int) + assert G.edges == H.edges + wtG = G[1][2]["weight"] + wtH = H[1][2]["weight"] + assert wtG == pytest.approx(wtH, abs=1e-6) + assert type(wtG) is np.float64 + assert type(wtH) is float + + def test_numpy_float32(self, tmp_path): + np = 
pytest.importorskip("numpy")
+        wt = np.float32(3.4)
+        G = nx.Graph([(1, 2, {"weight": wt})])
+        fname = tmp_path / "test.graphml"
+        self.writer(G, fname)
+        H = nx.read_graphml(fname, node_type=int)
+        assert G.edges == H.edges
+        wtG = G[1][2]["weight"]
+        wtH = H[1][2]["weight"]
+        assert wtG == pytest.approx(wtH, abs=1e-6)
+        assert type(wtG) is np.float32
+        assert type(wtH) is float
+
+    def test_numpy_float64_inference(self, tmp_path):
+        np = pytest.importorskip("numpy")
+        G = self.attribute_numeric_type_graph
+        G.edges[("n1", "n1")]["weight"] = np.float64(1.1)
+        fname = tmp_path / "test.graphml"
+        self.writer(G, fname, infer_numeric_types=True)
+        H = nx.read_graphml(fname)
+        assert G._adj == H._adj
+
+    def test_unicode_attributes(self, tmp_path):
+        G = nx.Graph()
+        name1 = chr(2344) + chr(123) + chr(6543)
+        name2 = chr(5543) + chr(1543) + chr(324)
+        node_type = str
+        G.add_edge(name1, "Radiohead", foo=name2)
+        fname = tmp_path / "test.graphml"
+        self.writer(G, fname)
+        H = nx.read_graphml(fname, node_type=node_type)
+        assert G._adj == H._adj
+
+    def test_unicode_escape(self):
+        # Test handling of JSON-escaped strings (regression test for
+        # Issue #1880, originally a Python 2 problem)
+        import json
+
+        a = {"a": '{"a": "123"}'}  # an object with many chars to escape
+        sa = json.dumps(a)
+        G = nx.Graph()
+        G.graph["test"] = sa
+        fh = io.BytesIO()
+        self.writer(G, fh)
+        fh.seek(0)
+        H = nx.read_graphml(fh)
+        assert G.graph["test"] == H.graph["test"]
+
+
+class TestXMLGraphML(TestWriteGraphML):
+    writer = staticmethod(nx.write_graphml_xml)
+
+    @classmethod
+    def setup_class(cls):
+        TestWriteGraphML.setup_class()
+
+
+def test_exception_for_unsupported_datatype_node_attr():
+    """Test that a detailed exception is raised when an attribute is of a type
+    not supported by GraphML, e.g. a list"""
+    pytest.importorskip("lxml.etree")
+    # node attribute
+    G = nx.Graph()
+    G.add_node(0, my_list_attribute=[0, 1, 2])
+    fh = io.BytesIO()
+    with pytest.raises(TypeError, match="GraphML does not support"):
+        nx.write_graphml(G, fh)
+
+
+def test_exception_for_unsupported_datatype_edge_attr():
+    """Test that a detailed exception is raised when an attribute is of a type
+    not supported by GraphML, e.g. a list"""
+    pytest.importorskip("lxml.etree")
+    # edge attribute
+    G = nx.Graph()
+    G.add_edge(0, 1, my_list_attribute=[0, 1, 2])
+    fh = io.BytesIO()
+    with pytest.raises(TypeError, match="GraphML does not support"):
+        nx.write_graphml(G, fh)
+
+
+def test_exception_for_unsupported_datatype_graph_attr():
+    """Test that a detailed exception is raised when an attribute is of a type
+    not supported by GraphML, e.g.
a list""" + pytest.importorskip("lxml.etree") + # graph attribute + G = nx.Graph() + G.graph["my_list_attribute"] = [0, 1, 2] + fh = io.BytesIO() + with pytest.raises(TypeError, match="GraphML does not support"): + nx.write_graphml(G, fh) + + +def test_empty_attribute(): + """Tests that a GraphML string with an empty attribute can be parsed + correctly.""" + s = """ + + + + + + aaa + bbb + + + ccc + + + + """ + fh = io.BytesIO(s.encode("UTF-8")) + G = nx.read_graphml(fh) + assert G.nodes["0"] == {"foo": "aaa", "bar": "bbb"} + assert G.nodes["1"] == {"foo": "ccc", "bar": ""} diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_leda.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_leda.py new file mode 100644 index 0000000..8ac5ecc --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_leda.py @@ -0,0 +1,30 @@ +import io + +import networkx as nx + + +class TestLEDA: + def test_parse_leda(self): + data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" + G = nx.parse_leda(data) + G = nx.parse_leda(data.split("\n")) + assert sorted(G.nodes()) == ["v1", "v2", "v3", "v4", "v5"] + assert sorted(G.edges(data=True)) == [ + ("v1", "v2", {"label": "4"}), + ("v1", "v3", {"label": "3"}), + ("v2", "v3", {"label": "2"}), + ("v3", "v4", {"label": "3"}), + ("v3", "v5", {"label": "7"}), + ("v4", "v5", {"label": "6"}), + ("v5", "v1", {"label": "foo"}), + ] + + def test_read_LEDA(self): + fh = io.BytesIO() + data = """#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" + G = nx.parse_leda(data) + fh.write(data.encode("UTF-8")) + fh.seek(0) + Gin = nx.read_leda(fh) + assert sorted(G.nodes()) == sorted(Gin.nodes()) + assert sorted(G.edges()) == sorted(Gin.edges()) diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_p2g.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_p2g.py new file mode 100644 index 0000000..e4c50de --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_p2g.py @@ -0,0 +1,62 @@ +import io + +import networkx as nx +from networkx.readwrite.p2g import read_p2g, write_p2g +from networkx.utils import edges_equal + + +class TestP2G: + @classmethod + def setup_class(cls): + cls.G = nx.Graph(name="test") + e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")] + cls.G.add_edges_from(e) + cls.G.add_node("g") + cls.DG = nx.DiGraph(cls.G) + + def test_read_p2g(self): + s = b"""\ +name +3 4 +a +1 2 +b + +c +0 2 +""" + bytesIO = io.BytesIO(s) + G = read_p2g(bytesIO) + assert G.name == "name" + assert sorted(G) == ["a", "b", "c"] + edges = [(str(u), str(v)) for u, v in G.edges()] + assert edges_equal(G.edges(), [("a", "c"), ("a", "b"), ("c", "a"), ("c", "c")]) + + def test_write_p2g(self): + s = b"""foo +3 2 +1 +1 +2 +2 +3 + +""" + fh = io.BytesIO() + G = nx.DiGraph() + G.name = "foo" + G.add_edges_from([(1, 2), (2, 3)]) + write_p2g(G, fh) + fh.seek(0) + r = fh.read() + assert r == s + + def test_write_read_p2g(self): + fh = io.BytesIO() + G = nx.DiGraph() + G.name = "foo" + G.add_edges_from([("a", "b"), ("b", "c")]) + write_p2g(G, fh) + fh.seek(0) + H = read_p2g(fh) + 
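        # p2g identifies nodes positionally (a name line followed by a line
+        # of successor indices), so string node names survive the round trip
+        # and the edge sets compare equal.
+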
        assert edges_equal(G.edges(), H.edges())
diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_pajek.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_pajek.py
new file mode 100644
index 0000000..317ebe8
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_pajek.py
@@ -0,0 +1,126 @@
+"""
+Pajek tests
+"""
+
+import networkx as nx
+from networkx.utils import edges_equal, nodes_equal
+
+
+class TestPajek:
+    @classmethod
+    def setup_class(cls):
+        cls.data = """*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180"""
+        cls.G = nx.MultiDiGraph()
+        cls.G.add_nodes_from(["A1", "Bb", "C", "D2"])
+        cls.G.add_edges_from(
+            [
+                ("A1", "A1"),
+                ("A1", "Bb"),
+                ("A1", "C"),
+                ("Bb", "A1"),
+                ("C", "C"),
+                ("C", "D2"),
+                ("D2", "Bb"),
+            ]
+        )
+
+        cls.G.graph["name"] = "Tralala"
+
+    def test_parse_pajek_simple(self):
+        # Example without node positions or shape
+        data = """*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1"""
+        G = nx.parse_pajek(data)
+        assert sorted(G.nodes()) == ["1", "2"]
+        assert edges_equal(G.edges(), [("1", "2"), ("1", "2")])
+
+    def test_parse_pajek(self):
+        G = nx.parse_pajek(self.data)
+        assert sorted(G.nodes()) == ["A1", "Bb", "C", "D2"]
+        assert edges_equal(
+            G.edges(),
+            [
+                ("A1", "A1"),
+                ("A1", "Bb"),
+                ("A1", "C"),
+                ("Bb", "A1"),
+                ("C", "C"),
+                ("C", "D2"),
+                ("D2", "Bb"),
+            ],
+        )
+
+    def test_parse_pajek_mat(self):
+        data = """*Vertices 3\n1 "one"\n2 "two"\n3 "three"\n*Matrix\n1 1 0\n0 1 0\n0 1 0\n"""
+        G = nx.parse_pajek(data)
+        assert set(G.nodes()) == {"one", "two", "three"}
+        assert G.nodes["two"] == {"id": "2"}
+        assert edges_equal(
+            set(G.edges()),
+            {("one", "one"), ("two", "one"), ("two", "two"), ("two", "three")},
+        )
+
+    def test_read_pajek(self, tmp_path):
+        G = nx.parse_pajek(self.data)
+        # Write the data to a file, then read it back
+        fname = tmp_path / "test.pjk"
+        with open(fname, "wb") as fh:
+            fh.write(self.data.encode("UTF-8"))
+
+        Gin = nx.read_pajek(fname)
+        assert sorted(G.nodes()) == sorted(Gin.nodes())
+        assert edges_equal(G.edges(), Gin.edges())
+        assert self.G.graph == Gin.graph
+        for n in G:
+            assert G.nodes[n] == Gin.nodes[n]
+
+    def test_write_pajek(self):
+        import io
+
+        G = nx.parse_pajek(self.data)
+        fh = io.BytesIO()
+        nx.write_pajek(G, fh)
+        fh.seek(0)
+        H = nx.read_pajek(fh)
+        assert nodes_equal(list(G), list(H))
+        assert edges_equal(list(G.edges()), list(H.edges()))
+        # Graph name is left out for now, therefore it is not tested.
+ # assert_equal(G.graph, H.graph) + + def test_ignored_attribute(self): + import io + + G = nx.Graph() + fh = io.BytesIO() + G.add_node(1, int_attr=1) + G.add_node(2, empty_attr=" ") + G.add_edge(1, 2, int_attr=2) + G.add_edge(2, 3, empty_attr=" ") + + import warnings + + with warnings.catch_warnings(record=True) as w: + nx.write_pajek(G, fh) + assert len(w) == 4 + + def test_noname(self): + # Make sure we can parse a line such as: *network + # Issue #952 + line = "*network\n" + other_lines = self.data.split("\n")[1:] + data = line + "\n".join(other_lines) + G = nx.parse_pajek(data) + + def test_unicode(self): + import io + + G = nx.Graph() + name1 = chr(2344) + chr(123) + chr(6543) + name2 = chr(5543) + chr(1543) + chr(324) + G.add_edge(name1, "Radiohead", foo=name2) + fh = io.BytesIO() + nx.write_pajek(G, fh) + fh.seek(0) + H = nx.read_pajek(fh) + assert nodes_equal(list(G), list(H)) + assert edges_equal(list(G.edges()), list(H.edges())) + assert G.graph == H.graph diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_sparse6.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_sparse6.py new file mode 100644 index 0000000..52cd271 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_sparse6.py @@ -0,0 +1,166 @@ +from io import BytesIO + +import pytest + +import networkx as nx +from networkx.utils import edges_equal, nodes_equal + + +class TestSparseGraph6: + def test_from_sparse6_bytes(self): + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" + G = nx.from_sparse6_bytes(data) + assert nodes_equal( + sorted(G.nodes()), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) + assert edges_equal( + G.edges(), + [ + (0, 1), + (0, 2), + (0, 3), + (1, 12), + (1, 14), + (2, 13), + (2, 15), + (3, 16), + (3, 17), + (4, 7), + (4, 9), + (4, 11), + (5, 6), + (5, 8), + (5, 9), + (6, 10), + (6, 11), + (7, 8), + (7, 10), + (8, 12), + (9, 15), + (10, 14), + (11, 13), + (12, 16), + (13, 17), + (14, 17), + (15, 16), + ], + ) + + def test_from_bytes_multigraph_graph(self): + graph_data = b":An" + G = nx.from_sparse6_bytes(graph_data) + assert isinstance(G, nx.Graph) + multigraph_data = b":Ab" + M = nx.from_sparse6_bytes(multigraph_data) + assert isinstance(M, nx.MultiGraph) + + def test_read_sparse6(self): + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM" + G = nx.from_sparse6_bytes(data) + fh = BytesIO(data) + Gin = nx.read_sparse6(fh) + assert nodes_equal(G.nodes(), Gin.nodes()) + assert edges_equal(G.edges(), Gin.edges()) + + def test_read_many_graph6(self): + # Read many graphs into list + data = b":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n:Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM" + fh = BytesIO(data) + glist = nx.read_sparse6(fh) + assert len(glist) == 2 + for G in glist: + assert nodes_equal( + G.nodes(), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], + ) + + +class TestWriteSparse6: + """Unit tests for writing graphs in the sparse6 format. + + Most of the test cases were checked against the sparse6 encoder in Sage. 
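+
+    A sparse6 record begins with ``:`` (optionally preceded by the
+    ``>>sparse6<<`` header), and ``write_sparse6`` terminates each record
+    with a newline, as the expected byte strings below show.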
+ + """ + + def test_null_graph(self): + G = nx.null_graph() + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:?\n" + + def test_trivial_graph(self): + G = nx.trivial_graph() + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:@\n" + + def test_empty_graph(self): + G = nx.empty_graph(5) + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:D\n" + + def test_large_empty_graph(self): + G = nx.empty_graph(68) + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:~?@C\n" + + def test_very_large_empty_graph(self): + G = nx.empty_graph(258049) + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:~~???~?@\n" + + def test_complete_graph(self): + G = nx.complete_graph(4) + result = BytesIO() + nx.write_sparse6(G, result) + assert result.getvalue() == b">>sparse6<<:CcKI\n" + + def test_no_header(self): + G = nx.complete_graph(4) + result = BytesIO() + nx.write_sparse6(G, result, header=False) + assert result.getvalue() == b":CcKI\n" + + def test_padding(self): + codes = (b":Cdv", b":DaYn", b":EaYnN", b":FaYnL", b":GaYnLz") + for n, code in enumerate(codes, start=4): + G = nx.path_graph(n) + result = BytesIO() + nx.write_sparse6(G, result, header=False) + assert result.getvalue() == code + b"\n" + + def test_complete_bipartite(self): + G = nx.complete_bipartite_graph(6, 9) + result = BytesIO() + nx.write_sparse6(G, result) + # Compared with sage + expected = b">>sparse6<<:Nk" + b"?G`cJ" * 9 + b"\n" + assert result.getvalue() == expected + + def test_read_write_inverse(self): + for i in list(range(13)) + [31, 47, 62, 63, 64, 72]: + m = min(2 * i, i * i // 2) + g = nx.random_graphs.gnm_random_graph(i, m, seed=i) + gstr = BytesIO() + nx.write_sparse6(g, gstr, header=False) + # Strip the trailing newline. 
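+            # (``write_sparse6`` adds the newline; the bare record is what
+            # ``from_sparse6_bytes`` decodes.)
+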
+ gstr = gstr.getvalue().rstrip() + g2 = nx.from_sparse6_bytes(gstr) + assert g2.order() == g.order() + assert edges_equal(g2.edges(), g.edges()) + + def test_no_directed_graphs(self): + with pytest.raises(nx.NetworkXNotImplemented): + nx.write_sparse6(nx.DiGraph(), BytesIO()) + + def test_write_path(self, tmp_path): + # Get a valid temporary file name + fullfilename = str(tmp_path / "test.s6") + # file should be closed now, so write_sparse6 can open it + nx.write_sparse6(nx.null_graph(), fullfilename) + with open(fullfilename, mode="rb") as fh: + assert fh.read() == b">>sparse6<<:?\n" diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_text.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_text.py new file mode 100644 index 0000000..b2b7448 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/tests/test_text.py @@ -0,0 +1,1742 @@ +import random +from itertools import product +from textwrap import dedent + +import pytest + +import networkx as nx + + +def test_generate_network_text_forest_directed(): + # Create a directed forest with labels + graph = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + for node in graph.nodes: + graph.nodes[node]["label"] = "node_" + chr(ord("a") + node) + + node_target = dedent( + """ + ╙── 0 + ├─╼ 1 + │ ├─╼ 3 + │ └─╼ 4 + └─╼ 2 + ├─╼ 5 + └─╼ 6 + """ + ).strip() + + label_target = dedent( + """ + ╙── node_a + ├─╼ node_b + │ ├─╼ node_d + │ └─╼ node_e + └─╼ node_c + ├─╼ node_f + └─╼ node_g + """ + ).strip() + + # Basic node case + ret = nx.generate_network_text(graph, with_labels=False) + assert "\n".join(ret) == node_target + + # Basic label case + ret = nx.generate_network_text(graph, with_labels=True) + assert "\n".join(ret) == label_target + + +def test_write_network_text_empty_graph(): + def _graph_str(g, **kw): + printbuf = [] + nx.write_network_text(g, printbuf.append, end="", **kw) + return "\n".join(printbuf) + + assert _graph_str(nx.DiGraph()) == "╙" + assert _graph_str(nx.Graph()) == "╙" + assert _graph_str(nx.DiGraph(), ascii_only=True) == "+" + assert _graph_str(nx.Graph(), ascii_only=True) == "+" + + +def test_write_network_text_within_forest_glyph(): + g = nx.DiGraph() + g.add_nodes_from([1, 2, 3, 4]) + g.add_edge(2, 4) + lines = [] + write = lines.append + nx.write_network_text(g, path=write, end="") + nx.write_network_text(g, path=write, ascii_only=True, end="") + text = "\n".join(lines) + target = dedent( + """ + ╟── 1 + ╟── 2 + ╎ └─╼ 4 + ╙── 3 + +-- 1 + +-- 2 + : L-> 4 + +-- 3 + """ + ).strip() + assert text == target + + +def test_generate_network_text_directed_multi_tree(): + tree1 = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + tree2 = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + forest = nx.disjoint_union_all([tree1, tree2]) + ret = "\n".join(nx.generate_network_text(forest)) + + target = dedent( + """ + ╟── 0 + ╎ ├─╼ 1 + ╎ │ ├─╼ 3 + ╎ │ └─╼ 4 + ╎ └─╼ 2 + ╎ ├─╼ 5 + ╎ └─╼ 6 + ╙── 7 + ├─╼ 8 + │ ├─╼ 10 + │ └─╼ 11 + └─╼ 9 + ├─╼ 12 + └─╼ 13 + """ + ).strip() + assert ret == target + + tree3 = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + forest = nx.disjoint_union_all([tree1, tree2, tree3]) + ret = "\n".join(nx.generate_network_text(forest, sources=[0, 14, 7])) + + target = dedent( + """ + ╟── 0 + ╎ ├─╼ 1 + ╎ │ ├─╼ 3 + ╎ │ └─╼ 4 + ╎ └─╼ 2 + ╎ ├─╼ 5 + ╎ └─╼ 6 + ╟── 14 + ╎ ├─╼ 15 + ╎ │ ├─╼ 17 + ╎ │ └─╼ 18 + ╎ └─╼ 16 + ╎ ├─╼ 19 + ╎ └─╼ 20 + ╙── 7 + ├─╼ 8 + │ ├─╼ 10 + │ └─╼ 11 + └─╼ 9 + ├─╼ 12 + └─╼ 13 + """ + ).strip() + assert ret == target + + ret = 
"\n".join( + nx.generate_network_text(forest, sources=[0, 14, 7], ascii_only=True) + ) + + target = dedent( + """ + +-- 0 + : |-> 1 + : | |-> 3 + : | L-> 4 + : L-> 2 + : |-> 5 + : L-> 6 + +-- 14 + : |-> 15 + : | |-> 17 + : | L-> 18 + : L-> 16 + : |-> 19 + : L-> 20 + +-- 7 + |-> 8 + | |-> 10 + | L-> 11 + L-> 9 + |-> 12 + L-> 13 + """ + ).strip() + assert ret == target + + +def test_generate_network_text_undirected_multi_tree(): + tree1 = nx.balanced_tree(r=2, h=2, create_using=nx.Graph) + tree2 = nx.balanced_tree(r=2, h=2, create_using=nx.Graph) + tree2 = nx.relabel_nodes(tree2, {n: n + len(tree1) for n in tree2.nodes}) + forest = nx.union(tree1, tree2) + ret = "\n".join(nx.generate_network_text(forest, sources=[0, 7])) + + target = dedent( + """ + ╟── 0 + ╎ ├── 1 + ╎ │ ├── 3 + ╎ │ └── 4 + ╎ └── 2 + ╎ ├── 5 + ╎ └── 6 + ╙── 7 + ├── 8 + │ ├── 10 + │ └── 11 + └── 9 + ├── 12 + └── 13 + """ + ).strip() + assert ret == target + + ret = "\n".join(nx.generate_network_text(forest, sources=[0, 7], ascii_only=True)) + + target = dedent( + """ + +-- 0 + : |-- 1 + : | |-- 3 + : | L-- 4 + : L-- 2 + : |-- 5 + : L-- 6 + +-- 7 + |-- 8 + | |-- 10 + | L-- 11 + L-- 9 + |-- 12 + L-- 13 + """ + ).strip() + assert ret == target + + +def test_generate_network_text_forest_undirected(): + # Create a directed forest + graph = nx.balanced_tree(r=2, h=2, create_using=nx.Graph) + + node_target0 = dedent( + """ + ╙── 0 + ├── 1 + │ ├── 3 + │ └── 4 + └── 2 + ├── 5 + └── 6 + """ + ).strip() + + # defined starting point + ret = "\n".join(nx.generate_network_text(graph, sources=[0])) + assert ret == node_target0 + + # defined starting point + node_target2 = dedent( + """ + ╙── 2 + ├── 0 + │ └── 1 + │ ├── 3 + │ └── 4 + ├── 5 + └── 6 + """ + ).strip() + ret = "\n".join(nx.generate_network_text(graph, sources=[2])) + assert ret == node_target2 + + +def test_generate_network_text_overspecified_sources(): + """ + When sources are directly specified, we won't be able to determine when we + are in the last component, so there will always be a trailing, leftmost + pipe. + """ + graph = nx.disjoint_union_all( + [ + nx.balanced_tree(r=2, h=1, create_using=nx.DiGraph), + nx.balanced_tree(r=1, h=2, create_using=nx.DiGraph), + nx.balanced_tree(r=2, h=1, create_using=nx.DiGraph), + ] + ) + + # defined starting point + target1 = dedent( + """ + ╟── 0 + ╎ ├─╼ 1 + ╎ └─╼ 2 + ╟── 3 + ╎ └─╼ 4 + ╎ └─╼ 5 + ╟── 6 + ╎ ├─╼ 7 + ╎ └─╼ 8 + """ + ).strip() + + target2 = dedent( + """ + ╟── 0 + ╎ ├─╼ 1 + ╎ └─╼ 2 + ╟── 3 + ╎ └─╼ 4 + ╎ └─╼ 5 + ╙── 6 + ├─╼ 7 + └─╼ 8 + """ + ).strip() + + got1 = "\n".join(nx.generate_network_text(graph, sources=graph.nodes)) + got2 = "\n".join(nx.generate_network_text(graph)) + assert got1 == target1 + assert got2 == target2 + + +def test_write_network_text_iterative_add_directed_edges(): + """ + Walk through the cases going from a disconnected to fully connected graph + """ + graph = nx.DiGraph() + graph.add_nodes_from([1, 2, 3, 4]) + lines = [] + write = lines.append + write("--- initial state ---") + nx.write_network_text(graph, path=write, end="") + for i, j in product(graph.nodes, graph.nodes): + write(f"--- add_edge({i}, {j}) ---") + graph.add_edge(i, j) + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + # defined starting point + target = dedent( + """ + --- initial state --- + ╟── 1 + ╟── 2 + ╟── 3 + ╙── 4 + --- add_edge(1, 1) --- + ╟── 1 ╾ 1 + ╎ └─╼ ... + ╟── 2 + ╟── 3 + ╙── 4 + --- add_edge(1, 2) --- + ╟── 1 ╾ 1 + ╎ ├─╼ 2 + ╎ └─╼ ... 
+ ╟── 3 + ╙── 4 + --- add_edge(1, 3) --- + ╟── 1 ╾ 1 + ╎ ├─╼ 2 + ╎ ├─╼ 3 + ╎ └─╼ ... + ╙── 4 + --- add_edge(1, 4) --- + ╙── 1 ╾ 1 + ├─╼ 2 + ├─╼ 3 + ├─╼ 4 + └─╼ ... + --- add_edge(2, 1) --- + ╙── 2 ╾ 1 + └─╼ 1 ╾ 1 + ├─╼ 3 + ├─╼ 4 + └─╼ ... + --- add_edge(2, 2) --- + ╙── 1 ╾ 1, 2 + ├─╼ 2 ╾ 2 + │ └─╼ ... + ├─╼ 3 + ├─╼ 4 + └─╼ ... + --- add_edge(2, 3) --- + ╙── 1 ╾ 1, 2 + ├─╼ 2 ╾ 2 + │ ├─╼ 3 ╾ 1 + │ └─╼ ... + ├─╼ 4 + └─╼ ... + --- add_edge(2, 4) --- + ╙── 1 ╾ 1, 2 + ├─╼ 2 ╾ 2 + │ ├─╼ 3 ╾ 1 + │ ├─╼ 4 ╾ 1 + │ └─╼ ... + └─╼ ... + --- add_edge(3, 1) --- + ╙── 2 ╾ 1, 2 + ├─╼ 1 ╾ 1, 3 + │ ├─╼ 3 ╾ 2 + │ │ └─╼ ... + │ ├─╼ 4 ╾ 2 + │ └─╼ ... + └─╼ ... + --- add_edge(3, 2) --- + ╙── 3 ╾ 1, 2 + ├─╼ 1 ╾ 1, 2 + │ ├─╼ 2 ╾ 2, 3 + │ │ ├─╼ 4 ╾ 1 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(3, 3) --- + ╙── 1 ╾ 1, 2, 3 + ├─╼ 2 ╾ 2, 3 + │ ├─╼ 3 ╾ 1, 3 + │ │ └─╼ ... + │ ├─╼ 4 ╾ 1 + │ └─╼ ... + └─╼ ... + --- add_edge(3, 4) --- + ╙── 1 ╾ 1, 2, 3 + ├─╼ 2 ╾ 2, 3 + │ ├─╼ 3 ╾ 1, 3 + │ │ ├─╼ 4 ╾ 1, 2 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(4, 1) --- + ╙── 2 ╾ 1, 2, 3 + ├─╼ 1 ╾ 1, 3, 4 + │ ├─╼ 3 ╾ 2, 3 + │ │ ├─╼ 4 ╾ 1, 2 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(4, 2) --- + ╙── 3 ╾ 1, 2, 3 + ├─╼ 1 ╾ 1, 2, 4 + │ ├─╼ 2 ╾ 2, 3, 4 + │ │ ├─╼ 4 ╾ 1, 3 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(4, 3) --- + ╙── 4 ╾ 1, 2, 3 + ├─╼ 1 ╾ 1, 2, 3 + │ ├─╼ 2 ╾ 2, 3, 4 + │ │ ├─╼ 3 ╾ 1, 3, 4 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(4, 4) --- + ╙── 1 ╾ 1, 2, 3, 4 + ├─╼ 2 ╾ 2, 3, 4 + │ ├─╼ 3 ╾ 1, 3, 4 + │ │ ├─╼ 4 ╾ 1, 2, 4 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + """ + ).strip() + assert target == text + + +def test_write_network_text_iterative_add_undirected_edges(): + """ + Walk through the cases going from a disconnected to fully connected graph + """ + graph = nx.Graph() + graph.add_nodes_from([1, 2, 3, 4]) + lines = [] + write = lines.append + write("--- initial state ---") + nx.write_network_text(graph, path=write, end="") + for i, j in product(graph.nodes, graph.nodes): + if i == j: + continue + write(f"--- add_edge({i}, {j}) ---") + graph.add_edge(i, j) + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + --- initial state --- + ╟── 1 + ╟── 2 + ╟── 3 + ╙── 4 + --- add_edge(1, 2) --- + ╟── 3 + ╟── 4 + ╙── 1 + └── 2 + --- add_edge(1, 3) --- + ╟── 4 + ╙── 2 + └── 1 + └── 3 + --- add_edge(1, 4) --- + ╙── 2 + └── 1 + ├── 3 + └── 4 + --- add_edge(2, 1) --- + ╙── 2 + └── 1 + ├── 3 + └── 4 + --- add_edge(2, 3) --- + ╙── 4 + └── 1 + ├── 2 + │ └── 3 ─ 1 + └── ... + --- add_edge(2, 4) --- + ╙── 3 + ├── 1 + │ ├── 2 ─ 3 + │ │ └── 4 ─ 1 + │ └── ... + └── ... + --- add_edge(3, 1) --- + ╙── 3 + ├── 1 + │ ├── 2 ─ 3 + │ │ └── 4 ─ 1 + │ └── ... + └── ... + --- add_edge(3, 2) --- + ╙── 3 + ├── 1 + │ ├── 2 ─ 3 + │ │ └── 4 ─ 1 + │ └── ... + └── ... + --- add_edge(3, 4) --- + ╙── 1 + ├── 2 + │ ├── 3 ─ 1 + │ │ └── 4 ─ 1, 2 + │ └── ... + └── ... + --- add_edge(4, 1) --- + ╙── 1 + ├── 2 + │ ├── 3 ─ 1 + │ │ └── 4 ─ 1, 2 + │ └── ... + └── ... + --- add_edge(4, 2) --- + ╙── 1 + ├── 2 + │ ├── 3 ─ 1 + │ │ └── 4 ─ 1, 2 + │ └── ... + └── ... + --- add_edge(4, 3) --- + ╙── 1 + ├── 2 + │ ├── 3 ─ 1 + │ │ └── 4 ─ 1, 2 + │ └── ... + └── ... 
+ """ + ).strip() + assert target == text + + +def test_write_network_text_iterative_add_random_directed_edges(): + """ + Walk through the cases going from a disconnected to fully connected graph + """ + + rng = random.Random(724466096) + graph = nx.DiGraph() + graph.add_nodes_from([1, 2, 3, 4, 5]) + possible_edges = list(product(graph.nodes, graph.nodes)) + rng.shuffle(possible_edges) + graph.add_edges_from(possible_edges[0:8]) + lines = [] + write = lines.append + write("--- initial state ---") + nx.write_network_text(graph, path=write, end="") + for i, j in possible_edges[8:12]: + write(f"--- add_edge({i}, {j}) ---") + graph.add_edge(i, j) + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + --- initial state --- + ╙── 3 ╾ 5 + └─╼ 2 ╾ 2 + ├─╼ 4 ╾ 4 + │ ├─╼ 5 + │ │ ├─╼ 1 ╾ 1 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(4, 1) --- + ╙── 3 ╾ 5 + └─╼ 2 ╾ 2 + ├─╼ 4 ╾ 4 + │ ├─╼ 5 + │ │ ├─╼ 1 ╾ 1, 4 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(2, 1) --- + ╙── 3 ╾ 5 + └─╼ 2 ╾ 2 + ├─╼ 4 ╾ 4 + │ ├─╼ 5 + │ │ ├─╼ 1 ╾ 1, 4, 2 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(5, 2) --- + ╙── 3 ╾ 5 + └─╼ 2 ╾ 2, 5 + ├─╼ 4 ╾ 4 + │ ├─╼ 5 + │ │ ├─╼ 1 ╾ 1, 4, 2 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- add_edge(1, 5) --- + ╙── 3 ╾ 5 + └─╼ 2 ╾ 2, 5 + ├─╼ 4 ╾ 4 + │ ├─╼ 5 ╾ 1 + │ │ ├─╼ 1 ╾ 1, 4, 2 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + + """ + ).strip() + assert target == text + + +def test_write_network_text_nearly_forest(): + g = nx.DiGraph() + g.add_edge(1, 2) + g.add_edge(1, 5) + g.add_edge(2, 3) + g.add_edge(3, 4) + g.add_edge(5, 6) + g.add_edge(6, 7) + g.add_edge(6, 8) + orig = g.copy() + g.add_edge(1, 8) # forward edge + g.add_edge(4, 2) # back edge + g.add_edge(6, 3) # cross edge + lines = [] + write = lines.append + write("--- directed case ---") + nx.write_network_text(orig, path=write, end="") + write("--- add (1, 8), (4, 2), (6, 3) ---") + nx.write_network_text(g, path=write, end="") + write("--- undirected case ---") + nx.write_network_text(orig.to_undirected(), path=write, sources=[1], end="") + write("--- add (1, 8), (4, 2), (6, 3) ---") + nx.write_network_text(g.to_undirected(), path=write, sources=[1], end="") + text = "\n".join(lines) + target = dedent( + """ + --- directed case --- + ╙── 1 + ├─╼ 2 + │ └─╼ 3 + │ └─╼ 4 + └─╼ 5 + └─╼ 6 + ├─╼ 7 + └─╼ 8 + --- add (1, 8), (4, 2), (6, 3) --- + ╙── 1 + ├─╼ 2 ╾ 4 + │ └─╼ 3 ╾ 6 + │ └─╼ 4 + │ └─╼ ... + ├─╼ 5 + │ └─╼ 6 + │ ├─╼ 7 + │ ├─╼ 8 ╾ 1 + │ └─╼ ... + └─╼ ... + --- undirected case --- + ╙── 1 + ├── 2 + │ └── 3 + │ └── 4 + └── 5 + └── 6 + ├── 7 + └── 8 + --- add (1, 8), (4, 2), (6, 3) --- + ╙── 1 + ├── 2 + │ ├── 3 + │ │ ├── 4 ─ 2 + │ │ └── 6 + │ │ ├── 5 ─ 1 + │ │ ├── 7 + │ │ └── 8 ─ 1 + │ └── ... + └── ... + """ + ).strip() + assert target == text + + +def test_write_network_text_complete_graph_ascii_only(): + graph = nx.generators.complete_graph(5, create_using=nx.DiGraph) + lines = [] + write = lines.append + write("--- directed case ---") + nx.write_network_text(graph, path=write, ascii_only=True, end="") + write("--- undirected case ---") + nx.write_network_text(graph.to_undirected(), path=write, ascii_only=True, end="") + text = "\n".join(lines) + target = dedent( + """ + --- directed case --- + +-- 0 <- 1, 2, 3, 4 + |-> 1 <- 2, 3, 4 + | |-> 2 <- 0, 3, 4 + | | |-> 3 <- 0, 1, 4 + | | | |-> 4 <- 0, 1, 2 + | | | | L-> ... + | | | L-> ... + | | L-> ... + | L-> ... + L-> ... 
+ --- undirected case --- + +-- 0 + |-- 1 + | |-- 2 - 0 + | | |-- 3 - 0, 1 + | | | L-- 4 - 0, 1, 2 + | | L-- ... + | L-- ... + L-- ... + """ + ).strip() + assert target == text + + +def test_write_network_text_with_labels(): + graph = nx.generators.complete_graph(5, create_using=nx.DiGraph) + for n in graph.nodes: + graph.nodes[n]["label"] = f"Node(n={n})" + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, with_labels=True, ascii_only=False, end="") + text = "\n".join(lines) + # Non trees with labels can get somewhat out of hand with network text + # because we need to immediately show every non-tree edge to the right + target = dedent( + """ + ╙── Node(n=0) ╾ Node(n=1), Node(n=2), Node(n=3), Node(n=4) + ├─╼ Node(n=1) ╾ Node(n=2), Node(n=3), Node(n=4) + │ ├─╼ Node(n=2) ╾ Node(n=0), Node(n=3), Node(n=4) + │ │ ├─╼ Node(n=3) ╾ Node(n=0), Node(n=1), Node(n=4) + │ │ │ ├─╼ Node(n=4) ╾ Node(n=0), Node(n=1), Node(n=2) + │ │ │ │ └─╼ ... + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + """ + ).strip() + assert target == text + + +def test_write_network_text_complete_graphs(): + lines = [] + write = lines.append + for k in [0, 1, 2, 3, 4, 5]: + g = nx.generators.complete_graph(k) + write(f"--- undirected k={k} ---") + nx.write_network_text(g, path=write, end="") + + for k in [0, 1, 2, 3, 4, 5]: + g = nx.generators.complete_graph(k, nx.DiGraph) + write(f"--- directed k={k} ---") + nx.write_network_text(g, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + --- undirected k=0 --- + ╙ + --- undirected k=1 --- + ╙── 0 + --- undirected k=2 --- + ╙── 0 + └── 1 + --- undirected k=3 --- + ╙── 0 + ├── 1 + │ └── 2 ─ 0 + └── ... + --- undirected k=4 --- + ╙── 0 + ├── 1 + │ ├── 2 ─ 0 + │ │ └── 3 ─ 0, 1 + │ └── ... + └── ... + --- undirected k=5 --- + ╙── 0 + ├── 1 + │ ├── 2 ─ 0 + │ │ ├── 3 ─ 0, 1 + │ │ │ └── 4 ─ 0, 1, 2 + │ │ └── ... + │ └── ... + └── ... + --- directed k=0 --- + ╙ + --- directed k=1 --- + ╙── 0 + --- directed k=2 --- + ╙── 0 ╾ 1 + └─╼ 1 + └─╼ ... + --- directed k=3 --- + ╙── 0 ╾ 1, 2 + ├─╼ 1 ╾ 2 + │ ├─╼ 2 ╾ 0 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- directed k=4 --- + ╙── 0 ╾ 1, 2, 3 + ├─╼ 1 ╾ 2, 3 + │ ├─╼ 2 ╾ 0, 3 + │ │ ├─╼ 3 ╾ 0, 1 + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- directed k=5 --- + ╙── 0 ╾ 1, 2, 3, 4 + ├─╼ 1 ╾ 2, 3, 4 + │ ├─╼ 2 ╾ 0, 3, 4 + │ │ ├─╼ 3 ╾ 0, 1, 4 + │ │ │ ├─╼ 4 ╾ 0, 1, 2 + │ │ │ │ └─╼ ... + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + """ + ).strip() + assert target == text + + +def test_write_network_text_multiple_sources(): + g = nx.DiGraph() + g.add_edge(1, 2) + g.add_edge(1, 3) + g.add_edge(2, 4) + g.add_edge(3, 5) + g.add_edge(3, 6) + g.add_edge(5, 4) + g.add_edge(4, 1) + g.add_edge(1, 5) + lines = [] + write = lines.append + # Use each node as the starting point to demonstrate how the representation + # changes. + nodes = sorted(g.nodes()) + for n in nodes: + write(f"--- source node: {n} ---") + nx.write_network_text(g, path=write, sources=[n], end="") + text = "\n".join(lines) + target = dedent( + """ + --- source node: 1 --- + ╙── 1 ╾ 4 + ├─╼ 2 + │ └─╼ 4 ╾ 5 + │ └─╼ ... + ├─╼ 3 + │ ├─╼ 5 ╾ 1 + │ │ └─╼ ... + │ └─╼ 6 + └─╼ ... + --- source node: 2 --- + ╙── 2 ╾ 1 + └─╼ 4 ╾ 5 + └─╼ 1 + ├─╼ 3 + │ ├─╼ 5 ╾ 1 + │ │ └─╼ ... + │ └─╼ 6 + └─╼ ... + --- source node: 3 --- + ╙── 3 ╾ 1 + ├─╼ 5 ╾ 1 + │ └─╼ 4 ╾ 2 + │ └─╼ 1 + │ ├─╼ 2 + │ │ └─╼ ... + │ └─╼ ... + └─╼ 6 + --- source node: 4 --- + ╙── 4 ╾ 2, 5 + └─╼ 1 + ├─╼ 2 + │ └─╼ ... + ├─╼ 3 + │ ├─╼ 5 ╾ 1 + │ │ └─╼ ... + │ └─╼ 6 + └─╼ ... 
+ --- source node: 5 --- + ╙── 5 ╾ 3, 1 + └─╼ 4 ╾ 2 + └─╼ 1 + ├─╼ 2 + │ └─╼ ... + ├─╼ 3 + │ ├─╼ 6 + │ └─╼ ... + └─╼ ... + --- source node: 6 --- + ╙── 6 ╾ 3 + """ + ).strip() + assert target == text + + +def test_write_network_text_star_graph(): + graph = nx.star_graph(5, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 1 + └── 0 + ├── 2 + ├── 3 + ├── 4 + └── 5 + """ + ).strip() + assert target == text + + +def test_write_network_text_path_graph(): + graph = nx.path_graph(3, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 0 + └── 1 + └── 2 + """ + ).strip() + assert target == text + + +def test_write_network_text_lollipop_graph(): + graph = nx.lollipop_graph(4, 2, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 5 + └── 4 + └── 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ └── 2 ─ 0, 3 + │ └── ... + └── ... + """ + ).strip() + assert target == text + + +def test_write_network_text_wheel_graph(): + graph = nx.wheel_graph(7, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 1 + ├── 0 + │ ├── 2 ─ 1 + │ │ └── 3 ─ 0 + │ │ └── 4 ─ 0 + │ │ └── 5 ─ 0 + │ │ └── 6 ─ 0, 1 + │ └── ... + └── ... + """ + ).strip() + assert target == text + + +def test_write_network_text_circular_ladder_graph(): + graph = nx.circular_ladder_graph(4, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 0 + ├── 1 + │ ├── 2 + │ │ ├── 3 ─ 0 + │ │ │ └── 7 + │ │ │ ├── 6 ─ 2 + │ │ │ │ └── 5 ─ 1 + │ │ │ │ └── 4 ─ 0, 7 + │ │ │ └── ... + │ │ └── ... + │ └── ... + └── ... + """ + ).strip() + assert target == text + + +def test_write_network_text_dorogovtsev_goltsev_mendes_graph(): + graph = nx.dorogovtsev_goltsev_mendes_graph(4, create_using=nx.Graph) + lines = [] + write = lines.append + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + ╙── 15 + ├── 0 + │ ├── 1 ─ 15 + │ │ ├── 2 ─ 0 + │ │ │ ├── 4 ─ 0 + │ │ │ │ ├── 9 ─ 0 + │ │ │ │ │ ├── 22 ─ 0 + │ │ │ │ │ └── 38 ─ 4 + │ │ │ │ ├── 13 ─ 2 + │ │ │ │ │ ├── 34 ─ 2 + │ │ │ │ │ └── 39 ─ 4 + │ │ │ │ ├── 18 ─ 0 + │ │ │ │ ├── 30 ─ 2 + │ │ │ │ └── ... + │ │ │ ├── 5 ─ 1 + │ │ │ │ ├── 12 ─ 1 + │ │ │ │ │ ├── 29 ─ 1 + │ │ │ │ │ └── 40 ─ 5 + │ │ │ │ ├── 14 ─ 2 + │ │ │ │ │ ├── 35 ─ 2 + │ │ │ │ │ └── 41 ─ 5 + │ │ │ │ ├── 25 ─ 1 + │ │ │ │ ├── 31 ─ 2 + │ │ │ │ └── ... + │ │ │ ├── 7 ─ 0 + │ │ │ │ ├── 20 ─ 0 + │ │ │ │ └── 32 ─ 2 + │ │ │ ├── 10 ─ 1 + │ │ │ │ ├── 27 ─ 1 + │ │ │ │ └── 33 ─ 2 + │ │ │ ├── 16 ─ 0 + │ │ │ ├── 23 ─ 1 + │ │ │ └── ... + │ │ ├── 3 ─ 0 + │ │ │ ├── 8 ─ 0 + │ │ │ │ ├── 21 ─ 0 + │ │ │ │ └── 36 ─ 3 + │ │ │ ├── 11 ─ 1 + │ │ │ │ ├── 28 ─ 1 + │ │ │ │ └── 37 ─ 3 + │ │ │ ├── 17 ─ 0 + │ │ │ ├── 24 ─ 1 + │ │ │ └── ... + │ │ ├── 6 ─ 0 + │ │ │ ├── 19 ─ 0 + │ │ │ └── 26 ─ 1 + │ │ └── ... + │ └── ... + └── ... 
+ """ + ).strip() + assert target == text + + +def test_write_network_text_tree_max_depth(): + orig = nx.balanced_tree(r=1, h=3, create_using=nx.DiGraph) + lines = [] + write = lines.append + write("--- directed case, max_depth=0 ---") + nx.write_network_text(orig, path=write, end="", max_depth=0) + write("--- directed case, max_depth=1 ---") + nx.write_network_text(orig, path=write, end="", max_depth=1) + write("--- directed case, max_depth=2 ---") + nx.write_network_text(orig, path=write, end="", max_depth=2) + write("--- directed case, max_depth=3 ---") + nx.write_network_text(orig, path=write, end="", max_depth=3) + write("--- directed case, max_depth=4 ---") + nx.write_network_text(orig, path=write, end="", max_depth=4) + write("--- undirected case, max_depth=0 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0) + write("--- undirected case, max_depth=1 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1) + write("--- undirected case, max_depth=2 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2) + write("--- undirected case, max_depth=3 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3) + write("--- undirected case, max_depth=4 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=4) + text = "\n".join(lines) + target = dedent( + """ + --- directed case, max_depth=0 --- + ╙ ... + --- directed case, max_depth=1 --- + ╙── 0 + └─╼ ... + --- directed case, max_depth=2 --- + ╙── 0 + └─╼ 1 + └─╼ ... + --- directed case, max_depth=3 --- + ╙── 0 + └─╼ 1 + └─╼ 2 + └─╼ ... + --- directed case, max_depth=4 --- + ╙── 0 + └─╼ 1 + └─╼ 2 + └─╼ 3 + --- undirected case, max_depth=0 --- + ╙ ... + --- undirected case, max_depth=1 --- + ╙── 0 ─ 1 + └── ... + --- undirected case, max_depth=2 --- + ╙── 0 + └── 1 ─ 2 + └── ... + --- undirected case, max_depth=3 --- + ╙── 0 + └── 1 + └── 2 ─ 3 + └── ... 
+ --- undirected case, max_depth=4 --- + ╙── 0 + └── 1 + └── 2 + └── 3 + """ + ).strip() + assert target == text + + +def test_write_network_text_graph_max_depth(): + orig = nx.erdos_renyi_graph(10, 0.15, directed=True, seed=40392) + lines = [] + write = lines.append + write("--- directed case, max_depth=None ---") + nx.write_network_text(orig, path=write, end="", max_depth=None) + write("--- directed case, max_depth=0 ---") + nx.write_network_text(orig, path=write, end="", max_depth=0) + write("--- directed case, max_depth=1 ---") + nx.write_network_text(orig, path=write, end="", max_depth=1) + write("--- directed case, max_depth=2 ---") + nx.write_network_text(orig, path=write, end="", max_depth=2) + write("--- directed case, max_depth=3 ---") + nx.write_network_text(orig, path=write, end="", max_depth=3) + write("--- undirected case, max_depth=None ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=None) + write("--- undirected case, max_depth=0 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0) + write("--- undirected case, max_depth=1 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1) + write("--- undirected case, max_depth=2 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2) + write("--- undirected case, max_depth=3 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3) + text = "\n".join(lines) + target = dedent( + """ + --- directed case, max_depth=None --- + ╟── 4 + ╎ ├─╼ 0 ╾ 3 + ╎ ├─╼ 5 ╾ 7 + ╎ │ └─╼ 3 + ╎ │ ├─╼ 1 ╾ 9 + ╎ │ │ └─╼ 9 ╾ 6 + ╎ │ │ ├─╼ 6 + ╎ │ │ │ └─╼ ... + ╎ │ │ ├─╼ 7 ╾ 4 + ╎ │ │ │ ├─╼ 2 + ╎ │ │ │ └─╼ ... + ╎ │ │ └─╼ ... + ╎ │ └─╼ ... + ╎ └─╼ ... + ╙── 8 + --- directed case, max_depth=0 --- + ╙ ... + --- directed case, max_depth=1 --- + ╟── 4 + ╎ └─╼ ... + ╙── 8 + --- directed case, max_depth=2 --- + ╟── 4 + ╎ ├─╼ 0 ╾ 3 + ╎ ├─╼ 5 ╾ 7 + ╎ │ └─╼ ... + ╎ └─╼ 7 ╾ 9 + ╎ └─╼ ... + ╙── 8 + --- directed case, max_depth=3 --- + ╟── 4 + ╎ ├─╼ 0 ╾ 3 + ╎ ├─╼ 5 ╾ 7 + ╎ │ └─╼ 3 + ╎ │ └─╼ ... + ╎ └─╼ 7 ╾ 9 + ╎ ├─╼ 2 + ╎ └─╼ ... + ╙── 8 + --- undirected case, max_depth=None --- + ╟── 8 + ╙── 2 + └── 7 + ├── 4 + │ ├── 0 + │ │ └── 3 + │ │ ├── 1 + │ │ │ └── 9 ─ 7 + │ │ │ └── 6 + │ │ └── 5 ─ 4, 7 + │ └── ... + └── ... + --- undirected case, max_depth=0 --- + ╙ ... + --- undirected case, max_depth=1 --- + ╟── 8 + ╙── 2 ─ 7 + └── ... + --- undirected case, max_depth=2 --- + ╟── 8 + ╙── 2 + └── 7 ─ 4, 5, 9 + └── ... + --- undirected case, max_depth=3 --- + ╟── 8 + ╙── 2 + └── 7 + ├── 4 ─ 0, 5 + │ └── ... + ├── 5 ─ 4, 3 + │ └── ... + └── 9 ─ 1, 6 + └── ... 
+ """ + ).strip() + assert target == text + + +def test_write_network_text_clique_max_depth(): + orig = nx.complete_graph(5, nx.DiGraph) + lines = [] + write = lines.append + write("--- directed case, max_depth=None ---") + nx.write_network_text(orig, path=write, end="", max_depth=None) + write("--- directed case, max_depth=0 ---") + nx.write_network_text(orig, path=write, end="", max_depth=0) + write("--- directed case, max_depth=1 ---") + nx.write_network_text(orig, path=write, end="", max_depth=1) + write("--- directed case, max_depth=2 ---") + nx.write_network_text(orig, path=write, end="", max_depth=2) + write("--- directed case, max_depth=3 ---") + nx.write_network_text(orig, path=write, end="", max_depth=3) + write("--- undirected case, max_depth=None ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=None) + write("--- undirected case, max_depth=0 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=0) + write("--- undirected case, max_depth=1 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=1) + write("--- undirected case, max_depth=2 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=2) + write("--- undirected case, max_depth=3 ---") + nx.write_network_text(orig.to_undirected(), path=write, end="", max_depth=3) + text = "\n".join(lines) + target = dedent( + """ + --- directed case, max_depth=None --- + ╙── 0 ╾ 1, 2, 3, 4 + ├─╼ 1 ╾ 2, 3, 4 + │ ├─╼ 2 ╾ 0, 3, 4 + │ │ ├─╼ 3 ╾ 0, 1, 4 + │ │ │ ├─╼ 4 ╾ 0, 1, 2 + │ │ │ │ └─╼ ... + │ │ │ └─╼ ... + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- directed case, max_depth=0 --- + ╙ ... + --- directed case, max_depth=1 --- + ╙── 0 ╾ 1, 2, 3, 4 + └─╼ ... + --- directed case, max_depth=2 --- + ╙── 0 ╾ 1, 2, 3, 4 + ├─╼ 1 ╾ 2, 3, 4 + │ └─╼ ... + ├─╼ 2 ╾ 1, 3, 4 + │ └─╼ ... + ├─╼ 3 ╾ 1, 2, 4 + │ └─╼ ... + └─╼ 4 ╾ 1, 2, 3 + └─╼ ... + --- directed case, max_depth=3 --- + ╙── 0 ╾ 1, 2, 3, 4 + ├─╼ 1 ╾ 2, 3, 4 + │ ├─╼ 2 ╾ 0, 3, 4 + │ │ └─╼ ... + │ ├─╼ 3 ╾ 0, 2, 4 + │ │ └─╼ ... + │ ├─╼ 4 ╾ 0, 2, 3 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... + --- undirected case, max_depth=None --- + ╙── 0 + ├── 1 + │ ├── 2 ─ 0 + │ │ ├── 3 ─ 0, 1 + │ │ │ └── 4 ─ 0, 1, 2 + │ │ └── ... + │ └── ... + └── ... + --- undirected case, max_depth=0 --- + ╙ ... + --- undirected case, max_depth=1 --- + ╙── 0 ─ 1, 2, 3, 4 + └── ... + --- undirected case, max_depth=2 --- + ╙── 0 + ├── 1 ─ 2, 3, 4 + │ └── ... + ├── 2 ─ 1, 3, 4 + │ └── ... + ├── 3 ─ 1, 2, 4 + │ └── ... + └── 4 ─ 1, 2, 3 + --- undirected case, max_depth=3 --- + ╙── 0 + ├── 1 + │ ├── 2 ─ 0, 3, 4 + │ │ └── ... + │ ├── 3 ─ 0, 2, 4 + │ │ └── ... + │ └── 4 ─ 0, 2, 3 + └── ... 
+ """ + ).strip() + assert target == text + + +def test_write_network_text_custom_label(): + # Create a directed forest with labels + graph = nx.erdos_renyi_graph(5, 0.4, directed=True, seed=359222358) + for node in graph.nodes: + graph.nodes[node]["label"] = f"Node({node})" + graph.nodes[node]["chr"] = chr(node + ord("a") - 1) + if node % 2 == 0: + graph.nodes[node]["part"] = chr(node + ord("a")) + + lines = [] + write = lines.append + write("--- when with_labels=True, uses the 'label' attr ---") + nx.write_network_text(graph, path=write, with_labels=True, end="", max_depth=None) + write("--- when with_labels=False, uses str(node) value ---") + nx.write_network_text(graph, path=write, with_labels=False, end="", max_depth=None) + write("--- when with_labels is a string, use that attr ---") + nx.write_network_text(graph, path=write, with_labels="chr", end="", max_depth=None) + write("--- fallback to str(node) when the attr does not exist ---") + nx.write_network_text(graph, path=write, with_labels="part", end="", max_depth=None) + + text = "\n".join(lines) + target = dedent( + """ + --- when with_labels=True, uses the 'label' attr --- + ╙── Node(1) + └─╼ Node(3) ╾ Node(2) + ├─╼ Node(0) + │ ├─╼ Node(2) ╾ Node(3), Node(4) + │ │ └─╼ ... + │ └─╼ Node(4) + │ └─╼ ... + └─╼ ... + --- when with_labels=False, uses str(node) value --- + ╙── 1 + └─╼ 3 ╾ 2 + ├─╼ 0 + │ ├─╼ 2 ╾ 3, 4 + │ │ └─╼ ... + │ └─╼ 4 + │ └─╼ ... + └─╼ ... + --- when with_labels is a string, use that attr --- + ╙── a + └─╼ c ╾ b + ├─╼ ` + │ ├─╼ b ╾ c, d + │ │ └─╼ ... + │ └─╼ d + │ └─╼ ... + └─╼ ... + --- fallback to str(node) when the attr does not exist --- + ╙── 1 + └─╼ 3 ╾ c + ├─╼ a + │ ├─╼ c ╾ 3, e + │ │ └─╼ ... + │ └─╼ e + │ └─╼ ... + └─╼ ... + """ + ).strip() + assert target == text + + +def test_write_network_text_vertical_chains(): + graph1 = nx.lollipop_graph(4, 2, create_using=nx.Graph) + graph1.add_edge(0, -1) + graph1.add_edge(-1, -2) + graph1.add_edge(-2, -3) + + graph2 = graph1.to_directed() + graph2.remove_edges_from([(u, v) for u, v in graph2.edges if v > u]) + + lines = [] + write = lines.append + write("--- Undirected UTF ---") + nx.write_network_text(graph1, path=write, end="", vertical_chains=True) + write("--- Undirected ASCI ---") + nx.write_network_text( + graph1, path=write, end="", vertical_chains=True, ascii_only=True + ) + write("--- Directed UTF ---") + nx.write_network_text(graph2, path=write, end="", vertical_chains=True) + write("--- Directed ASCI ---") + nx.write_network_text( + graph2, path=write, end="", vertical_chains=True, ascii_only=True + ) + + text = "\n".join(lines) + target = dedent( + """ + --- Undirected UTF --- + ╙── 5 + │ + 4 + │ + 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ │ + │ │ 2 ─ 0, 3 + │ ├── -1 + │ │ │ + │ │ -2 + │ │ │ + │ │ -3 + │ └── ... + └── ... + --- Undirected ASCI --- + +-- 5 + | + 4 + | + 3 + |-- 0 + | |-- 1 - 3 + | | | + | | 2 - 0, 3 + | |-- -1 + | | | + | | -2 + | | | + | | -3 + | L-- ... + L-- ... + --- Directed UTF --- + ╙── 5 + ╽ + 4 + ╽ + 3 + ├─╼ 0 ╾ 1, 2 + │ ╽ + │ -1 + │ ╽ + │ -2 + │ ╽ + │ -3 + ├─╼ 1 ╾ 2 + │ └─╼ ... + └─╼ 2 + └─╼ ... + --- Directed ASCI --- + +-- 5 + ! + 4 + ! + 3 + |-> 0 <- 1, 2 + | ! + | -1 + | ! + | -2 + | ! + | -3 + |-> 1 <- 2 + | L-> ... + L-> 2 + L-> ... 
+ """ + ).strip() + assert target == text + + +def test_collapse_directed(): + graph = nx.balanced_tree(r=2, h=3, create_using=nx.DiGraph) + lines = [] + write = lines.append + write("--- Original ---") + nx.write_network_text(graph, path=write, end="") + graph.nodes[1]["collapse"] = True + write("--- Collapse Node 1 ---") + nx.write_network_text(graph, path=write, end="") + write("--- Add alternate path (5, 3) to collapsed zone") + graph.add_edge(5, 3) + nx.write_network_text(graph, path=write, end="") + write("--- Collapse Node 0 ---") + graph.nodes[0]["collapse"] = True + nx.write_network_text(graph, path=write, end="") + text = "\n".join(lines) + target = dedent( + """ + --- Original --- + ╙── 0 + ├─╼ 1 + │ ├─╼ 3 + │ │ ├─╼ 7 + │ │ └─╼ 8 + │ └─╼ 4 + │ ├─╼ 9 + │ └─╼ 10 + └─╼ 2 + ├─╼ 5 + │ ├─╼ 11 + │ └─╼ 12 + └─╼ 6 + ├─╼ 13 + └─╼ 14 + --- Collapse Node 1 --- + ╙── 0 + ├─╼ 1 + │ └─╼ ... + └─╼ 2 + ├─╼ 5 + │ ├─╼ 11 + │ └─╼ 12 + └─╼ 6 + ├─╼ 13 + └─╼ 14 + --- Add alternate path (5, 3) to collapsed zone + ╙── 0 + ├─╼ 1 + │ └─╼ ... + └─╼ 2 + ├─╼ 5 + │ ├─╼ 11 + │ ├─╼ 12 + │ └─╼ 3 ╾ 1 + │ ├─╼ 7 + │ └─╼ 8 + └─╼ 6 + ├─╼ 13 + └─╼ 14 + --- Collapse Node 0 --- + ╙── 0 + └─╼ ... + """ + ).strip() + assert target == text + + +def test_collapse_undirected(): + graph = nx.balanced_tree(r=2, h=3, create_using=nx.Graph) + lines = [] + write = lines.append + write("--- Original ---") + nx.write_network_text(graph, path=write, end="", sources=[0]) + graph.nodes[1]["collapse"] = True + write("--- Collapse Node 1 ---") + nx.write_network_text(graph, path=write, end="", sources=[0]) + write("--- Add alternate path (5, 3) to collapsed zone") + graph.add_edge(5, 3) + nx.write_network_text(graph, path=write, end="", sources=[0]) + write("--- Collapse Node 0 ---") + graph.nodes[0]["collapse"] = True + nx.write_network_text(graph, path=write, end="", sources=[0]) + text = "\n".join(lines) + target = dedent( + """ + --- Original --- + ╙── 0 + ├── 1 + │ ├── 3 + │ │ ├── 7 + │ │ └── 8 + │ └── 4 + │ ├── 9 + │ └── 10 + └── 2 + ├── 5 + │ ├── 11 + │ └── 12 + └── 6 + ├── 13 + └── 14 + --- Collapse Node 1 --- + ╙── 0 + ├── 1 ─ 3, 4 + │ └── ... + └── 2 + ├── 5 + │ ├── 11 + │ └── 12 + └── 6 + ├── 13 + └── 14 + --- Add alternate path (5, 3) to collapsed zone + ╙── 0 + ├── 1 ─ 3, 4 + │ └── ... + └── 2 + ├── 5 + │ ├── 11 + │ ├── 12 + │ └── 3 ─ 1 + │ ├── 7 + │ └── 8 + └── 6 + ├── 13 + └── 14 + --- Collapse Node 0 --- + ╙── 0 ─ 1, 2 + └── ... 
+ """ + ).strip() + assert target == text + + +def generate_test_graphs(): + """ + Generate a gauntlet of different test graphs with different properties + """ + import random + + rng = random.Random(976689776) + num_randomized = 3 + + for directed in [0, 1]: + cls = nx.DiGraph if directed else nx.Graph + + for num_nodes in range(17): + # Disconnected graph + graph = cls() + graph.add_nodes_from(range(num_nodes)) + yield graph + + # Randomize graphs + if num_nodes > 0: + for p in [0.1, 0.3, 0.5, 0.7, 0.9]: + for seed in range(num_randomized): + graph = nx.erdos_renyi_graph( + num_nodes, p, directed=directed, seed=rng + ) + yield graph + + yield nx.complete_graph(num_nodes, cls) + + yield nx.path_graph(3, create_using=cls) + yield nx.balanced_tree(r=1, h=3, create_using=cls) + if not directed: + yield nx.circular_ladder_graph(4, create_using=cls) + yield nx.star_graph(5, create_using=cls) + yield nx.lollipop_graph(4, 2, create_using=cls) + yield nx.wheel_graph(7, create_using=cls) + yield nx.dorogovtsev_goltsev_mendes_graph(4, create_using=cls) + + +@pytest.mark.parametrize( + ("vertical_chains", "ascii_only"), + tuple( + [ + (vertical_chains, ascii_only) + for vertical_chains in [0, 1] + for ascii_only in [0, 1] + ] + ), +) +def test_network_text_round_trip(vertical_chains, ascii_only): + """ + Write the graph to network text format, then parse it back in, assert it is + the same as the original graph. Passing this test is strong validation of + both the format generator and parser. + """ + from networkx.readwrite.text import _parse_network_text + + for graph in generate_test_graphs(): + graph = nx.relabel_nodes(graph, {n: str(n) for n in graph.nodes}) + lines = list( + nx.generate_network_text( + graph, vertical_chains=vertical_chains, ascii_only=ascii_only + ) + ) + new = _parse_network_text(lines) + try: + assert new.nodes == graph.nodes + assert new.edges == graph.edges + except Exception: + nx.write_network_text(graph) + raise diff --git a/.venv/lib/python3.12/site-packages/networkx/readwrite/text.py b/.venv/lib/python3.12/site-packages/networkx/readwrite/text.py new file mode 100644 index 0000000..6fce220 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/readwrite/text.py @@ -0,0 +1,851 @@ +""" +Text-based visual representations of graphs +""" + +import sys +from collections import defaultdict + +import networkx as nx +from networkx.utils import open_file + +__all__ = ["generate_network_text", "write_network_text"] + + +class BaseGlyphs: + @classmethod + def as_dict(cls): + return { + a: getattr(cls, a) + for a in dir(cls) + if not a.startswith("_") and a != "as_dict" + } + + +class AsciiBaseGlyphs(BaseGlyphs): + empty: str = "+" + newtree_last: str = "+-- " + newtree_mid: str = "+-- " + endof_forest: str = " " + within_forest: str = ": " + within_tree: str = "| " + + +class AsciiDirectedGlyphs(AsciiBaseGlyphs): + last: str = "L-> " + mid: str = "|-> " + backedge: str = "<-" + vertical_edge: str = "!" 
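+
+# For reference, ``BaseGlyphs.as_dict`` collects the drawing characters of a
+# glyph class (including inherited ones) into a plain mapping. A small
+# illustrative sketch, using only names defined in this module:
+#
+#     >>> AsciiDirectedGlyphs.as_dict()["mid"]
+#     '|-> '
+#     >>> sorted(AsciiDirectedGlyphs.as_dict()) == sorted(UtfDirectedGlyphs.as_dict())
+#     True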
+
+
+class AsciiUndirectedGlyphs(AsciiBaseGlyphs):
+    last: str = "L-- "
+    mid: str = "|-- "
+    backedge: str = "-"
+    vertical_edge: str = "|"
+
+
+class UtfBaseGlyphs(BaseGlyphs):
+    # Notes on available box and arrow characters
+    # https://en.wikipedia.org/wiki/Box-drawing_character
+    # https://stackoverflow.com/questions/2701192/triangle-arrow
+    empty: str = "╙"
+    newtree_last: str = "╙── "
+    newtree_mid: str = "╟── "
+    endof_forest: str = "    "
+    within_forest: str = "╎   "
+    within_tree: str = "│   "
+
+
+class UtfDirectedGlyphs(UtfBaseGlyphs):
+    last: str = "└─╼ "
+    mid: str = "├─╼ "
+    backedge: str = "╾"
+    vertical_edge: str = "╽"
+
+
+class UtfUndirectedGlyphs(UtfBaseGlyphs):
+    last: str = "└── "
+    mid: str = "├── "
+    backedge: str = "─"
+    vertical_edge: str = "│"
+
+
+def generate_network_text(
+    graph,
+    with_labels=True,
+    sources=None,
+    max_depth=None,
+    ascii_only=False,
+    vertical_chains=False,
+):
+    """Generate lines in the "network text" format
+
+    This works via a depth-first traversal of the graph and writing a line for
+    each unique node encountered. Non-tree edges are written to the right of
+    each node, and connection to a non-tree edge is indicated with an ellipsis.
+    This representation works best when the input graph is a forest, but any
+    graph can be represented.
+
+    This notation is original to networkx, although it is simple enough that it
+    may be known in existing literature. See #5602 for details. The procedure
+    is summarized as follows:
+
+    1. Given a set of source nodes (which can be specified, or automatically
+    discovered by finding the (strongly) connected components and choosing one
+    node with minimum degree from each), we traverse the graph in depth-first
+    order.
+
+    2. Each reachable node will be printed exactly once on its own line.
+
+    3. Edges are indicated in one of four ways:
+
+    a. a parent "L-style" connection on the upper left. This corresponds to
+    a traversal in the directed DFS tree.
+
+    b. a backref "<-style" connection shown directly on the right. For
+    directed graphs, these are drawn for any incoming edges to a node that
+    is not a parent edge. For undirected graphs, these are drawn for only
+    the non-parent edges that have already been represented (the edges that
+    have not been represented will be handled in the recursive case).
+
+    c. a child "L-style" connection on the lower right. Drawing of the
+    children is handled recursively.
+
+    d. if ``vertical_chains`` is true, and a parent node only has one child,
+    a "vertical-style" edge is drawn between them.
+
+    4. The children of each node (wrt the directed DFS tree) are drawn
+    underneath and to the right of it. In the case that a child node has
+    already been drawn, the connection is replaced with an ellipsis ("...")
+    to indicate that there are one or more connections represented elsewhere.
+
+    5. If a maximum depth is specified, an edge to nodes past this maximum
+    depth will be represented by an ellipsis.
+
+    6. If a node has a truthy "collapse" value, then we do not traverse past
+    that node.
+
+    Parameters
+    ----------
+    graph : nx.DiGraph | nx.Graph
+        Graph to represent
+
+    with_labels : bool | str
+        If True, will use the "label" attribute of a node to display if it
+        exists; otherwise it will use the node value itself. If given as a
+        string, then that attribute name will be used instead of "label".
+        Defaults to True.
+
+    sources : List
+        Specifies which nodes to start traversal from. Note: nodes that are not
+        reachable from one of these sources may not be shown.
If unspecified,
+        the minimal set of nodes needed to reach all others will be used.
+
+    max_depth : int | None
+        The maximum depth to traverse before stopping. Defaults to None.
+
+    ascii_only : Boolean
+        If True, only ASCII characters are used to construct the visualization.
+
+    vertical_chains : Boolean
+        If True, chains of nodes will be drawn vertically when possible.
+
+    Yields
+    ------
+    str : a line of generated text
+
+    Examples
+    --------
+    >>> graph = nx.path_graph(10)
+    >>> graph.add_node("A")
+    >>> graph.add_node("B")
+    >>> graph.add_node("C")
+    >>> graph.add_node("D")
+    >>> graph.add_edge(9, "A")
+    >>> graph.add_edge(9, "B")
+    >>> graph.add_edge(9, "C")
+    >>> graph.add_edge("C", "D")
+    >>> graph.add_edge("C", "E")
+    >>> graph.add_edge("C", "F")
+    >>> nx.write_network_text(graph)
+    ╙── 0
+        └── 1
+            └── 2
+                └── 3
+                    └── 4
+                        └── 5
+                            └── 6
+                                └── 7
+                                    └── 8
+                                        └── 9
+                                            ├── A
+                                            ├── B
+                                            └── C
+                                                ├── D
+                                                ├── E
+                                                └── F
+    >>> nx.write_network_text(graph, vertical_chains=True)
+    ╙── 0
+        │
+        1
+        │
+        2
+        │
+        3
+        │
+        4
+        │
+        5
+        │
+        6
+        │
+        7
+        │
+        8
+        │
+        9
+        ├── A
+        ├── B
+        └── C
+            ├── D
+            ├── E
+            └── F
+    """
+    from typing import Any, NamedTuple
+
+    class StackFrame(NamedTuple):
+        parent: Any
+        node: Any
+        indents: list
+        this_islast: bool
+        this_vertical: bool
+
+    collapse_attr = "collapse"
+
+    is_directed = graph.is_directed()
+
+    if is_directed:
+        glyphs = AsciiDirectedGlyphs if ascii_only else UtfDirectedGlyphs
+        succ = graph.succ
+        pred = graph.pred
+    else:
+        glyphs = AsciiUndirectedGlyphs if ascii_only else UtfUndirectedGlyphs
+        succ = graph.adj
+        pred = graph.adj
+
+    if isinstance(with_labels, str):
+        label_attr = with_labels
+    elif with_labels:
+        label_attr = "label"
+    else:
+        label_attr = None
+
+    if max_depth == 0:
+        yield glyphs.empty + " ..."
+    elif len(graph.nodes) == 0:
+        yield glyphs.empty
+    else:
+        # If the nodes to traverse are unspecified, find the minimal set of
+        # nodes that will reach the entire graph
+        if sources is None:
+            sources = _find_sources(graph)
+
+        # Populate the stack with each:
+        # 1. parent node in the DFS tree (or None for root nodes),
+        # 2. the current node in the DFS tree,
+        # 3. a list of indentations indicating depth,
+        # 4. a flag indicating if the node is the final one to be written.
+        # Reverse the stack so sources are popped in the correct order.
+        last_idx = len(sources) - 1
+        stack = [
+            StackFrame(None, node, [], (idx == last_idx), False)
+            for idx, node in enumerate(sources)
+        ][::-1]
+
+        num_skipped_children = defaultdict(lambda: 0)
+        seen_nodes = set()
+        while stack:
+            parent, node, indents, this_islast, this_vertical = stack.pop()
+
+            if node is not Ellipsis:
+                skip = node in seen_nodes
+                if skip:
+                    # Mark that we skipped a parent's child
+                    num_skipped_children[parent] += 1
+
+                if this_islast:
+                    # If we reached the last child of a parent, and we skipped
+                    # any of that parent's children, then we should emit an
+                    # ellipsis at the end after this.
+                    if num_skipped_children[parent] and parent is not None:
+                        # Append the ellipsis to be emitted last
+                        next_islast = True
+                        try_frame = StackFrame(
+                            node, Ellipsis, indents, next_islast, False
+                        )
+                        stack.append(try_frame)
+
+                        # Redo this frame, but not as a last object
+                        next_islast = False
+                        try_frame = StackFrame(
+                            parent, node, indents, next_islast, this_vertical
+                        )
+                        stack.append(try_frame)
+                        continue
+
+                if skip:
+                    continue
+                seen_nodes.add(node)
+
+            if not indents:
+                # Top level items (i.e.
trees in the forest) get different + # glyphs to indicate they are not actually connected + if this_islast: + this_vertical = False + this_prefix = indents + [glyphs.newtree_last] + next_prefix = indents + [glyphs.endof_forest] + else: + this_prefix = indents + [glyphs.newtree_mid] + next_prefix = indents + [glyphs.within_forest] + + else: + # Non-top-level items + if this_vertical: + this_prefix = indents + next_prefix = indents + else: + if this_islast: + this_prefix = indents + [glyphs.last] + next_prefix = indents + [glyphs.endof_forest] + else: + this_prefix = indents + [glyphs.mid] + next_prefix = indents + [glyphs.within_tree] + + if node is Ellipsis: + label = " ..." + suffix = "" + children = [] + else: + if label_attr is not None: + label = str(graph.nodes[node].get(label_attr, node)) + else: + label = str(node) + + # Determine if we want to show the children of this node. + if collapse_attr is not None: + collapse = graph.nodes[node].get(collapse_attr, False) + else: + collapse = False + + # Determine: + # (1) children to traverse into after showing this node. + # (2) parents to immediately show to the right of this node. + if is_directed: + # In the directed case we must show every successor node + # note: it may be skipped later, but we don't have that + # information here. + children = list(succ[node]) + # In the directed case we must show every predecessor + # except for parent we directly traversed from. + handled_parents = {parent} + else: + # Showing only the unseen children results in a more + # concise representation for the undirected case. + children = [ + child for child in succ[node] if child not in seen_nodes + ] + + # In the undirected case, parents are also children, so we + # only need to immediately show the ones we can no longer + # traverse + handled_parents = {*children, parent} + + if max_depth is not None and len(indents) == max_depth - 1: + # Use ellipsis to indicate we have reached maximum depth + if children: + children = [Ellipsis] + handled_parents = {parent} + + if collapse: + # Collapsing a node is the same as reaching maximum depth + if children: + children = [Ellipsis] + handled_parents = {parent} + + # The other parents are other predecessors of this node that + # are not handled elsewhere. + other_parents = [p for p in pred[node] if p not in handled_parents] + if other_parents: + if label_attr is not None: + other_parents_labels = ", ".join( + [ + str(graph.nodes[p].get(label_attr, p)) + for p in other_parents + ] + ) + else: + other_parents_labels = ", ".join( + [str(p) for p in other_parents] + ) + suffix = " ".join(["", glyphs.backedge, other_parents_labels]) + else: + suffix = "" + + # Emit the line for this node, this will be called for each node + # exactly once. + if this_vertical: + yield "".join(this_prefix + [glyphs.vertical_edge]) + + yield "".join(this_prefix + [label, suffix]) + + if vertical_chains: + if is_directed: + num_children = len(set(children)) + else: + num_children = len(set(children) - {parent}) + # The next node can be drawn vertically if it is the only + # remaining child of this node. + next_is_vertical = num_children == 1 + else: + next_is_vertical = False + + # Push children on the stack in reverse order so they are popped in + # the original order. 
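+            # For example, children [a, b, c] are pushed in the order c, b, a,
+            # so ``a`` is popped and drawn first; ``next_islast`` is True only
+            # for idx 0, i.e. for ``c``, the child popped (and drawn) last.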
+ for idx, child in enumerate(children[::-1]): + next_islast = idx == 0 + try_frame = StackFrame( + node, child, next_prefix, next_islast, next_is_vertical + ) + stack.append(try_frame) + + +@open_file(1, "w") +def write_network_text( + graph, + path=None, + with_labels=True, + sources=None, + max_depth=None, + ascii_only=False, + end="\n", + vertical_chains=False, +): + """Creates a nice text representation of a graph + + This works via a depth-first traversal of the graph and writing a line for + each unique node encountered. Non-tree edges are written to the right of + each node, and connection to a non-tree edge is indicated with an ellipsis. + This representation works best when the input graph is a forest, but any + graph can be represented. + + Parameters + ---------- + graph : nx.DiGraph | nx.Graph + Graph to represent + + path : string or file or callable or None + Filename or file handle for data output. + if a function, then it will be called for each generated line. + if None, this will default to "sys.stdout.write" + + with_labels : bool | str + If True will use the "label" attribute of a node to display if it + exists otherwise it will use the node value itself. If given as a + string, then that attribute name will be used instead of "label". + Defaults to True. + + sources : List + Specifies which nodes to start traversal from. Note: nodes that are not + reachable from one of these sources may not be shown. If unspecified, + the minimal set of nodes needed to reach all others will be used. + + max_depth : int | None + The maximum depth to traverse before stopping. Defaults to None. + + ascii_only : Boolean + If True only ASCII characters are used to construct the visualization + + end : string + The line ending character + + vertical_chains : Boolean + If True, chains of nodes will be drawn vertically when possible. + + Examples + -------- + >>> graph = nx.balanced_tree(r=2, h=2, create_using=nx.DiGraph) + >>> nx.write_network_text(graph) + ╙── 0 + ├─╼ 1 + │ ├─╼ 3 + │ └─╼ 4 + └─╼ 2 + ├─╼ 5 + └─╼ 6 + + >>> # A near tree with one non-tree edge + >>> graph.add_edge(5, 1) + >>> nx.write_network_text(graph) + ╙── 0 + ├─╼ 1 ╾ 5 + │ ├─╼ 3 + │ └─╼ 4 + └─╼ 2 + ├─╼ 5 + │ └─╼ ... + └─╼ 6 + + >>> graph = nx.cycle_graph(5) + >>> nx.write_network_text(graph) + ╙── 0 + ├── 1 + │ └── 2 + │ └── 3 + │ └── 4 ─ 0 + └── ... + + >>> graph = nx.cycle_graph(5, nx.DiGraph) + >>> nx.write_network_text(graph, vertical_chains=True) + ╙── 0 ╾ 4 + ╽ + 1 + ╽ + 2 + ╽ + 3 + ╽ + 4 + └─╼ ... + + >>> nx.write_network_text(graph, vertical_chains=True, ascii_only=True) + +-- 0 <- 4 + ! + 1 + ! + 2 + ! + 3 + ! + 4 + L-> ... + + >>> graph = nx.generators.barbell_graph(4, 2) + >>> nx.write_network_text(graph, vertical_chains=False) + ╙── 4 + ├── 5 + │ └── 6 + │ ├── 7 + │ │ ├── 8 ─ 6 + │ │ │ └── 9 ─ 6, 7 + │ │ └── ... + │ └── ... + └── 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ └── 2 ─ 0, 3 + │ └── ... + └── ... + >>> nx.write_network_text(graph, vertical_chains=True) + ╙── 4 + ├── 5 + │ │ + │ 6 + │ ├── 7 + │ │ ├── 8 ─ 6 + │ │ │ │ + │ │ │ 9 ─ 6, 7 + │ │ └── ... + │ └── ... + └── 3 + ├── 0 + │ ├── 1 ─ 3 + │ │ │ + │ │ 2 ─ 0, 3 + │ └── ... + └── ... + + >>> graph = nx.complete_graph(5, create_using=nx.Graph) + >>> nx.write_network_text(graph) + ╙── 0 + ├── 1 + │ ├── 2 ─ 0 + │ │ ├── 3 ─ 0, 1 + │ │ │ └── 4 ─ 0, 1, 2 + │ │ └── ... + │ └── ... + └── ... + + >>> graph = nx.complete_graph(3, create_using=nx.DiGraph) + >>> nx.write_network_text(graph) + ╙── 0 ╾ 1, 2 + ├─╼ 1 ╾ 2 + │ ├─╼ 2 ╾ 0 + │ │ └─╼ ... + │ └─╼ ... + └─╼ ... 
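+
+    The empty graph is a degenerate case; it renders as the lone "empty"
+    glyph (this is the ``len(graph.nodes) == 0`` branch in
+    ``generate_network_text``):
+
+    >>> nx.write_network_text(nx.Graph())
+    ╙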
+    """
+    if path is None:
+        # The path is unspecified, write to stdout
+        _write = sys.stdout.write
+    elif hasattr(path, "write"):
+        # The path is already an open file
+        _write = path.write
+    elif callable(path):
+        # The path is a custom callable
+        _write = path
+    else:
+        raise TypeError(type(path))
+
+    for line in generate_network_text(
+        graph,
+        with_labels=with_labels,
+        sources=sources,
+        max_depth=max_depth,
+        ascii_only=ascii_only,
+        vertical_chains=vertical_chains,
+    ):
+        _write(line + end)
+
+
+def _find_sources(graph):
+    """
+    Determine a minimal set of nodes such that the entire graph is reachable
+    """
+    # For each connected part of the graph, choose at least
+    # one node as a starting point, preferably without a parent
+    if graph.is_directed():
+        # Choose one node from each SCC with minimum in_degree
+        sccs = list(nx.strongly_connected_components(graph))
+        # Condensing the SCCs forms a DAG; the nodes of this DAG with
+        # 0 in-degree correspond to the SCCs from which every other node
+        # can be reached, i.e. they yield the minimum set of sources.
+        scc_graph = nx.condensation(graph, sccs)
+        supernode_to_nodes = {sn: [] for sn in scc_graph.nodes()}
+        # Note: the order of mapping differs between pypy and cpython
+        # so we have to loop over graph nodes for consistency
+        mapping = scc_graph.graph["mapping"]
+        for n in graph.nodes:
+            sn = mapping[n]
+            supernode_to_nodes[sn].append(n)
+        sources = []
+        for sn in scc_graph.nodes():
+            if scc_graph.in_degree[sn] == 0:
+                scc = supernode_to_nodes[sn]
+                node = min(scc, key=lambda n: graph.in_degree[n])
+                sources.append(node)
+    else:
+        # For undirected graphs, the entire graph will be reachable as
+        # long as we consider one node from every connected component
+        sources = [
+            min(cc, key=lambda n: graph.degree[n])
+            for cc in nx.connected_components(graph)
+        ]
+        sources = sorted(sources, key=lambda n: graph.degree[n])
+    return sources
+
+
+def _parse_network_text(lines):
+    """Reconstructs a graph from a network text representation.
+
+    This is mainly used for testing. Network text is for display, not
+    serialization; as such, this cannot parse all network text representations
+    because node labels can be ambiguous with the glyphs and indentation used
+    to represent edge structure. Additionally, there is no way to determine if
+    disconnected graphs were originally directed or undirected.
+
+    Parameters
+    ----------
+    lines : list or iterator of strings
+        Input data in network text format
+
+    Returns
+    -------
+    G : NetworkX graph
+        The graph corresponding to the lines in network text format.
+    """
+    from itertools import chain
+    from typing import Any, NamedTuple
+
+    class ParseStackFrame(NamedTuple):
+        node: Any
+        indent: int
+        has_vertical_child: int | None
+
+    initial_line_iter = iter(lines)
+
+    is_ascii = None
+    is_directed = None
+
+    ##############
+    # Initial Pass
+    ##############
+
+    # Do an initial pass over the lines to determine what type of graph it is.
+    # Remember what these lines were, so we can reiterate over them in the
+    # parsing pass.
+    initial_lines = []
+    try:
+        first_line = next(initial_line_iter)
+    except StopIteration:
+        ...
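+    # (No lines at all: ``is_ascii`` and ``is_directed`` stay None here, and
+    # the defaults below yield an empty undirected graph.)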
+    else:
+        initial_lines.append(first_line)
+        # The first character indicates if it is an ASCII or UTF graph
+        first_char = first_line[0]
+        if first_char in {
+            UtfBaseGlyphs.empty,
+            UtfBaseGlyphs.newtree_mid[0],
+            UtfBaseGlyphs.newtree_last[0],
+        }:
+            is_ascii = False
+        elif first_char in {
+            AsciiBaseGlyphs.empty,
+            AsciiBaseGlyphs.newtree_mid[0],
+            AsciiBaseGlyphs.newtree_last[0],
+        }:
+            is_ascii = True
+        else:
+            raise AssertionError(f"Unexpected first character: {first_char}")
+
+    if is_ascii:
+        directed_glyphs = AsciiDirectedGlyphs.as_dict()
+        undirected_glyphs = AsciiUndirectedGlyphs.as_dict()
+    else:
+        directed_glyphs = UtfDirectedGlyphs.as_dict()
+        undirected_glyphs = UtfUndirectedGlyphs.as_dict()
+
+    # For both directed / undirected glyphs, determine which glyphs never
+    # appear as substrings in the other undirected / directed glyphs. Glyphs
+    # with this property unambiguously indicate whether a graph is directed /
+    # undirected.
+    directed_items = set(directed_glyphs.values())
+    undirected_items = set(undirected_glyphs.values())
+    unambiguous_directed_items = []
+    for item in directed_items:
+        other_items = undirected_items
+        other_supersets = [other for other in other_items if item in other]
+        if not other_supersets:
+            unambiguous_directed_items.append(item)
+    unambiguous_undirected_items = []
+    for item in undirected_items:
+        other_items = directed_items
+        other_supersets = [other for other in other_items if item in other]
+        if not other_supersets:
+            unambiguous_undirected_items.append(item)
+
+    for line in initial_line_iter:
+        initial_lines.append(line)
+        if any(item in line for item in unambiguous_undirected_items):
+            is_directed = False
+            break
+        elif any(item in line for item in unambiguous_directed_items):
+            is_directed = True
+            break
+
+    if is_directed is None:
+        # Not enough information to determine; choose undirected by default
+        is_directed = False
+
+    glyphs = directed_glyphs if is_directed else undirected_glyphs
+
+    # The backedge symbol by itself can be ambiguous, but with spaces around
+    # it, it becomes unambiguous.
+    backedge_symbol = " " + glyphs["backedge"] + " "
+
+    # Reconstruct an iterator over all of the lines.
+    parsing_line_iter = chain(initial_lines, initial_line_iter)
+
+    ##############
+    # Parsing Pass
+    ##############
+
+    edges = []
+    nodes = []
+    is_empty = None
+
+    noparent = object()  # sentinel value
+
+    # Keep a stack of previous nodes that could be parents of subsequent nodes
+    stack = [ParseStackFrame(noparent, -1, None)]
+
+    for line in parsing_line_iter:
+        if line == glyphs["empty"]:
+            # If the line is the empty glyph, we are done.
+            # There shouldn't be anything else after this.
+            is_empty = True
+            continue
+
+        if backedge_symbol in line:
+            # This line has one or more backedges; separate those out
+            node_part, backedge_part = line.split(backedge_symbol)
+            backedge_nodes = [u.strip() for u in backedge_part.split(", ")]
+            # Now the node can be parsed
+            node_part = node_part.rstrip()
+            prefix, node = node_part.rsplit(" ", 1)
+            node = node.strip()
+            # Add the backedges to the edge list
+            edges.extend([(u, node) for u in backedge_nodes])
+        else:
+            # No backedge; the tail of this line is the node
+            prefix, node = line.rsplit(" ", 1)
+            node = node.strip()
+
+        prev = stack.pop()
+
+        if node in glyphs["vertical_edge"]:
+            # Previous node is still the previous node, but we know it will
+            # have exactly one child, which will need to have its nesting level
+            # adjusted.
+            modified_prev = ParseStackFrame(
+                prev.node,
+                prev.indent,
+                True,
+            )
+            stack.append(modified_prev)
+            continue
+
+        # The length of the string before the node characters gives us a hint
+        # about our nesting level. The only case where this doesn't work is
+        # when there are vertical chains, which is handled explicitly.
+        indent = len(prefix)
+        curr = ParseStackFrame(node, indent, None)
+
+        if prev.has_vertical_child:
+            # In this case we know prev must be the parent of our current line,
+            # so we don't have to search the stack. (Which is good, because the
+            # indentation check wouldn't work in this case.)
+            ...
+        else:
+            # If the previous node's nesting level is greater than the current
+            # node's nesting level, then the previous node was the end of a
+            # path and is not our parent. We can safely pop nodes off the
+            # stack until we find one with a comparable nesting level, which
+            # is our parent.
+            while curr.indent <= prev.indent:
+                prev = stack.pop()
+
+        if node == "...":
+            # The current previous node is no longer a valid parent;
+            # keep it popped from the stack.
+            stack.append(prev)
+        else:
+            # The previous and current nodes may still be parents, so add them
+            # back onto the stack.
+            stack.append(prev)
+            stack.append(curr)
+
+            # Add the node and the edge to its parent to the node / edge lists.
+            nodes.append(curr.node)
+            if prev.node is not noparent:
+                edges.append((prev.node, curr.node))
+
+    if is_empty:
+        # Sanity check
+        assert len(nodes) == 0
+
+    # Reconstruct the graph
+    cls = nx.DiGraph if is_directed else nx.Graph
+    new = cls()
+    new.add_nodes_from(nodes)
+    new.add_edges_from(edges)
+    return new
diff --git a/.venv/lib/python3.12/site-packages/networkx/relabel.py b/.venv/lib/python3.12/site-packages/networkx/relabel.py
new file mode 100644
index 0000000..4b870f7
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/relabel.py
@@ -0,0 +1,285 @@
+import networkx as nx
+
+__all__ = ["convert_node_labels_to_integers", "relabel_nodes"]
+
+
+@nx._dispatchable(
+    preserve_all_attrs=True, mutates_input={"not copy": 2}, returns_graph=True
+)
+def relabel_nodes(G, mapping, copy=True):
+    """Relabel the nodes of the graph G according to a given mapping.
+
+    The original node ordering may not be preserved if `copy` is `False` and the
+    mapping includes overlap between old and new labels.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph
+
+    mapping : dictionary
+        A dictionary with the old labels as keys and new labels as values.
+        A partial mapping is allowed. Mapping 2 nodes to a single node is allowed.
+        Any non-node keys in the mapping are ignored.
+
+    copy : bool (optional, default=True)
+        If True, return a copy; if False, relabel the nodes in place.
+ + Examples + -------- + To create a new graph with nodes relabeled according to a given + dictionary: + + >>> G = nx.path_graph(3) + >>> sorted(G) + [0, 1, 2] + >>> mapping = {0: "a", 1: "b", 2: "c"} + >>> H = nx.relabel_nodes(G, mapping) + >>> sorted(H) + ['a', 'b', 'c'] + + Nodes can be relabeled with any hashable object, including numbers + and strings: + + >>> import string + >>> G = nx.path_graph(26) # nodes are integers 0 through 25 + >>> sorted(G)[:3] + [0, 1, 2] + >>> mapping = dict(zip(G, string.ascii_lowercase)) + >>> G = nx.relabel_nodes(G, mapping) # nodes are characters a through z + >>> sorted(G)[:3] + ['a', 'b', 'c'] + >>> mapping = dict(zip(G, range(1, 27))) + >>> G = nx.relabel_nodes(G, mapping) # nodes are integers 1 through 26 + >>> sorted(G)[:3] + [1, 2, 3] + + To perform a partial in-place relabeling, provide a dictionary + mapping only a subset of the nodes, and set the `copy` keyword + argument to False: + + >>> G = nx.path_graph(3) # nodes 0-1-2 + >>> mapping = {0: "a", 1: "b"} # 0->'a' and 1->'b' + >>> G = nx.relabel_nodes(G, mapping, copy=False) + >>> sorted(G, key=str) + [2, 'a', 'b'] + + A mapping can also be given as a function: + + >>> G = nx.path_graph(3) + >>> H = nx.relabel_nodes(G, lambda x: x**2) + >>> list(H) + [0, 1, 4] + + In a multigraph, relabeling two or more nodes to the same new node + will retain all edges, but may change the edge keys in the process: + + >>> G = nx.MultiGraph() + >>> G.add_edge(0, 1, value="a") # returns the key for this edge + 0 + >>> G.add_edge(0, 2, value="b") + 0 + >>> G.add_edge(0, 3, value="c") + 0 + >>> mapping = {1: 4, 2: 4, 3: 4} + >>> H = nx.relabel_nodes(G, mapping, copy=True) + >>> print(H[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + + This works for in-place relabeling too: + + >>> G = nx.relabel_nodes(G, mapping, copy=False) + >>> print(G[0]) + {4: {0: {'value': 'a'}, 1: {'value': 'b'}, 2: {'value': 'c'}}} + + Notes + ----- + Only the nodes specified in the mapping will be relabeled. + Any non-node keys in the mapping are ignored. + + The keyword setting copy=False modifies the graph in place. + Relabel_nodes avoids naming collisions by building a + directed graph from ``mapping`` which specifies the order of + relabelings. Naming collisions, such as a->b, b->c, are ordered + such that "b" gets renamed to "c" before "a" gets renamed "b". + In cases of circular mappings (e.g. a->b, b->a), modifying the + graph is not possible in-place and an exception is raised. + In that case, use copy=True. + + If a relabel operation on a multigraph would cause two or more + edges to have the same source, target and key, the second edge must + be assigned a new key to retain all edges. The new key is set + to the lowest non-negative integer not already used as a key + for edges between these two nodes. Note that this means non-numeric + keys may be replaced by numeric keys. + + See Also + -------- + convert_node_labels_to_integers + """ + # you can pass any callable e.g. f(old_label) -> new_label or + # e.g. str(old_label) -> new_label, but we'll just make a dictionary here regardless + m = {n: mapping(n) for n in G} if callable(mapping) else mapping + + if copy: + return _relabel_copy(G, m) + else: + return _relabel_inplace(G, m) + + +def _relabel_inplace(G, mapping): + if len(mapping.keys() & mapping.values()) > 0: + # labels sets overlap + # can we topological sort and still do the relabeling? 
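+        # Sketch of the ordering trick: for a chain mapping a->b, b->c, the
+        # graph D below gets edges (a, b) and (b, c); reversed topological
+        # order visits c, b, a, so b is renamed to c before a is renamed to
+        # b (c is not a key of ``mapping`` and is skipped in the loop below).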
+ D = nx.DiGraph(list(mapping.items())) + D.remove_edges_from(nx.selfloop_edges(D)) + try: + nodes = reversed(list(nx.topological_sort(D))) + except nx.NetworkXUnfeasible as err: + raise nx.NetworkXUnfeasible( + "The node label sets are overlapping and no ordering can " + "resolve the mapping. Use copy=True." + ) from err + else: + # non-overlapping label sets, sort them in the order of G nodes + nodes = [n for n in G if n in mapping] + + multigraph = G.is_multigraph() + directed = G.is_directed() + + for old in nodes: + # Test that old is in both mapping and G, otherwise ignore. + try: + new = mapping[old] + G.add_node(new, **G.nodes[old]) + except KeyError: + continue + if new == old: + continue + if multigraph: + new_edges = [ + (new, new if old == target else target, key, data) + for (_, target, key, data) in G.edges(old, data=True, keys=True) + ] + if directed: + new_edges += [ + (new if old == source else source, new, key, data) + for (source, _, key, data) in G.in_edges(old, data=True, keys=True) + ] + # Ensure new edges won't overwrite existing ones + seen = set() + for i, (source, target, key, data) in enumerate(new_edges): + if target in G[source] and key in G[source][target]: + new_key = 0 if not isinstance(key, int | float) else key + while new_key in G[source][target] or (target, new_key) in seen: + new_key += 1 + new_edges[i] = (source, target, new_key, data) + seen.add((target, new_key)) + else: + new_edges = [ + (new, new if old == target else target, data) + for (_, target, data) in G.edges(old, data=True) + ] + if directed: + new_edges += [ + (new if old == source else source, new, data) + for (source, _, data) in G.in_edges(old, data=True) + ] + G.remove_node(old) + G.add_edges_from(new_edges) + return G + + +def _relabel_copy(G, mapping): + H = G.__class__() + H.add_nodes_from(mapping.get(n, n) for n in G) + H._node.update((mapping.get(n, n), d.copy()) for n, d in G.nodes.items()) + if G.is_multigraph(): + new_edges = [ + (mapping.get(n1, n1), mapping.get(n2, n2), k, d.copy()) + for (n1, n2, k, d) in G.edges(keys=True, data=True) + ] + + # check for conflicting edge-keys + undirected = not G.is_directed() + seen_edges = set() + for i, (source, target, key, data) in enumerate(new_edges): + while (source, target, key) in seen_edges: + if not isinstance(key, int | float): + key = 0 + key += 1 + seen_edges.add((source, target, key)) + if undirected: + seen_edges.add((target, source, key)) + new_edges[i] = (source, target, key, data) + + H.add_edges_from(new_edges) + else: + H.add_edges_from( + (mapping.get(n1, n1), mapping.get(n2, n2), d.copy()) + for (n1, n2, d) in G.edges(data=True) + ) + H.graph.update(G.graph) + return H + + +@nx._dispatchable(preserve_all_attrs=True, returns_graph=True) +def convert_node_labels_to_integers( + G, first_label=0, ordering="default", label_attribute=None +): + """Returns a copy of the graph G with the nodes relabeled using + consecutive integers. + + Parameters + ---------- + G : graph + A NetworkX graph + + first_label : int, optional (default=0) + An integer specifying the starting offset in numbering nodes. + The new integer labels are numbered first_label, ..., n-1+first_label. + + ordering : string + "default" : inherit node ordering from G.nodes() + "sorted" : inherit node ordering from sorted(G.nodes()) + "increasing degree" : nodes are sorted by increasing degree + "decreasing degree" : nodes are sorted by decreasing degree + + label_attribute : string, optional (default=None) + Name of node attribute to store old label. 
If None, no attribute
+        is created.
+
+    Notes
+    -----
+    Node and edge attribute data are copied to the new (relabeled) graph.
+
+    There is no guarantee that the relabeling of nodes to integers will
+    give the same integers for two (even identical) graphs.
+    Use the `ordering` argument to try to preserve the order.
+
+    See Also
+    --------
+    relabel_nodes
+    """
+    N = G.number_of_nodes() + first_label
+    if ordering == "default":
+        mapping = dict(zip(G.nodes(), range(first_label, N)))
+    elif ordering == "sorted":
+        nlist = sorted(G.nodes())
+        mapping = dict(zip(nlist, range(first_label, N)))
+    elif ordering == "increasing degree":
+        dv_pairs = [(d, n) for (n, d) in G.degree()]
+        dv_pairs.sort()  # in-place sort from lowest to highest degree
+        mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
+    elif ordering == "decreasing degree":
+        dv_pairs = [(d, n) for (n, d) in G.degree()]
+        dv_pairs.sort()  # in-place sort from lowest to highest degree
+        dv_pairs.reverse()
+        mapping = dict(zip([n for d, n in dv_pairs], range(first_label, N)))
+    else:
+        raise nx.NetworkXError(f"Unknown node ordering: {ordering}")
+    H = relabel_nodes(G, mapping)
+    # create node attribute with the old label
+    if label_attribute is not None:
+        nx.set_node_attributes(H, {v: k for k, v in mapping.items()}, label_attribute)
+    return H
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000..4f7280c
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/__init__.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-312.pyc
new file mode 100644
index 0000000..ac22383
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_all_random_functions.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert.cpython-312.pyc
new file mode 100644
index 0000000..db866aa
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-312.pyc
new file mode 100644
index 0000000..3e8322b
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-312.pyc
new file mode 100644
index 0000000..c1f3526
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-312.pyc
b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-312.pyc new file mode 100644 index 0000000..12490b2 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_convert_scipy.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-312.pyc new file mode 100644 index 0000000..ee9ffbd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_exceptions.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_import.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_import.cpython-312.pyc new file mode 100644 index 0000000..0e145ac Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_import.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-312.pyc new file mode 100644 index 0000000..78aa9c6 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_lazy_imports.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_relabel.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_relabel.cpython-312.pyc new file mode 100644 index 0000000..ff98f68 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/tests/__pycache__/test_relabel.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py new file mode 100644 index 0000000..fb59159 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_all_random_functions.py @@ -0,0 +1,250 @@ +import random + +import pytest + +import networkx as nx +from networkx.algorithms import approximation as approx +from networkx.algorithms import threshold + +np = pytest.importorskip("numpy") + +progress = 0 + +# store the random numbers after setting a global seed +np.random.seed(42) +np_rv = np.random.rand() +random.seed(42) +py_rv = random.random() + + +def t(f, *args, **kwds): + """call one function and check if global RNG changed""" + global progress + progress += 1 + print(progress, ",", end="") + + f(*args, **kwds) + + after_np_rv = np.random.rand() + # if np_rv != after_np_rv: + # print(np_rv, after_np_rv, "don't match np!") + assert np_rv == after_np_rv + np.random.seed(42) + + after_py_rv = random.random() + # if py_rv != after_py_rv: + # print(py_rv, after_py_rv, "don't match py!") + assert py_rv == after_py_rv + random.seed(42) + + +def run_all_random_functions(seed): + n = 20 + m = 10 + k = l = 2 + s = v = 10 + p = q = p1 = p2 = p_in = p_out = 0.4 + alpha = radius = theta = 0.75 + sizes = (20, 20, 10) + colors = [1, 2, 3] + G = nx.barbell_graph(12, 20) + H = nx.cycle_graph(3) + H.add_weighted_edges_from((u, v, 0.2) for u, v in H.edges) + deg_sequence = [3, 2, 1, 3, 2, 1, 3, 2, 1, 2, 1, 2, 1] + in_degree_sequence = w = sequence = aseq = bseq = deg_sequence + + # print("starting...") + t(nx.maximal_independent_set, G, seed=seed) + t(nx.rich_club_coefficient, G, seed=seed, normalized=False) + t(nx.random_reference, 
G, seed=seed) + t(nx.lattice_reference, G, seed=seed) + t(nx.sigma, G, 1, 2, seed=seed) + t(nx.omega, G, 1, 2, seed=seed) + # print("out of smallworld.py") + t(nx.double_edge_swap, G, seed=seed) + # print("starting connected_double_edge_swap") + t(nx.connected_double_edge_swap, nx.complete_graph(9), seed=seed) + # print("ending connected_double_edge_swap") + t(nx.random_layout, G, seed=seed) + t(nx.fruchterman_reingold_layout, G, seed=seed) + t(nx.algebraic_connectivity, G, seed=seed) + t(nx.fiedler_vector, G, seed=seed) + t(nx.spectral_ordering, G, seed=seed) + # print('starting average_clustering') + t(approx.average_clustering, G, seed=seed) + t(approx.simulated_annealing_tsp, H, "greedy", source=1, seed=seed) + t(approx.threshold_accepting_tsp, H, "greedy", source=1, seed=seed) + t( + approx.traveling_salesman_problem, + H, + method=lambda G, weight: approx.simulated_annealing_tsp( + G, "greedy", weight, seed=seed + ), + ) + t( + approx.traveling_salesman_problem, + H, + method=lambda G, weight: approx.threshold_accepting_tsp( + G, "greedy", weight, seed=seed + ), + ) + t(nx.betweenness_centrality, G, seed=seed) + t(nx.edge_betweenness_centrality, G, seed=seed) + t(nx.approximate_current_flow_betweenness_centrality, G, seed=seed) + # print("kernighan") + t(nx.algorithms.community.kernighan_lin_bisection, G, seed=seed) + # nx.algorithms.community.asyn_lpa_communities(G, seed=seed) + t(nx.algorithms.tree.greedy_branching, G, seed=seed) + # print('done with graph argument functions') + + t(nx.spectral_graph_forge, G, alpha, seed=seed) + t(nx.algorithms.community.asyn_fluidc, G, k, max_iter=1, seed=seed) + t( + nx.algorithms.connectivity.edge_augmentation.greedy_k_edge_augmentation, + G, + k, + seed=seed, + ) + t(nx.algorithms.coloring.strategy_random_sequential, G, colors, seed=seed) + + cs = ["d", "i", "i", "d", "d", "i"] + t(threshold.swap_d, cs, seed=seed) + t(nx.configuration_model, deg_sequence, seed=seed) + t( + nx.directed_configuration_model, + in_degree_sequence, + in_degree_sequence, + seed=seed, + ) + t(nx.expected_degree_graph, w, seed=seed) + t(nx.random_degree_sequence_graph, sequence, seed=seed) + joint_degrees = { + 1: {4: 1}, + 2: {2: 2, 3: 2, 4: 2}, + 3: {2: 2, 4: 1}, + 4: {1: 1, 2: 2, 3: 1}, + } + t(nx.joint_degree_graph, joint_degrees, seed=seed) + joint_degree_sequence = [ + (1, 0), + (1, 0), + (1, 0), + (2, 0), + (1, 0), + (2, 1), + (0, 1), + (0, 1), + ] + t(nx.random_clustered_graph, joint_degree_sequence, seed=seed) + constructor = [(3, 3, 0.5), (10, 10, 0.7)] + t(nx.random_shell_graph, constructor, seed=seed) + mapping = {1: 0.4, 2: 0.3, 3: 0.3} + t(nx.utils.random_weighted_sample, mapping, k, seed=seed) + t(nx.utils.weighted_choice, mapping, seed=seed) + t(nx.algorithms.bipartite.configuration_model, aseq, bseq, seed=seed) + t(nx.algorithms.bipartite.preferential_attachment_graph, aseq, p, seed=seed) + + def kernel_integral(u, w, z): + return z - w + + t(nx.random_kernel_graph, n, kernel_integral, seed=seed) + + sizes = [75, 75, 300] + probs = [[0.25, 0.05, 0.02], [0.05, 0.35, 0.07], [0.02, 0.07, 0.40]] + t(nx.stochastic_block_model, sizes, probs, seed=seed) + t(nx.random_partition_graph, sizes, p_in, p_out, seed=seed) + + # print("starting generator functions") + t(threshold.random_threshold_sequence, n, p, seed=seed) + t(nx.tournament.random_tournament, n, seed=seed) + t(nx.relaxed_caveman_graph, l, k, p, seed=seed) + t(nx.planted_partition_graph, l, k, p_in, p_out, seed=seed) + t(nx.gaussian_random_partition_graph, n, s, v, p_in, p_out, seed=seed) + 
t(nx.gn_graph, n, seed=seed) + t(nx.gnr_graph, n, p, seed=seed) + t(nx.gnc_graph, n, seed=seed) + t(nx.scale_free_graph, n, seed=seed) + t(nx.directed.random_uniform_k_out_graph, n, k, seed=seed) + t(nx.random_k_out_graph, n, k, alpha, seed=seed) + N = 1000 + t(nx.partial_duplication_graph, N, n, p, q, seed=seed) + t(nx.duplication_divergence_graph, n, p, seed=seed) + t(nx.random_geometric_graph, n, radius, seed=seed) + t(nx.soft_random_geometric_graph, n, radius, seed=seed) + t(nx.geographical_threshold_graph, n, theta, seed=seed) + t(nx.waxman_graph, n, seed=seed) + t(nx.navigable_small_world_graph, n, seed=seed) + t(nx.thresholded_random_geometric_graph, n, radius, theta, seed=seed) + t(nx.uniform_random_intersection_graph, n, m, p, seed=seed) + t(nx.k_random_intersection_graph, n, m, k, seed=seed) + + t(nx.general_random_intersection_graph, n, 2, [0.1, 0.5], seed=seed) + t(nx.fast_gnp_random_graph, n, p, seed=seed) + t(nx.gnp_random_graph, n, p, seed=seed) + t(nx.dense_gnm_random_graph, n, m, seed=seed) + t(nx.gnm_random_graph, n, m, seed=seed) + t(nx.newman_watts_strogatz_graph, n, k, p, seed=seed) + t(nx.watts_strogatz_graph, n, k, p, seed=seed) + t(nx.connected_watts_strogatz_graph, n, k, p, seed=seed) + t(nx.random_regular_graph, 3, n, seed=seed) + t(nx.barabasi_albert_graph, n, m, seed=seed) + t(nx.extended_barabasi_albert_graph, n, m, p, q, seed=seed) + t(nx.powerlaw_cluster_graph, n, m, p, seed=seed) + t(nx.random_lobster, n, p1, p2, seed=seed) + t(nx.random_powerlaw_tree, n, seed=seed, tries=5000) + t(nx.random_powerlaw_tree_sequence, 10, seed=seed, tries=5000) + t(nx.random_labeled_tree, n, seed=seed) + t(nx.utils.powerlaw_sequence, n, seed=seed) + t(nx.utils.zipf_rv, 2.3, seed=seed) + cdist = [0.2, 0.4, 0.5, 0.7, 0.9, 1.0] + t(nx.utils.discrete_sequence, n, cdistribution=cdist, seed=seed) + t(nx.algorithms.bipartite.random_graph, n, m, p, seed=seed) + t(nx.algorithms.bipartite.gnmk_random_graph, n, m, k, seed=seed) + LFR = nx.generators.LFR_benchmark_graph + t( + LFR, + 25, + 3, + 1.5, + 0.1, + average_degree=3, + min_community=10, + seed=seed, + max_community=20, + ) + t(nx.random_internet_as_graph, n, seed=seed) + # print("done") + + +# choose to test an integer seed, or whether a single RNG can be everywhere +# np_rng = np.random.RandomState(14) +# seed = np_rng +# seed = 14 + + +@pytest.mark.slow +# print("NetworkX Version:", nx.__version__) +def test_rng_interface(): + global progress + + # try different kinds of seeds + for seed in [14, np.random.RandomState(14)]: + np.random.seed(42) + random.seed(42) + run_all_random_functions(seed) + progress = 0 + + # check that both global RNGs are unaffected + after_np_rv = np.random.rand() + # if np_rv != after_np_rv: + # print(np_rv, after_np_rv, "don't match np!") + assert np_rv == after_np_rv + after_py_rv = random.random() + # if py_rv != after_py_rv: + # print(py_rv, after_py_rv, "don't match py!") + assert py_rv == after_py_rv + + +# print("\nDone testing seed:", seed) + +# test_rng_interface() diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py new file mode 100644 index 0000000..44bed94 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert.py @@ -0,0 +1,321 @@ +import pytest + +import networkx as nx +from networkx.convert import ( + from_dict_of_dicts, + from_dict_of_lists, + to_dict_of_dicts, + to_dict_of_lists, + to_networkx_graph, +) +from networkx.generators.classic import barbell_graph, 
cycle_graph +from networkx.utils import edges_equal, graphs_equal, nodes_equal + + +class TestConvert: + def edgelists_equal(self, e1, e2): + return sorted(sorted(e) for e in e1) == sorted(sorted(e) for e in e2) + + def test_simple_graphs(self): + for dest, source in [ + (to_dict_of_dicts, from_dict_of_dicts), + (to_dict_of_lists, from_dict_of_lists), + ]: + G = barbell_graph(10, 3) + G.graph = {} + dod = dest(G) + + # Dict of [dicts, lists] + GG = source(dod) + assert graphs_equal(G, GG) + GW = to_networkx_graph(dod) + assert graphs_equal(G, GW) + GI = nx.Graph(dod) + assert graphs_equal(G, GI) + + # With nodelist keyword + P4 = nx.path_graph(4) + P3 = nx.path_graph(3) + P4.graph = {} + P3.graph = {} + dod = dest(P4, nodelist=[0, 1, 2]) + Gdod = nx.Graph(dod) + assert graphs_equal(Gdod, P3) + + def test_exceptions(self): + # NX graph + class G: + adj = None + + pytest.raises(nx.NetworkXError, to_networkx_graph, G) + + # pygraphviz agraph + class G: + is_strict = None + + pytest.raises(nx.NetworkXError, to_networkx_graph, G) + + # Dict of [dicts, lists] + G = {"a": 0} + pytest.raises(TypeError, to_networkx_graph, G) + + # list or generator of edges + class G: + next = None + + pytest.raises(nx.NetworkXError, to_networkx_graph, G) + + # no match + pytest.raises(nx.NetworkXError, to_networkx_graph, "a") + + def test_digraphs(self): + for dest, source in [ + (to_dict_of_dicts, from_dict_of_dicts), + (to_dict_of_lists, from_dict_of_lists), + ]: + G = cycle_graph(10) + + # Dict of [dicts, lists] + dod = dest(G) + GG = source(dod) + assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GG.edges())) + GW = to_networkx_graph(dod) + assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GW.edges())) + GI = nx.Graph(dod) + assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GI.edges())) + + G = cycle_graph(10, create_using=nx.DiGraph) + dod = dest(G) + GG = source(dod, create_using=nx.DiGraph) + assert sorted(G.nodes()) == sorted(GG.nodes()) + assert sorted(G.edges()) == sorted(GG.edges()) + GW = to_networkx_graph(dod, create_using=nx.DiGraph) + assert sorted(G.nodes()) == sorted(GW.nodes()) + assert sorted(G.edges()) == sorted(GW.edges()) + GI = nx.DiGraph(dod) + assert sorted(G.nodes()) == sorted(GI.nodes()) + assert sorted(G.edges()) == sorted(GI.edges()) + + def test_graph(self): + g = nx.cycle_graph(10) + G = nx.Graph() + G.add_nodes_from(g) + G.add_weighted_edges_from((u, v, u) for u, v in g.edges()) + + # Dict of dicts + dod = to_dict_of_dicts(G) + GG = from_dict_of_dicts(dod, create_using=nx.Graph) + assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GG.edges())) + GW = to_networkx_graph(dod, create_using=nx.Graph) + assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GW.edges())) + GI = nx.Graph(dod) + assert sorted(G.nodes()) == sorted(GI.nodes()) + assert sorted(G.edges()) == sorted(GI.edges()) + + # Dict of lists + dol = to_dict_of_lists(G) + GG = from_dict_of_lists(dol, create_using=nx.Graph) + # dict of lists throws away edge data so set it to none + enone = [(u, v, {}) for (u, v, d) in G.edges(data=True)] + assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) + assert edges_equal(enone, sorted(GG.edges(data=True))) + GW = to_networkx_graph(dol, create_using=nx.Graph) + assert nodes_equal(sorted(G.nodes()), 
sorted(GW.nodes())) + assert edges_equal(enone, sorted(GW.edges(data=True))) + GI = nx.Graph(dol) + assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) + assert edges_equal(enone, sorted(GI.edges(data=True))) + + def test_with_multiedges_self_loops(self): + G = cycle_graph(10) + XG = nx.Graph() + XG.add_nodes_from(G) + XG.add_weighted_edges_from((u, v, u) for u, v in G.edges()) + XGM = nx.MultiGraph() + XGM.add_nodes_from(G) + XGM.add_weighted_edges_from((u, v, u) for u, v in G.edges()) + XGM.add_edge(0, 1, weight=2) # multiedge + XGS = nx.Graph() + XGS.add_nodes_from(G) + XGS.add_weighted_edges_from((u, v, u) for u, v in G.edges()) + XGS.add_edge(0, 0, weight=100) # self loop + + # Dict of dicts + # with self loops, OK + dod = to_dict_of_dicts(XGS) + GG = from_dict_of_dicts(dod, create_using=nx.Graph) + assert nodes_equal(XGS.nodes(), GG.nodes()) + assert edges_equal(XGS.edges(), GG.edges()) + GW = to_networkx_graph(dod, create_using=nx.Graph) + assert nodes_equal(XGS.nodes(), GW.nodes()) + assert edges_equal(XGS.edges(), GW.edges()) + GI = nx.Graph(dod) + assert nodes_equal(XGS.nodes(), GI.nodes()) + assert edges_equal(XGS.edges(), GI.edges()) + + # Dict of lists + # with self loops, OK + dol = to_dict_of_lists(XGS) + GG = from_dict_of_lists(dol, create_using=nx.Graph) + # dict of lists throws away edge data so set it to none + enone = [(u, v, {}) for (u, v, d) in XGS.edges(data=True)] + assert nodes_equal(sorted(XGS.nodes()), sorted(GG.nodes())) + assert edges_equal(enone, sorted(GG.edges(data=True))) + GW = to_networkx_graph(dol, create_using=nx.Graph) + assert nodes_equal(sorted(XGS.nodes()), sorted(GW.nodes())) + assert edges_equal(enone, sorted(GW.edges(data=True))) + GI = nx.Graph(dol) + assert nodes_equal(sorted(XGS.nodes()), sorted(GI.nodes())) + assert edges_equal(enone, sorted(GI.edges(data=True))) + + # Dict of dicts + # with multiedges, OK + dod = to_dict_of_dicts(XGM) + GG = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=True) + assert nodes_equal(sorted(XGM.nodes()), sorted(GG.nodes())) + assert edges_equal(sorted(XGM.edges()), sorted(GG.edges())) + GW = to_networkx_graph(dod, create_using=nx.MultiGraph, multigraph_input=True) + assert nodes_equal(sorted(XGM.nodes()), sorted(GW.nodes())) + assert edges_equal(sorted(XGM.edges()), sorted(GW.edges())) + GI = nx.MultiGraph(dod) + assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes())) + assert sorted(XGM.edges()) == sorted(GI.edges()) + GE = from_dict_of_dicts(dod, create_using=nx.MultiGraph, multigraph_input=False) + assert nodes_equal(sorted(XGM.nodes()), sorted(GE.nodes())) + assert sorted(XGM.edges()) != sorted(GE.edges()) + GI = nx.MultiGraph(XGM) + assert nodes_equal(sorted(XGM.nodes()), sorted(GI.nodes())) + assert edges_equal(sorted(XGM.edges()), sorted(GI.edges())) + GM = nx.MultiGraph(G) + assert nodes_equal(sorted(GM.nodes()), sorted(G.nodes())) + assert edges_equal(sorted(GM.edges()), sorted(G.edges())) + + # Dict of lists + # with multiedges, OK, but better write as DiGraph else you'll + # get double edges + dol = to_dict_of_lists(G) + GG = from_dict_of_lists(dol, create_using=nx.MultiGraph) + assert nodes_equal(sorted(G.nodes()), sorted(GG.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GG.edges())) + GW = to_networkx_graph(dol, create_using=nx.MultiGraph) + assert nodes_equal(sorted(G.nodes()), sorted(GW.nodes())) + assert edges_equal(sorted(G.edges()), sorted(GW.edges())) + GI = nx.MultiGraph(dol) + assert nodes_equal(sorted(G.nodes()), sorted(GI.nodes())) + assert 
edges_equal(sorted(G.edges()), sorted(GI.edges())) + + def test_edgelists(self): + P = nx.path_graph(4) + e = [(0, 1), (1, 2), (2, 3)] + G = nx.Graph(e) + assert nodes_equal(sorted(G.nodes()), sorted(P.nodes())) + assert edges_equal(sorted(G.edges()), sorted(P.edges())) + assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) + + e = [(0, 1, {}), (1, 2, {}), (2, 3, {})] + G = nx.Graph(e) + assert nodes_equal(sorted(G.nodes()), sorted(P.nodes())) + assert edges_equal(sorted(G.edges()), sorted(P.edges())) + assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) + + e = ((n, n + 1) for n in range(3)) + G = nx.Graph(e) + assert nodes_equal(sorted(G.nodes()), sorted(P.nodes())) + assert edges_equal(sorted(G.edges()), sorted(P.edges())) + assert edges_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) + + def test_directed_to_undirected(self): + edges1 = [(0, 1), (1, 2), (2, 0)] + edges2 = [(0, 1), (1, 2), (0, 2)] + assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(), edges1) + assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(), edges1) + + assert self.edgelists_equal( + nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), edges1 + ) + assert self.edgelists_equal( + nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), edges1 + ) + + assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(), edges1) + assert self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(), edges1) + + def test_attribute_dict_integrity(self): + # we must not replace dict-like graph data structures with dicts + G = nx.Graph() + G.add_nodes_from("abc") + H = to_networkx_graph(G, create_using=nx.Graph) + assert list(H.nodes) == list(G.nodes) + H = nx.DiGraph(G) + assert list(H.nodes) == list(G.nodes) + + def test_to_edgelist(self): + G = nx.Graph([(1, 1)]) + elist = nx.to_edgelist(G, nodelist=list(G)) + assert edges_equal(G.edges(data=True), elist) + + def test_custom_node_attr_dict_safekeeping(self): + class custom_dict(dict): + pass + + class Custom(nx.Graph): + node_attr_dict_factory = custom_dict + + g = nx.Graph() + g.add_node(1, weight=1) + + h = Custom(g) + assert isinstance(g._node[1], dict) + assert isinstance(h._node[1], custom_dict) + + # this raise exception + # h._node.update((n, dd.copy()) for n, dd in g.nodes.items()) + # assert isinstance(h._node[1], custom_dict) + + +@pytest.mark.parametrize( + "edgelist", + ( + # Graph with no edge data + [(0, 1), (1, 2)], + # Graph with edge data + [(0, 1, {"weight": 1.0}), (1, 2, {"weight": 2.0})], + ), +) +def test_to_dict_of_dicts_with_edgedata_param(edgelist): + G = nx.Graph() + G.add_edges_from(edgelist) + # Innermost dict value == edge_data when edge_data != None. 
+ # In the case when G has edge data, it is overwritten + expected = {0: {1: 10}, 1: {0: 10, 2: 10}, 2: {1: 10}} + assert nx.to_dict_of_dicts(G, edge_data=10) == expected + + +def test_to_dict_of_dicts_with_edgedata_and_nodelist(): + G = nx.path_graph(5) + nodelist = [2, 3, 4] + expected = {2: {3: 10}, 3: {2: 10, 4: 10}, 4: {3: 10}} + assert nx.to_dict_of_dicts(G, nodelist=nodelist, edge_data=10) == expected + + +def test_to_dict_of_dicts_with_edgedata_multigraph(): + """Multi edge data overwritten when edge_data != None""" + G = nx.MultiGraph() + G.add_edge(0, 1, key="a") + G.add_edge(0, 1, key="b") + # Multi edge data lost when edge_data is not None + expected = {0: {1: 10}, 1: {0: 10}} + assert nx.to_dict_of_dicts(G, edge_data=10) == expected + + +def test_to_networkx_graph_non_edgelist(): + invalid_edgelist = [1, 2, 3] + with pytest.raises(nx.NetworkXError, match="Input is not a valid edge list"): + nx.to_networkx_graph(invalid_edgelist) diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py new file mode 100644 index 0000000..0a554fd --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_numpy.py @@ -0,0 +1,531 @@ +import itertools + +import pytest + +import networkx as nx +from networkx.utils import graphs_equal + +np = pytest.importorskip("numpy") +npt = pytest.importorskip("numpy.testing") + + +class TestConvertNumpyArray: + def setup_method(self): + self.G1 = nx.barbell_graph(10, 3) + self.G2 = nx.cycle_graph(10, create_using=nx.DiGraph) + self.G3 = self.create_weighted(nx.Graph()) + self.G4 = self.create_weighted(nx.DiGraph()) + + def create_weighted(self, G): + g = nx.cycle_graph(4) + G.add_nodes_from(g) + G.add_weighted_edges_from((u, v, 10 + u) for u, v in g.edges()) + return G + + def assert_equal(self, G1, G2): + assert sorted(G1.nodes()) == sorted(G2.nodes()) + assert sorted(G1.edges()) == sorted(G2.edges()) + + def identity_conversion(self, G, A, create_using): + assert A.sum() > 0 + GG = nx.from_numpy_array(A, create_using=create_using) + self.assert_equal(G, GG) + GW = nx.to_networkx_graph(A, create_using=create_using) + self.assert_equal(G, GW) + GI = nx.empty_graph(0, create_using).__class__(A) + self.assert_equal(G, GI) + + def test_shape(self): + "Conversion from non-square array." + A = np.array([[1, 2, 3], [4, 5, 6]]) + pytest.raises(nx.NetworkXError, nx.from_numpy_array, A) + + def test_identity_graph_array(self): + "Conversion from graph to array to graph." + A = nx.to_numpy_array(self.G1) + self.identity_conversion(self.G1, A, nx.Graph()) + + def test_identity_digraph_array(self): + """Conversion from digraph to array to digraph.""" + A = nx.to_numpy_array(self.G2) + self.identity_conversion(self.G2, A, nx.DiGraph()) + + def test_identity_weighted_graph_array(self): + """Conversion from weighted graph to array to weighted graph.""" + A = nx.to_numpy_array(self.G3) + self.identity_conversion(self.G3, A, nx.Graph()) + + def test_identity_weighted_digraph_array(self): + """Conversion from weighted digraph to array to weighted digraph.""" + A = nx.to_numpy_array(self.G4) + self.identity_conversion(self.G4, A, nx.DiGraph()) + + def test_nodelist(self): + """Conversion from graph to array to graph with nodelist.""" + P4 = nx.path_graph(4) + P3 = nx.path_graph(3) + nodelist = list(P3) + A = nx.to_numpy_array(P4, nodelist=nodelist) + GA = nx.Graph(A) + self.assert_equal(GA, P3) + + # Make nodelist ambiguous by containing duplicates. 
+ nodelist += [nodelist[0]] + pytest.raises(nx.NetworkXError, nx.to_numpy_array, P3, nodelist=nodelist) + + # Make nodelist invalid by including nonexistent nodes + nodelist = [-1, 0, 1] + with pytest.raises( + nx.NetworkXError, + match=f"Nodes {nodelist - P3.nodes} in nodelist is not in G", + ): + nx.to_numpy_array(P3, nodelist=nodelist) + + def test_weight_keyword(self): + WP4 = nx.Graph() + WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)) + P4 = nx.path_graph(4) + A = nx.to_numpy_array(P4) + np.testing.assert_equal(A, nx.to_numpy_array(WP4, weight=None)) + np.testing.assert_equal(0.5 * A, nx.to_numpy_array(WP4)) + np.testing.assert_equal(0.3 * A, nx.to_numpy_array(WP4, weight="other")) + + def test_from_numpy_array_type(self): + A = np.array([[1]]) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], int) + + A = np.array([[1]]).astype(float) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], float) + + A = np.array([[1]]).astype(str) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], str) + + A = np.array([[1]]).astype(bool) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], bool) + + A = np.array([[1]]).astype(complex) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], complex) + + A = np.array([[1]]).astype(object) + pytest.raises(TypeError, nx.from_numpy_array, A) + + A = np.array([[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]]) + with pytest.raises( + nx.NetworkXError, match=f"Input array must be 2D, not {A.ndim}" + ): + g = nx.from_numpy_array(A) + + def test_from_numpy_array_dtype(self): + dt = [("weight", float), ("cost", int)] + A = np.array([[(1.0, 2)]], dtype=dt) + G = nx.from_numpy_array(A) + assert isinstance(G[0][0]["weight"], float) + assert isinstance(G[0][0]["cost"], int) + assert G[0][0]["cost"] == 2 + assert G[0][0]["weight"] == 1.0 + + def test_from_numpy_array_parallel_edges(self): + """Tests that the :func:`networkx.from_numpy_array` function + interprets integer weights as the number of parallel edges when + creating a multigraph. + + """ + A = np.array([[1, 1], [1, 2]]) + # First, with a simple graph, each integer entry in the adjacency + # matrix is interpreted as the weight of a single edge in the graph. + expected = nx.DiGraph() + edges = [(0, 0), (0, 1), (1, 0)] + expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) + expected.add_edge(1, 1, weight=2) + actual = nx.from_numpy_array(A, parallel_edges=True, create_using=nx.DiGraph) + assert graphs_equal(actual, expected) + actual = nx.from_numpy_array(A, parallel_edges=False, create_using=nx.DiGraph) + assert graphs_equal(actual, expected) + # Now each integer entry in the adjacency matrix is interpreted as the + # number of parallel edges in the graph if the appropriate keyword + # argument is specified. + edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] + expected = nx.MultiDiGraph() + expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) + actual = nx.from_numpy_array( + A, parallel_edges=True, create_using=nx.MultiDiGraph + ) + assert graphs_equal(actual, expected) + expected = nx.MultiDiGraph() + expected.add_edges_from(set(edges), weight=1) + # The sole self-loop (edge 0) on vertex 1 should have weight 2. 
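For reference, the rule this test encodes: an integer entry is a single weighted edge on a simple graph, but a count of parallel unit-weight edges on a multigraph when `parallel_edges=True`. A minimal sketch on a tiny symmetric matrix:

```python
import numpy as np
import networkx as nx

A = np.array([[0, 3], [3, 0]])

# Simple graph: the entry becomes one edge with weight 3
G = nx.from_numpy_array(A)
assert G[0][1]["weight"] == 3

# Multigraph, parallel_edges=True: three parallel edges, each with weight 1
M = nx.from_numpy_array(A, parallel_edges=True, create_using=nx.MultiGraph)
assert M.number_of_edges(0, 1) == 3
assert all(d["weight"] == 1 for d in M[0][1].values())

# Multigraph, parallel_edges=False: one edge carrying weight 3
M = nx.from_numpy_array(A, parallel_edges=False, create_using=nx.MultiGraph)
assert M.number_of_edges(0, 1) == 1 and M[0][1][0]["weight"] == 3
```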
+ expected[1][1][0]["weight"] = 2 + actual = nx.from_numpy_array( + A, parallel_edges=False, create_using=nx.MultiDiGraph + ) + assert graphs_equal(actual, expected) + + @pytest.mark.parametrize( + "dt", + ( + None, # default + int, # integer dtype + np.dtype( + [("weight", "f8"), ("color", "i1")] + ), # Structured dtype with named fields + ), + ) + def test_from_numpy_array_no_edge_attr(self, dt): + A = np.array([[0, 1], [1, 0]], dtype=dt) + G = nx.from_numpy_array(A, edge_attr=None) + assert "weight" not in G.edges[0, 1] + assert len(G.edges[0, 1]) == 0 + + def test_from_numpy_array_multiedge_no_edge_attr(self): + A = np.array([[0, 2], [2, 0]]) + G = nx.from_numpy_array(A, create_using=nx.MultiDiGraph, edge_attr=None) + assert all("weight" not in e for _, e in G[0][1].items()) + assert len(G[0][1][0]) == 0 + + def test_from_numpy_array_custom_edge_attr(self): + A = np.array([[0, 2], [3, 0]]) + G = nx.from_numpy_array(A, edge_attr="cost") + assert "weight" not in G.edges[0, 1] + assert G.edges[0, 1]["cost"] == 3 + + def test_symmetric(self): + """Tests that a symmetric array has edges added only once to an + undirected multigraph when using :func:`networkx.from_numpy_array`. + + """ + A = np.array([[0, 1], [1, 0]]) + G = nx.from_numpy_array(A, create_using=nx.MultiGraph) + expected = nx.MultiGraph() + expected.add_edge(0, 1, weight=1) + assert graphs_equal(G, expected) + + def test_dtype_int_graph(self): + """Test that setting dtype int actually gives an integer array. + + For more information, see GitHub pull request #1363. + + """ + G = nx.complete_graph(3) + A = nx.to_numpy_array(G, dtype=int) + assert A.dtype == int + + def test_dtype_int_multigraph(self): + """Test that setting dtype int actually gives an integer array. + + For more information, see GitHub pull request #1363. 
+ + """ + G = nx.MultiGraph(nx.complete_graph(3)) + A = nx.to_numpy_array(G, dtype=int) + assert A.dtype == int + + +@pytest.fixture +def multigraph_test_graph(): + G = nx.MultiGraph() + G.add_edge(1, 2, weight=7) + G.add_edge(1, 2, weight=70) + return G + + +@pytest.mark.parametrize(("operator", "expected"), ((sum, 77), (min, 7), (max, 70))) +def test_numpy_multigraph(multigraph_test_graph, operator, expected): + A = nx.to_numpy_array(multigraph_test_graph, multigraph_weight=operator) + assert A[1, 0] == expected + + +def test_to_numpy_array_multigraph_nodelist(multigraph_test_graph): + G = multigraph_test_graph + G.add_edge(0, 1, weight=3) + A = nx.to_numpy_array(G, nodelist=[1, 2]) + assert A.shape == (2, 2) + assert A[1, 0] == 77 + + +@pytest.mark.parametrize( + "G, expected", + [ + (nx.Graph(), np.array([[0, 1 + 2j], [1 + 2j, 0]], dtype=complex)), + (nx.DiGraph(), np.array([[0, 1 + 2j], [0, 0]], dtype=complex)), + ], +) +def test_to_numpy_array_complex_weights(G, expected): + G.add_edge(0, 1, weight=1 + 2j) + A = nx.to_numpy_array(G, dtype=complex) + npt.assert_array_equal(A, expected) + + +def test_to_numpy_array_arbitrary_weights(): + G = nx.DiGraph() + w = 922337203685477580102 # Out of range for int64 + G.add_edge(0, 1, weight=922337203685477580102) # val not representable by int64 + A = nx.to_numpy_array(G, dtype=object) + expected = np.array([[0, w], [0, 0]], dtype=object) + npt.assert_array_equal(A, expected) + + # Undirected + A = nx.to_numpy_array(G.to_undirected(), dtype=object) + expected = np.array([[0, w], [w, 0]], dtype=object) + npt.assert_array_equal(A, expected) + + +@pytest.mark.parametrize( + "func, expected", + ((min, -1), (max, 10), (sum, 11), (np.mean, 11 / 3), (np.median, 2)), +) +def test_to_numpy_array_multiweight_reduction(func, expected): + """Test various functions for reducing multiedge weights.""" + G = nx.MultiDiGraph() + weights = [-1, 2, 10.0] + for w in weights: + G.add_edge(0, 1, weight=w) + A = nx.to_numpy_array(G, multigraph_weight=func, dtype=float) + assert np.allclose(A, [[0, expected], [0, 0]]) + + # Undirected case + A = nx.to_numpy_array(G.to_undirected(), multigraph_weight=func, dtype=float) + assert np.allclose(A, [[0, expected], [expected, 0]]) + + +@pytest.mark.parametrize( + ("G, expected"), + [ + (nx.Graph(), [[(0, 0), (10, 5)], [(10, 5), (0, 0)]]), + (nx.DiGraph(), [[(0, 0), (10, 5)], [(0, 0), (0, 0)]]), + ], +) +def test_to_numpy_array_structured_dtype_attrs_from_fields(G, expected): + """When `dtype` is structured (i.e. 
has names) and `weight` is None, use + the named fields of the dtype to look up edge attributes.""" + G.add_edge(0, 1, weight=10, cost=5.0) + dtype = np.dtype([("weight", int), ("cost", int)]) + A = nx.to_numpy_array(G, dtype=dtype, weight=None) + expected = np.asarray(expected, dtype=dtype) + npt.assert_array_equal(A, expected) + + +def test_to_numpy_array_structured_dtype_single_attr_default(): + G = nx.path_graph(3) + dtype = np.dtype([("weight", float)]) # A single named field + A = nx.to_numpy_array(G, dtype=dtype, weight=None) + expected = np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float) + npt.assert_array_equal(A["weight"], expected) + + +@pytest.mark.parametrize( + ("field_name", "expected_attr_val"), + [ + ("weight", 1), + ("cost", 3), + ], +) +def test_to_numpy_array_structured_dtype_single_attr(field_name, expected_attr_val): + G = nx.Graph() + G.add_edge(0, 1, cost=3) + dtype = np.dtype([(field_name, float)]) + A = nx.to_numpy_array(G, dtype=dtype, weight=None) + expected = np.array([[0, expected_attr_val], [expected_attr_val, 0]], dtype=float) + npt.assert_array_equal(A[field_name], expected) + + +@pytest.mark.parametrize("graph_type", (nx.Graph, nx.DiGraph)) +@pytest.mark.parametrize( + "edge", + [ + (0, 1), # No edge attributes + (0, 1, {"weight": 10}), # One edge attr + (0, 1, {"weight": 5, "flow": -4}), # Multiple but not all edge attrs + (0, 1, {"weight": 2.0, "cost": 10, "flow": -45}), # All attrs + ], +) +def test_to_numpy_array_structured_dtype_multiple_fields(graph_type, edge): + G = graph_type([edge]) + dtype = np.dtype([("weight", float), ("cost", float), ("flow", float)]) + A = nx.to_numpy_array(G, dtype=dtype, weight=None) + for attr in dtype.names: + expected = nx.to_numpy_array(G, dtype=float, weight=attr) + npt.assert_array_equal(A[attr], expected) + + +@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) +def test_to_numpy_array_structured_dtype_scalar_nonedge(G): + G.add_edge(0, 1, weight=10) + dtype = np.dtype([("weight", float), ("cost", float)]) + A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=np.nan) + for attr in dtype.names: + expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=np.nan) + npt.assert_array_equal(A[attr], expected) + + +@pytest.mark.parametrize("G", (nx.Graph(), nx.DiGraph())) +def test_to_numpy_array_structured_dtype_nonedge_ary(G): + """Similar to the scalar case, except has a different non-edge value for + each named field.""" + G.add_edge(0, 1, weight=10) + dtype = np.dtype([("weight", float), ("cost", float)]) + nonedges = np.array([(0, np.inf)], dtype=dtype) + A = nx.to_numpy_array(G, dtype=dtype, weight=None, nonedge=nonedges) + for attr in dtype.names: + nonedge = nonedges[attr] + expected = nx.to_numpy_array(G, dtype=float, weight=attr, nonedge=nonedge) + npt.assert_array_equal(A[attr], expected) + + +def test_to_numpy_array_structured_dtype_with_weight_raises(): + """Using both a structured dtype (with named fields) and specifying a `weight` + parameter is ambiguous.""" + G = nx.path_graph(3) + dtype = np.dtype([("weight", int), ("cost", int)]) + exception_msg = "Specifying `weight` not supported for structured dtypes" + with pytest.raises(ValueError, match=exception_msg): + nx.to_numpy_array(G, dtype=dtype) # Default is weight="weight" + with pytest.raises(ValueError, match=exception_msg): + nx.to_numpy_array(G, dtype=dtype, weight="cost") + + +@pytest.mark.parametrize("graph_type", (nx.MultiGraph, nx.MultiDiGraph)) +def test_to_numpy_array_structured_multigraph_raises(graph_type): + G = 
nx.path_graph(3, create_using=graph_type) + dtype = np.dtype([("weight", int), ("cost", int)]) + with pytest.raises(nx.NetworkXError, match="Structured arrays are not supported"): + nx.to_numpy_array(G, dtype=dtype, weight=None) + + +def test_from_numpy_array_nodelist_bad_size(): + """An exception is raised when `len(nodelist) != A.shape[0]`.""" + n = 5 # Number of nodes + A = np.diag(np.ones(n - 1), k=1) # Adj. matrix for P_n + expected = nx.path_graph(n) + + assert graphs_equal(nx.from_numpy_array(A, edge_attr=None), expected) + nodes = list(range(n)) + assert graphs_equal( + nx.from_numpy_array(A, edge_attr=None, nodelist=nodes), expected + ) + + # Too many node labels + nodes = list(range(n + 1)) + with pytest.raises(ValueError, match="nodelist must have the same length as A"): + nx.from_numpy_array(A, nodelist=nodes) + + # Too few node labels + nodes = list(range(n - 1)) + with pytest.raises(ValueError, match="nodelist must have the same length as A"): + nx.from_numpy_array(A, nodelist=nodes) + + +@pytest.mark.parametrize( + "nodes", + ( + [4, 3, 2, 1, 0], + [9, 7, 1, 2, 8], + ["a", "b", "c", "d", "e"], + [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)], + ["A", 2, 7, "spam", (1, 3)], + ), +) +def test_from_numpy_array_nodelist(nodes): + A = np.diag(np.ones(4), k=1) + # Without edge attributes + expected = nx.relabel_nodes( + nx.path_graph(5), mapping=dict(enumerate(nodes)), copy=True + ) + G = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes) + assert graphs_equal(G, expected) + + # With edge attributes + nx.set_edge_attributes(expected, 1.0, name="weight") + G = nx.from_numpy_array(A, nodelist=nodes) + assert graphs_equal(G, expected) + + +@pytest.mark.parametrize( + "nodes", + ( + [4, 3, 2, 1, 0], + [9, 7, 1, 2, 8], + ["a", "b", "c", "d", "e"], + [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)], + ["A", 2, 7, "spam", (1, 3)], + ), +) +def test_from_numpy_array_nodelist_directed(nodes): + A = np.diag(np.ones(4), k=1) + # Without edge attributes + H = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)]) + expected = nx.relabel_nodes(H, mapping=dict(enumerate(nodes)), copy=True) + G = nx.from_numpy_array(A, create_using=nx.DiGraph, edge_attr=None, nodelist=nodes) + assert graphs_equal(G, expected) + + # With edge attributes + nx.set_edge_attributes(expected, 1.0, name="weight") + G = nx.from_numpy_array(A, create_using=nx.DiGraph, nodelist=nodes) + assert graphs_equal(G, expected) + + +@pytest.mark.parametrize( + "nodes", + ( + [4, 3, 2, 1, 0], + [9, 7, 1, 2, 8], + ["a", "b", "c", "d", "e"], + [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)], + ["A", 2, 7, "spam", (1, 3)], + ), +) +def test_from_numpy_array_nodelist_multigraph(nodes): + A = np.array( + [ + [0, 1, 0, 0, 0], + [1, 0, 2, 0, 0], + [0, 2, 0, 3, 0], + [0, 0, 3, 0, 4], + [0, 0, 0, 4, 0], + ] + ) + + H = nx.MultiGraph() + for i, edge in enumerate(((0, 1), (1, 2), (2, 3), (3, 4))): + H.add_edges_from(itertools.repeat(edge, i + 1)) + expected = nx.relabel_nodes(H, mapping=dict(enumerate(nodes)), copy=True) + + G = nx.from_numpy_array( + A, + parallel_edges=True, + create_using=nx.MultiGraph, + edge_attr=None, + nodelist=nodes, + ) + assert graphs_equal(G, expected) + + +@pytest.mark.parametrize( + "nodes", + ( + [4, 3, 2, 1, 0], + [9, 7, 1, 2, 8], + ["a", "b", "c", "d", "e"], + [(0, 0), (1, 1), (2, 3), (0, 2), (3, 1)], + ["A", 2, 7, "spam", (1, 3)], + ), +) +@pytest.mark.parametrize("graph", (nx.complete_graph, nx.cycle_graph, nx.wheel_graph)) +def test_from_numpy_array_nodelist_roundtrip(graph, nodes): + G = graph(5) + A = nx.to_numpy_array(G) + expected
= nx.relabel_nodes(G, mapping=dict(enumerate(nodes)), copy=True) + H = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes) + assert graphs_equal(H, expected) + + # With an isolated node + G = graph(4) + G.add_node("foo") + A = nx.to_numpy_array(G) + expected = nx.relabel_nodes(G, mapping=dict(zip(G.nodes, nodes)), copy=True) + H = nx.from_numpy_array(A, edge_attr=None, nodelist=nodes) + assert graphs_equal(H, expected) diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py new file mode 100644 index 0000000..8c3f02a --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_pandas.py @@ -0,0 +1,349 @@ +import pytest + +import networkx as nx +from networkx.utils import edges_equal, graphs_equal, nodes_equal + +np = pytest.importorskip("numpy") +pd = pytest.importorskip("pandas") + + +class TestConvertPandas: + def setup_method(self): + self.rng = np.random.RandomState(seed=5) + ints = self.rng.randint(1, 11, size=(3, 2)) + a = ["A", "B", "C"] + b = ["D", "A", "E"] + df = pd.DataFrame(ints, columns=["weight", "cost"]) + df[0] = a # Column label 0 (int) + df["b"] = b # Column label 'b' (str) + self.df = df + + mdf = pd.DataFrame([[4, 16, "A", "D"]], columns=["weight", "cost", 0, "b"]) + self.mdf = pd.concat([df, mdf]) + + def test_exceptions(self): + G = pd.DataFrame(["a"]) # adj + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) + G = pd.DataFrame(["a", 0.0]) # elist + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) + df = pd.DataFrame([[1, 1], [1, 0]], dtype=int, index=[1, 2], columns=["a", "b"]) + pytest.raises(nx.NetworkXError, nx.from_pandas_adjacency, df) + + def test_from_edgelist_all_attr(self): + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", True) + assert graphs_equal(G, Gtrue) + # MultiGraph + MGtrue = nx.MultiGraph(Gtrue) + MGtrue.add_edge("A", "D", cost=16, weight=4) + MG = nx.from_pandas_edgelist(self.mdf, 0, "b", True, nx.MultiGraph()) + assert graphs_equal(MG, MGtrue) + + def test_from_edgelist_multi_attr(self): + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", ["weight", "cost"]) + assert graphs_equal(G, Gtrue) + + def test_from_edgelist_multi_attr_incl_target(self): + Gtrue = nx.Graph( + [ + ("E", "C", {0: "C", "b": "E", "weight": 10}), + ("B", "A", {0: "B", "b": "A", "weight": 7}), + ("A", "D", {0: "A", "b": "D", "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", [0, "b", "weight"]) + assert graphs_equal(G, Gtrue) + + def test_from_edgelist_multidigraph_and_edge_attr(self): + # example from issue #2374 + edges = [ + ("X1", "X4", {"Co": "zA", "Mi": 0, "St": "X1"}), + ("X1", "X4", {"Co": "zB", "Mi": 54, "St": "X2"}), + ("X1", "X4", {"Co": "zB", "Mi": 49, "St": "X3"}), + ("X1", "X4", {"Co": "zB", "Mi": 44, "St": "X4"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 0, "St": "Y1"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 34, "St": "Y2"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 29, "St": "X2"}), + ("Y1", "Y3", {"Co": "zC", "Mi": 24, "St": "Y3"}), + ("Z1", "Z3", {"Co": "zD", "Mi": 0, "St": "Z1"}), + ("Z1", "Z3", {"Co": "zD", "Mi": 14, "St": "X3"}), + ] + Gtrue = nx.MultiDiGraph(edges) + data = { + "O": ["X1", "X1", "X1", "X1", "Y1", "Y1", 
"Y1", "Y1", "Z1", "Z1"], + "D": ["X4", "X4", "X4", "X4", "Y3", "Y3", "Y3", "Y3", "Z3", "Z3"], + "St": ["X1", "X2", "X3", "X4", "Y1", "Y2", "X2", "Y3", "Z1", "X3"], + "Co": ["zA", "zB", "zB", "zB", "zC", "zC", "zC", "zC", "zD", "zD"], + "Mi": [0, 54, 49, 44, 0, 34, 29, 24, 0, 14], + } + df = pd.DataFrame.from_dict(data) + G1 = nx.from_pandas_edgelist( + df, source="O", target="D", edge_attr=True, create_using=nx.MultiDiGraph + ) + G2 = nx.from_pandas_edgelist( + df, + source="O", + target="D", + edge_attr=["St", "Co", "Mi"], + create_using=nx.MultiDiGraph, + ) + assert graphs_equal(G1, Gtrue) + assert graphs_equal(G2, Gtrue) + + def test_from_edgelist_one_attr(self): + Gtrue = nx.Graph( + [ + ("E", "C", {"weight": 10}), + ("B", "A", {"weight": 7}), + ("A", "D", {"weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", "weight") + assert graphs_equal(G, Gtrue) + + def test_from_edgelist_int_attr_name(self): + # note: this also tests that edge_attr can be `source` + Gtrue = nx.Graph( + [("E", "C", {0: "C"}), ("B", "A", {0: "B"}), ("A", "D", {0: "A"})] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", 0) + assert graphs_equal(G, Gtrue) + + def test_from_edgelist_invalid_attr(self): + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", "misspell" + ) + pytest.raises(nx.NetworkXError, nx.from_pandas_edgelist, self.df, 0, "b", 1) + # see Issue #3562 + edgeframe = pd.DataFrame([[0, 1], [1, 2], [2, 0]], columns=["s", "t"]) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", True + ) + pytest.raises( + nx.NetworkXError, nx.from_pandas_edgelist, edgeframe, "s", "t", "weight" + ) + pytest.raises( + nx.NetworkXError, + nx.from_pandas_edgelist, + edgeframe, + "s", + "t", + ["weight", "size"], + ) + + def test_from_edgelist_no_attr(self): + Gtrue = nx.Graph([("E", "C", {}), ("B", "A", {}), ("A", "D", {})]) + G = nx.from_pandas_edgelist(self.df, 0, "b") + assert graphs_equal(G, Gtrue) + + def test_from_edgelist(self): + # Pandas DataFrame + G = nx.cycle_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + + edgelist = nx.to_edgelist(G) + source = [s for s, t, d in edgelist] + target = [t for s, t, d in edgelist] + weight = [d["weight"] for s, t, d in edgelist] + edges = pd.DataFrame({"source": source, "target": target, "weight": weight}) + + GG = nx.from_pandas_edgelist(edges, edge_attr="weight") + assert nodes_equal(G.nodes(), GG.nodes()) + assert edges_equal(G.edges(), GG.edges()) + GW = nx.to_networkx_graph(edges, create_using=nx.Graph) + assert nodes_equal(G.nodes(), GW.nodes()) + assert edges_equal(G.edges(), GW.edges()) + + def test_to_edgelist_default_source_or_target_col_exists(self): + G = nx.path_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + nx.set_edge_attributes(G, 0, name="source") + pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G) + + # drop source column to test an exception raised for the target column + for u, v, d in G.edges(data=True): + d.pop("source", None) + + nx.set_edge_attributes(G, 0, name="target") + pytest.raises(nx.NetworkXError, nx.to_pandas_edgelist, G) + + def test_to_edgelist_custom_source_or_target_col_exists(self): + G = nx.path_graph(10) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges)) + nx.set_edge_attributes(G, 0, name="source_col_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, source="source_col_name" + ) + + # drop source column to test an exception raised for the target column + for u, v, d in 
G.edges(data=True): + d.pop("source_col_name", None) + + nx.set_edge_attributes(G, 0, name="target_col_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, target="target_col_name" + ) + + def test_to_edgelist_edge_key_col_exists(self): + G = nx.path_graph(10, create_using=nx.MultiGraph) + G.add_weighted_edges_from((u, v, u) for u, v in list(G.edges())) + nx.set_edge_attributes(G, 0, name="edge_key_name") + pytest.raises( + nx.NetworkXError, nx.to_pandas_edgelist, G, edge_key="edge_key_name" + ) + + def test_from_adjacency(self): + nodelist = [1, 2] + dftrue = pd.DataFrame( + [[1, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist + ) + G = nx.Graph([(1, 1), (1, 2)]) + df = nx.to_pandas_adjacency(G, dtype=int) + pd.testing.assert_frame_equal(df, dftrue) + + @pytest.mark.parametrize("graph", [nx.Graph, nx.MultiGraph]) + def test_roundtrip(self, graph): + # edgelist + Gtrue = graph([(1, 1), (1, 2)]) + df = nx.to_pandas_edgelist(Gtrue) + G = nx.from_pandas_edgelist(df, create_using=graph) + assert graphs_equal(Gtrue, G) + # adjacency + adj = {1: {1: {"weight": 1}, 2: {"weight": 1}}, 2: {1: {"weight": 1}}} + Gtrue = graph(adj) + df = nx.to_pandas_adjacency(Gtrue, dtype=int) + G = nx.from_pandas_adjacency(df, create_using=graph) + assert graphs_equal(Gtrue, G) + + def test_from_adjacency_named(self): + # example from issue #3105 + data = { + "A": {"A": 0, "B": 0, "C": 0}, + "B": {"A": 1, "B": 0, "C": 0}, + "C": {"A": 0, "B": 1, "C": 0}, + } + dftrue = pd.DataFrame(data, dtype=np.intp) + df = dftrue[["A", "C", "B"]] + G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph()) + df = nx.to_pandas_adjacency(G, dtype=np.intp) + pd.testing.assert_frame_equal(df, dftrue) + + @pytest.mark.parametrize("edge_attr", [["attr2", "attr3"], True]) + def test_edgekey_with_multigraph(self, edge_attr): + df = pd.DataFrame( + { + "source": {"A": "N1", "B": "N2", "C": "N1", "D": "N1"}, + "target": {"A": "N2", "B": "N3", "C": "N1", "D": "N2"}, + "attr1": {"A": "F1", "B": "F2", "C": "F3", "D": "F4"}, + "attr2": {"A": 1, "B": 0, "C": 0, "D": 0}, + "attr3": {"A": 0, "B": 1, "C": 0, "D": 1}, + } + ) + Gtrue = nx.MultiGraph( + [ + ("N1", "N2", "F1", {"attr2": 1, "attr3": 0}), + ("N2", "N3", "F2", {"attr2": 0, "attr3": 1}), + ("N1", "N1", "F3", {"attr2": 0, "attr3": 0}), + ("N1", "N2", "F4", {"attr2": 0, "attr3": 1}), + ] + ) + # example from issue #4065 + G = nx.from_pandas_edgelist( + df, + source="source", + target="target", + edge_attr=edge_attr, + edge_key="attr1", + create_using=nx.MultiGraph(), + ) + assert graphs_equal(G, Gtrue) + + df_roundtrip = nx.to_pandas_edgelist(G, edge_key="attr1") + df_roundtrip = df_roundtrip.sort_values("attr1") + df_roundtrip.index = ["A", "B", "C", "D"] + pd.testing.assert_frame_equal( + df, df_roundtrip[["source", "target", "attr1", "attr2", "attr3"]] + ) + + def test_edgekey_with_normal_graph_no_action(self): + Gtrue = nx.Graph( + [ + ("E", "C", {"cost": 9, "weight": 10}), + ("B", "A", {"cost": 1, "weight": 7}), + ("A", "D", {"cost": 7, "weight": 4}), + ] + ) + G = nx.from_pandas_edgelist(self.df, 0, "b", True, edge_key="weight") + assert graphs_equal(G, Gtrue) + + def test_nonexisting_edgekey_raises(self): + with pytest.raises(nx.exception.NetworkXError): + nx.from_pandas_edgelist( + self.df, + source="source", + target="target", + edge_key="Not_real", + edge_attr=True, + create_using=nx.MultiGraph(), + ) + + def test_multigraph_with_edgekey_no_edgeattrs(self): + Gtrue = nx.MultiGraph() + Gtrue.add_edge(0, 1, key=0) + Gtrue.add_edge(0, 1, key=3) + df = 
nx.to_pandas_edgelist(Gtrue, edge_key="key") + expected = pd.DataFrame({"source": [0, 0], "target": [1, 1], "key": [0, 3]}) + pd.testing.assert_frame_equal(expected, df) + G = nx.from_pandas_edgelist(df, edge_key="key", create_using=nx.MultiGraph) + assert graphs_equal(Gtrue, G) + + +def test_to_pandas_adjacency_with_nodelist(): + G = nx.complete_graph(5) + nodelist = [1, 4] + expected = pd.DataFrame( + [[0, 1], [1, 0]], dtype=int, index=nodelist, columns=nodelist + ) + pd.testing.assert_frame_equal( + expected, nx.to_pandas_adjacency(G, nodelist, dtype=int) + ) + + +def test_to_pandas_edgelist_with_nodelist(): + G = nx.Graph() + G.add_edges_from([(0, 1), (1, 2), (1, 3)], weight=2.0) + G.add_edge(0, 5, weight=100) + df = nx.to_pandas_edgelist(G, nodelist=[1, 2]) + assert 0 not in df["source"].to_numpy() + assert 100 not in df["weight"].to_numpy() + + +def test_from_pandas_adjacency_with_index_collisions(): + """See gh-7407""" + df = pd.DataFrame( + [ + [0, 1, 0, 0], + [0, 0, 1, 0], + [0, 0, 0, 1], + [0, 0, 0, 0], + ], + index=[1010001, 2, 1, 1010002], + columns=[1010001, 2, 1, 1010002], + ) + G = nx.from_pandas_adjacency(df, create_using=nx.DiGraph) + expected = nx.DiGraph([(1010001, 2), (2, 1), (1, 1010002)]) + assert nodes_equal(G.nodes, expected.nodes) + assert edges_equal(G.edges, expected.edges) diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py new file mode 100644 index 0000000..2575027 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_convert_scipy.py @@ -0,0 +1,281 @@ +import pytest + +import networkx as nx +from networkx.utils import graphs_equal + +np = pytest.importorskip("numpy") +sp = pytest.importorskip("scipy") + + +class TestConvertScipy: + def setup_method(self): + self.G1 = nx.barbell_graph(10, 3) + self.G2 = nx.cycle_graph(10, create_using=nx.DiGraph) + + self.G3 = self.create_weighted(nx.Graph()) + self.G4 = self.create_weighted(nx.DiGraph()) + + def test_exceptions(self): + class G: + format = None + + pytest.raises(nx.NetworkXError, nx.to_networkx_graph, G) + + def create_weighted(self, G): + g = nx.cycle_graph(4) + e = list(g.edges()) + source = [u for u, v in e] + dest = [v for u, v in e] + weight = [s + 10 for s in source] + ex = zip(source, dest, weight) + G.add_weighted_edges_from(ex) + return G + + def identity_conversion(self, G, A, create_using): + GG = nx.from_scipy_sparse_array(A, create_using=create_using) + assert nx.is_isomorphic(G, GG) + + GW = nx.to_networkx_graph(A, create_using=create_using) + assert nx.is_isomorphic(G, GW) + + GI = nx.empty_graph(0, create_using).__class__(A) + assert nx.is_isomorphic(G, GI) + + ACSR = A.tocsr() + GI = nx.empty_graph(0, create_using).__class__(ACSR) + assert nx.is_isomorphic(G, GI) + + ACOO = A.tocoo() + GI = nx.empty_graph(0, create_using).__class__(ACOO) + assert nx.is_isomorphic(G, GI) + + ACSC = A.tocsc() + GI = nx.empty_graph(0, create_using).__class__(ACSC) + assert nx.is_isomorphic(G, GI) + + AD = A.todense() + GI = nx.empty_graph(0, create_using).__class__(AD) + assert nx.is_isomorphic(G, GI) + + AA = A.toarray() + GI = nx.empty_graph(0, create_using).__class__(AA) + assert nx.is_isomorphic(G, GI) + + def test_shape(self): + "Conversion from non-square sparse array." + A = sp.sparse.lil_array([[1, 2, 3], [4, 5, 6]]) + pytest.raises(nx.NetworkXError, nx.from_scipy_sparse_array, A) + + def test_identity_graph_matrix(self): + "Conversion from graph to sparse matrix to graph." 
+ A = nx.to_scipy_sparse_array(self.G1) + self.identity_conversion(self.G1, A, nx.Graph()) + + def test_identity_digraph_matrix(self): + "Conversion from digraph to sparse matrix to digraph." + A = nx.to_scipy_sparse_array(self.G2) + self.identity_conversion(self.G2, A, nx.DiGraph()) + + def test_identity_weighted_graph_matrix(self): + """Conversion from weighted graph to sparse matrix to weighted graph.""" + A = nx.to_scipy_sparse_array(self.G3) + self.identity_conversion(self.G3, A, nx.Graph()) + + def test_identity_weighted_digraph_matrix(self): + """Conversion from weighted digraph to sparse matrix to weighted digraph.""" + A = nx.to_scipy_sparse_array(self.G4) + self.identity_conversion(self.G4, A, nx.DiGraph()) + + def test_nodelist(self): + """Conversion from graph to sparse matrix to graph with nodelist.""" + P4 = nx.path_graph(4) + P3 = nx.path_graph(3) + nodelist = list(P3.nodes()) + A = nx.to_scipy_sparse_array(P4, nodelist=nodelist) + GA = nx.Graph(A) + assert nx.is_isomorphic(GA, P3) + + pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=[]) + # Test nodelist duplicates. + long_nl = nodelist + [0] + pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=long_nl) + + # Test nodelist contains non-nodes + non_nl = [-1, 0, 1, 2] + pytest.raises(nx.NetworkXError, nx.to_scipy_sparse_array, P3, nodelist=non_nl) + + def test_weight_keyword(self): + WP4 = nx.Graph() + WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)) + P4 = nx.path_graph(4) + A = nx.to_scipy_sparse_array(P4) + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + np.testing.assert_equal( + 0.5 * A.todense(), nx.to_scipy_sparse_array(WP4).todense() + ) + np.testing.assert_equal( + 0.3 * A.todense(), nx.to_scipy_sparse_array(WP4, weight="other").todense() + ) + + def test_format_keyword(self): + WP4 = nx.Graph() + WP4.add_edges_from((n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3)) + P4 = nx.path_graph(4) + A = nx.to_scipy_sparse_array(P4, format="csr") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="csc") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="coo") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="bsr") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="lil") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="dia") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + A = nx.to_scipy_sparse_array(P4, format="dok") + np.testing.assert_equal( + A.todense(), nx.to_scipy_sparse_array(WP4, weight=None).todense() + ) + + def test_format_keyword_raise(self): + with pytest.raises(nx.NetworkXError): + WP4 = nx.Graph() + WP4.add_edges_from( + (n, n + 1, {"weight": 0.5, "other": 0.3}) for n in range(3) + ) + P4 = nx.path_graph(4) + nx.to_scipy_sparse_array(P4, format="any_other") + + def test_null_raise(self): + with pytest.raises(nx.NetworkXError): + nx.to_scipy_sparse_array(nx.Graph()) + + def test_empty(self): + G = nx.Graph() + G.add_node(1) + M = nx.to_scipy_sparse_array(G) + 
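The `format` and `weight` keywords exercised by the surrounding tests have simple semantics; a minimal sketch:

```python
import networkx as nx

G = nx.path_graph(4)

# `format` selects the scipy.sparse container; the contents are identical
A_csr = nx.to_scipy_sparse_array(G)  # CSR is the default
A_coo = nx.to_scipy_sparse_array(G, format="coo")
assert A_csr.format == "csr" and A_coo.format == "coo"
assert (A_csr.toarray() == A_coo.toarray()).all()

# weight=None writes 1 for every edge instead of reading the "weight" attribute
H = nx.Graph()
H.add_edge(0, 1, weight=0.5)
assert nx.to_scipy_sparse_array(H).toarray()[0, 1] == 0.5
assert nx.to_scipy_sparse_array(H, weight=None).toarray()[0, 1] == 1
```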
np.testing.assert_equal(M.toarray(), np.array([[0]])) + + def test_ordering(self): + G = nx.DiGraph() + G.add_edge(1, 2) + G.add_edge(2, 3) + G.add_edge(3, 1) + M = nx.to_scipy_sparse_array(G, nodelist=[3, 2, 1]) + np.testing.assert_equal( + M.toarray(), np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0]]) + ) + + def test_selfloop_graph(self): + G = nx.Graph([(1, 1)]) + M = nx.to_scipy_sparse_array(G) + np.testing.assert_equal(M.toarray(), np.array([[1]])) + + G.add_edges_from([(2, 3), (3, 4)]) + M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4]) + np.testing.assert_equal( + M.toarray(), np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]]) + ) + + def test_selfloop_digraph(self): + G = nx.DiGraph([(1, 1)]) + M = nx.to_scipy_sparse_array(G) + np.testing.assert_equal(M.toarray(), np.array([[1]])) + + G.add_edges_from([(2, 3), (3, 4)]) + M = nx.to_scipy_sparse_array(G, nodelist=[2, 3, 4]) + np.testing.assert_equal( + M.toarray(), np.array([[0, 1, 0], [0, 0, 1], [0, 0, 0]]) + ) + + def test_from_scipy_sparse_array_parallel_edges(self): + """Tests that the :func:`networkx.from_scipy_sparse_array` function + interprets integer weights as the number of parallel edges when + creating a multigraph. + + """ + A = sp.sparse.csr_array([[1, 1], [1, 2]]) + # First, with a simple graph, each integer entry in the adjacency + # matrix is interpreted as the weight of a single edge in the graph. + expected = nx.DiGraph() + edges = [(0, 0), (0, 1), (1, 0)] + expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) + expected.add_edge(1, 1, weight=2) + actual = nx.from_scipy_sparse_array( + A, parallel_edges=True, create_using=nx.DiGraph + ) + assert graphs_equal(actual, expected) + actual = nx.from_scipy_sparse_array( + A, parallel_edges=False, create_using=nx.DiGraph + ) + assert graphs_equal(actual, expected) + # Now each integer entry in the adjacency matrix is interpreted as the + # number of parallel edges in the graph if the appropriate keyword + # argument is specified. + edges = [(0, 0), (0, 1), (1, 0), (1, 1), (1, 1)] + expected = nx.MultiDiGraph() + expected.add_weighted_edges_from([(u, v, 1) for (u, v) in edges]) + actual = nx.from_scipy_sparse_array( + A, parallel_edges=True, create_using=nx.MultiDiGraph + ) + assert graphs_equal(actual, expected) + expected = nx.MultiDiGraph() + expected.add_edges_from(set(edges), weight=1) + # The sole self-loop (edge 0) on vertex 1 should have weight 2. + expected[1][1][0]["weight"] = 2 + actual = nx.from_scipy_sparse_array( + A, parallel_edges=False, create_using=nx.MultiDiGraph + ) + assert graphs_equal(actual, expected) + + def test_symmetric(self): + """Tests that a symmetric matrix has edges added only once to an + undirected multigraph when using + :func:`networkx.from_scipy_sparse_array`. 
+ + """ + A = sp.sparse.csr_array([[0, 1], [1, 0]]) + G = nx.from_scipy_sparse_array(A, create_using=nx.MultiGraph) + expected = nx.MultiGraph() + expected.add_edge(0, 1, weight=1) + assert graphs_equal(G, expected) + + +@pytest.mark.parametrize("sparse_format", ("csr", "csc", "dok")) +def test_from_scipy_sparse_array_formats(sparse_format): + """Test all formats supported by _generate_weighted_edges.""" + # trinode complete graph with non-uniform edge weights + expected = nx.Graph() + expected.add_edges_from( + [ + (0, 1, {"weight": 3}), + (0, 2, {"weight": 2}), + (1, 0, {"weight": 3}), + (1, 2, {"weight": 1}), + (2, 0, {"weight": 2}), + (2, 1, {"weight": 1}), + ] + ) + A = sp.sparse.coo_array([[0, 3, 2], [3, 0, 1], [2, 1, 0]]).asformat(sparse_format) + assert graphs_equal(expected, nx.from_scipy_sparse_array(A)) diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py new file mode 100644 index 0000000..cf59983 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_exceptions.py @@ -0,0 +1,40 @@ +import pytest + +import networkx as nx + +# smoke tests for exceptions + + +def test_raises_networkxexception(): + with pytest.raises(nx.NetworkXException): + raise nx.NetworkXException + + +def test_raises_networkxerr(): + with pytest.raises(nx.NetworkXError): + raise nx.NetworkXError + + +def test_raises_networkx_pointless_concept(): + with pytest.raises(nx.NetworkXPointlessConcept): + raise nx.NetworkXPointlessConcept + + +def test_raises_networkxalgorithmerr(): + with pytest.raises(nx.NetworkXAlgorithmError): + raise nx.NetworkXAlgorithmError + + +def test_raises_networkx_unfeasible(): + with pytest.raises(nx.NetworkXUnfeasible): + raise nx.NetworkXUnfeasible + + +def test_raises_networkx_no_path(): + with pytest.raises(nx.NetworkXNoPath): + raise nx.NetworkXNoPath + + +def test_raises_networkx_unbounded(): + with pytest.raises(nx.NetworkXUnbounded): + raise nx.NetworkXUnbounded diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py new file mode 100644 index 0000000..32aafdf --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_import.py @@ -0,0 +1,11 @@ +import pytest + + +def test_namespace_alias(): + with pytest.raises(ImportError): + from networkx import nx + + +def test_namespace_nesting(): + with pytest.raises(ImportError): + from networkx import networkx diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py new file mode 100644 index 0000000..ec09ac2 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_lazy_imports.py @@ -0,0 +1,96 @@ +import sys +import types + +import pytest + +import networkx.lazy_imports as lazy + + +def test_lazy_import_basics(): + math = lazy._lazy_import("math") + anything_not_real = lazy._lazy_import("anything_not_real") + + # Now test that accessing attributes does what it should + assert math.sin(math.pi) == pytest.approx(0, 1e-6) + # poor-mans pytest.raises for testing errors on attribute access + try: + anything_not_real.pi + assert False # Should not get here + except ModuleNotFoundError: + pass + assert isinstance(anything_not_real, lazy.DelayedImportErrorModule) + # see if it changes for second access + try: + anything_not_real.pi + assert False # Should not get here + except ModuleNotFoundError: + pass + + 
+def test_lazy_import_impact_on_sys_modules(): + math = lazy._lazy_import("math") + anything_not_real = lazy._lazy_import("anything_not_real") + + assert isinstance(math, types.ModuleType) + assert "math" in sys.modules + assert type(anything_not_real) is lazy.DelayedImportErrorModule + assert "anything_not_real" not in sys.modules + + # only do this if numpy is installed + np_test = pytest.importorskip("numpy") + np = lazy._lazy_import("numpy") + assert isinstance(np, types.ModuleType) + assert "numpy" in sys.modules + + np.pi # trigger load of numpy + + assert isinstance(np, types.ModuleType) + assert "numpy" in sys.modules + + +def test_lazy_import_nonbuiltins(): + sp = lazy._lazy_import("scipy") + np = lazy._lazy_import("numpy") + if isinstance(sp, lazy.DelayedImportErrorModule): + try: + sp.special.erf + assert False + except ModuleNotFoundError: + pass + elif isinstance(np, lazy.DelayedImportErrorModule): + try: + np.sin(np.pi) + assert False + except ModuleNotFoundError: + pass + else: + assert sp.special.erf(np.pi) == pytest.approx(1, 1e-4) + + +def test_lazy_attach(): + name = "mymod" + submods = ["mysubmodule", "anothersubmodule"] + myall = {"not_real_submod": ["some_var_or_func"]} + + locls = { + "attach": lazy.attach, + "name": name, + "submods": submods, + "myall": myall, + } + s = "__getattr__, __lazy_dir__, __all__ = attach(name, submods, myall)" + + exec(s, {}, locls) + expected = { + "attach": lazy.attach, + "name": name, + "submods": submods, + "myall": myall, + "__getattr__": None, + "__lazy_dir__": None, + "__all__": None, + } + assert locls.keys() == expected.keys() + for k, v in expected.items(): + if v is not None: + assert locls[k] == v diff --git a/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py b/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py new file mode 100644 index 0000000..0ebf4d3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/tests/test_relabel.py @@ -0,0 +1,347 @@ +import pytest + +import networkx as nx +from networkx.generators.classic import empty_graph +from networkx.utils import edges_equal, nodes_equal + + +class TestRelabel: + def test_convert_node_labels_to_integers(self): + # test that empty graph converts fine for all options + G = empty_graph() + H = nx.convert_node_labels_to_integers(G, 100) + assert list(H.nodes()) == [] + assert list(H.edges()) == [] + + for opt in ["default", "sorted", "increasing degree", "decreasing degree"]: + G = empty_graph() + H = nx.convert_node_labels_to_integers(G, 100, ordering=opt) + assert list(H.nodes()) == [] + assert list(H.edges()) == [] + + G = empty_graph() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + H = nx.convert_node_labels_to_integers(G) + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == sorted(degG) + + H = nx.convert_node_labels_to_integers(G, 1000) + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == sorted(degG) + assert nodes_equal(H.nodes(), [1000, 1001, 1002, 1003]) + + H = nx.convert_node_labels_to_integers(G, ordering="increasing degree") + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == sorted(degG) + assert H.degree(0) == 1 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 3 + + H = nx.convert_node_labels_to_integers(G, ordering="decreasing degree") + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == 
sorted(degG) + assert H.degree(0) == 3 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 1 + + H = nx.convert_node_labels_to_integers( + G, ordering="increasing degree", label_attribute="label" + ) + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == sorted(degG) + assert H.degree(0) == 1 + assert H.degree(1) == 2 + assert H.degree(2) == 2 + assert H.degree(3) == 3 + + # check mapping + assert H.nodes[3]["label"] == "C" + assert H.nodes[0]["label"] == "D" + assert H.nodes[1]["label"] == "A" or H.nodes[2]["label"] == "A" + assert H.nodes[1]["label"] == "B" or H.nodes[2]["label"] == "B" + + def test_convert_to_integers2(self): + G = empty_graph() + G.add_edges_from([("C", "D"), ("A", "B"), ("A", "C"), ("B", "C")]) + H = nx.convert_node_labels_to_integers(G, ordering="sorted") + degH = (d for n, d in H.degree()) + degG = (d for n, d in G.degree()) + assert sorted(degH) == sorted(degG) + + H = nx.convert_node_labels_to_integers( + G, ordering="sorted", label_attribute="label" + ) + assert H.nodes[0]["label"] == "A" + assert H.nodes[1]["label"] == "B" + assert H.nodes[2]["label"] == "C" + assert H.nodes[3]["label"] == "D" + + def test_convert_to_integers_raise(self): + with pytest.raises(nx.NetworkXError): + G = nx.Graph() + H = nx.convert_node_labels_to_integers(G, ordering="increasing age") + + def test_relabel_nodes_copy(self): + G = nx.empty_graph() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping) + assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) + + def test_relabel_nodes_function(self): + G = nx.empty_graph() + G.add_edges_from([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + # function mapping no longer encouraged but works + + def mapping(n): + return ord(n) + + H = nx.relabel_nodes(G, mapping) + assert nodes_equal(H.nodes(), [65, 66, 67, 68]) + + def test_relabel_nodes_callable_type(self): + G = nx.path_graph(4) + H = nx.relabel_nodes(G, str) + assert nodes_equal(H.nodes, ["0", "1", "2", "3"]) + + @pytest.mark.parametrize("non_mc", ("0123", ["0", "1", "2", "3"])) + def test_relabel_nodes_non_mapping_or_callable(self, non_mc): + """If `mapping` is neither a Callable or a Mapping, an exception + should be raised.""" + G = nx.path_graph(4) + with pytest.raises(AttributeError): + nx.relabel_nodes(G, non_mc) + + def test_relabel_nodes_graph(self): + G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping) + assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) + + def test_relabel_nodes_orderedgraph(self): + G = nx.Graph() + G.add_nodes_from([1, 2, 3]) + G.add_edges_from([(1, 3), (2, 3)]) + mapping = {1: "a", 2: "b", 3: "c"} + H = nx.relabel_nodes(G, mapping) + assert list(H.nodes) == ["a", "b", "c"] + + def test_relabel_nodes_digraph(self): + G = nx.DiGraph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {"A": "aardvark", "B": "bear", "C": "cat", "D": "dog"} + H = nx.relabel_nodes(G, mapping, copy=False) + assert nodes_equal(H.nodes(), ["aardvark", "bear", "cat", "dog"]) + + def test_relabel_nodes_multigraph(self): + G = nx.MultiGraph([("a", "b"), ("a", "b")]) + mapping = {"a": "aardvark", "b": "bear"} + G = nx.relabel_nodes(G, mapping, copy=False) + assert nodes_equal(G.nodes(), ["aardvark", "bear"]) + assert edges_equal(G.edges(), [("aardvark", 
"bear"), ("aardvark", "bear")]) + + def test_relabel_nodes_multidigraph(self): + G = nx.MultiDiGraph([("a", "b"), ("a", "b")]) + mapping = {"a": "aardvark", "b": "bear"} + G = nx.relabel_nodes(G, mapping, copy=False) + assert nodes_equal(G.nodes(), ["aardvark", "bear"]) + assert edges_equal(G.edges(), [("aardvark", "bear"), ("aardvark", "bear")]) + + def test_relabel_isolated_nodes_to_same(self): + G = nx.Graph() + G.add_nodes_from(range(4)) + mapping = {1: 1} + H = nx.relabel_nodes(G, mapping, copy=False) + assert nodes_equal(H.nodes(), list(range(4))) + + def test_relabel_nodes_missing(self): + G = nx.Graph([("A", "B"), ("A", "C"), ("B", "C"), ("C", "D")]) + mapping = {0: "aardvark"} + # copy=True + H = nx.relabel_nodes(G, mapping, copy=True) + assert nodes_equal(H.nodes, G.nodes) + # copy=False + GG = G.copy() + nx.relabel_nodes(G, mapping, copy=False) + assert nodes_equal(G.nodes, GG.nodes) + + def test_relabel_copy_name(self): + G = nx.Graph() + H = nx.relabel_nodes(G, {}, copy=True) + assert H.graph == G.graph + H = nx.relabel_nodes(G, {}, copy=False) + assert H.graph == G.graph + G.name = "first" + H = nx.relabel_nodes(G, {}, copy=True) + assert H.graph == G.graph + H = nx.relabel_nodes(G, {}, copy=False) + assert H.graph == G.graph + + def test_relabel_toposort(self): + K5 = nx.complete_graph(4) + G = nx.complete_graph(4) + G = nx.relabel_nodes(G, {i: i + 1 for i in range(4)}, copy=False) + assert nx.is_isomorphic(K5, G) + G = nx.complete_graph(4) + G = nx.relabel_nodes(G, {i: i - 1 for i in range(4)}, copy=False) + assert nx.is_isomorphic(K5, G) + + def test_relabel_selfloop(self): + G = nx.DiGraph([(1, 1), (1, 2), (2, 3)]) + G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False) + assert nodes_equal(G.nodes(), ["One", "Three", "Two"]) + G = nx.MultiDiGraph([(1, 1), (1, 2), (2, 3)]) + G = nx.relabel_nodes(G, {1: "One", 2: "Two", 3: "Three"}, copy=False) + assert nodes_equal(G.nodes(), ["One", "Three", "Two"]) + G = nx.MultiDiGraph([(1, 1)]) + G = nx.relabel_nodes(G, {1: 0}, copy=False) + assert nodes_equal(G.nodes(), [0]) + + def test_relabel_multidigraph_inout_merge_nodes(self): + for MG in (nx.MultiGraph, nx.MultiDiGraph): + for cc in (True, False): + G = MG([(0, 4), (1, 4), (4, 2), (4, 3)]) + G[0][4][0]["value"] = "a" + G[1][4][0]["value"] = "b" + G[4][2][0]["value"] = "c" + G[4][3][0]["value"] = "d" + G.add_edge(0, 4, key="x", value="e") + G.add_edge(4, 3, key="x", value="f") + mapping = {0: 9, 1: 9, 2: 9, 3: 9} + H = nx.relabel_nodes(G, mapping, copy=cc) + # No ordering on keys enforced + assert {"value": "a"} in H[9][4].values() + assert {"value": "b"} in H[9][4].values() + assert {"value": "c"} in H[4][9].values() + assert len(H[4][9]) == 3 if G.is_directed() else 6 + assert {"value": "d"} in H[4][9].values() + assert {"value": "e"} in H[9][4].values() + assert {"value": "f"} in H[4][9].values() + assert len(H[9][4]) == 3 if G.is_directed() else 6 + + def test_relabel_multigraph_merge_inplace(self): + G = nx.MultiGraph([(0, 1), (0, 2), (0, 3), (0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + # No ordering on keys enforced + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + + def test_relabel_multidigraph_merge_inplace(self): + G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + 
G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + # No ordering on keys enforced + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + + def test_relabel_multidigraph_inout_copy(self): + G = nx.MultiDiGraph([(0, 4), (1, 4), (4, 2), (4, 3)]) + G[0][4][0]["value"] = "a" + G[1][4][0]["value"] = "b" + G[4][2][0]["value"] = "c" + G[4][3][0]["value"] = "d" + G.add_edge(0, 4, key="x", value="e") + G.add_edge(4, 3, key="x", value="f") + mapping = {0: 9, 1: 9, 2: 9, 3: 9} + H = nx.relabel_nodes(G, mapping, copy=True) + # No ordering on keys enforced + assert {"value": "a"} in H[9][4].values() + assert {"value": "b"} in H[9][4].values() + assert {"value": "c"} in H[4][9].values() + assert len(H[4][9]) == 3 + assert {"value": "d"} in H[4][9].values() + assert {"value": "e"} in H[9][4].values() + assert {"value": "f"} in H[4][9].values() + assert len(H[9][4]) == 3 + + def test_relabel_multigraph_merge_copy(self): + G = nx.MultiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + H = nx.relabel_nodes(G, mapping, copy=True) + assert {"value": "a"} in H[0][4].values() + assert {"value": "b"} in H[0][4].values() + assert {"value": "c"} in H[0][4].values() + + def test_relabel_multidigraph_merge_copy(self): + G = nx.MultiDiGraph([(0, 1), (0, 2), (0, 3)]) + G[0][1][0]["value"] = "a" + G[0][2][0]["value"] = "b" + G[0][3][0]["value"] = "c" + mapping = {1: 4, 2: 4, 3: 4} + H = nx.relabel_nodes(G, mapping, copy=True) + assert {"value": "a"} in H[0][4].values() + assert {"value": "b"} in H[0][4].values() + assert {"value": "c"} in H[0][4].values() + + def test_relabel_multigraph_nonnumeric_key(self): + for MG in (nx.MultiGraph, nx.MultiDiGraph): + for cc in (True, False): + G = nx.MultiGraph() + G.add_edge(0, 1, key="I", value="a") + G.add_edge(0, 2, key="II", value="b") + G.add_edge(0, 3, key="II", value="c") + mapping = {1: 4, 2: 4, 3: 4} + nx.relabel_nodes(G, mapping, copy=False) + assert {"value": "a"} in G[0][4].values() + assert {"value": "b"} in G[0][4].values() + assert {"value": "c"} in G[0][4].values() + assert 0 in G[0][4] + assert "I" in G[0][4] + assert "II" in G[0][4] + + def test_relabel_circular(self): + G = nx.path_graph(3) + mapping = {0: 1, 1: 0} + H = nx.relabel_nodes(G, mapping, copy=True) + with pytest.raises(nx.NetworkXUnfeasible): + H = nx.relabel_nodes(G, mapping, copy=False) + + def test_relabel_preserve_node_order_full_mapping_with_copy_true(self): + G = nx.path_graph(3) + original_order = list(G.nodes()) + mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose + H = nx.relabel_nodes(G, mapping, copy=True) + new_order = list(H.nodes()) + assert [mapping.get(i, i) for i in original_order] == new_order + + def test_relabel_preserve_node_order_full_mapping_with_copy_false(self): + G = nx.path_graph(3) + original_order = list(G) + mapping = {2: "a", 1: "b", 0: "c"} # dictionary keys out of order on purpose + H = nx.relabel_nodes(G, mapping, copy=False) + new_order = list(H) + assert [mapping.get(i, i) for i in original_order] == new_order + + def test_relabel_preserve_node_order_partial_mapping_with_copy_true(self): + G = nx.path_graph(3) + original_order = list(G) + mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose + H = nx.relabel_nodes(G, mapping, copy=True) + new_order = list(H) + assert [mapping.get(i, i) for i 
in original_order] == new_order + + def test_relabel_preserve_node_order_partial_mapping_with_copy_false(self): + G = nx.path_graph(3) + original_order = list(G) + mapping = {1: "a", 0: "b"} # partial mapping and keys out of order on purpose + H = nx.relabel_nodes(G, mapping, copy=False) + new_order = list(H) + assert [mapping.get(i, i) for i in original_order] != new_order diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__init__.py b/.venv/lib/python3.12/site-packages/networkx/utils/__init__.py new file mode 100644 index 0000000..d6abb17 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/__init__.py @@ -0,0 +1,8 @@ +from networkx.utils.misc import * +from networkx.utils.decorators import * +from networkx.utils.random_sequence import * +from networkx.utils.union_find import * +from networkx.utils.rcm import * +from networkx.utils.heaps import * +from networkx.utils.configs import * +from networkx.utils.backends import * diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..6e4127c Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/backends.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/backends.cpython-312.pyc new file mode 100644 index 0000000..4b1f153 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/backends.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/configs.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/configs.cpython-312.pyc new file mode 100644 index 0000000..a9d6158 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/configs.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/decorators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/decorators.cpython-312.pyc new file mode 100644 index 0000000..28c3242 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/decorators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/heaps.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/heaps.cpython-312.pyc new file mode 100644 index 0000000..a3e9e47 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/heaps.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-312.pyc new file mode 100644 index 0000000..b887377 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/mapped_queue.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/misc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/misc.cpython-312.pyc new file mode 100644 index 0000000..e4a48dc Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/misc.cpython-312.pyc differ diff --git 
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/random_sequence.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/random_sequence.cpython-312.pyc
new file mode 100644
index 0000000..f2ea87b
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/random_sequence.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/rcm.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/rcm.cpython-312.pyc
new file mode 100644
index 0000000..512e518
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/rcm.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/union_find.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/union_find.cpython-312.pyc
new file mode 100644
index 0000000..a0b5653
Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/__pycache__/union_find.cpython-312.pyc differ
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/backends.py b/.venv/lib/python3.12/site-packages/networkx/utils/backends.py
new file mode 100644
index 0000000..1a9250c
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/backends.py
@@ -0,0 +1,2143 @@
+# Notes about NetworkX namespace objects set up here:
+#
+# nx.utils.backends.backends:
+#     dict keyed by backend name to the backend entry point object.
+#     Filled using ``_get_backends("networkx.backends")`` during import of this module.
+#
+# nx.utils.backends.backend_info:
+#     dict keyed by backend name to the metadata returned by the function indicated
+#     by the "networkx.backend_info" entry point.
+#     Created as an empty dict while importing this module, but later filled using
+#     ``_set_configs_from_environment()`` at end of importing ``networkx/__init__.py``.
+#
+# nx.config:
+#     Config object for NetworkX config setting. Created using
+#     ``_set_configs_from_environment()`` at end of importing ``networkx/__init__.py``.
+#
+# private dicts:
+# nx.utils.backends._loaded_backends:
+#     dict used to memoize loaded backends. Keyed by backend name to loaded backends.
+#
+# nx.utils.backends._registered_algorithms:
+#     dict of all the dispatchable functions in networkx, keyed by _dispatchable
+#     function name to the wrapped function object.
+
+import inspect
+import itertools
+import logging
+import os
+import typing
+import warnings
+from functools import partial
+from importlib.metadata import entry_points
+
+import networkx as nx
+
+from .configs import BackendPriorities, Config, NetworkXConfig
+from .decorators import argmap
+
+__all__ = ["_dispatchable"]
+
+_logger = logging.getLogger(__name__)
+FAILED_TO_CONVERT = "FAILED_TO_CONVERT"
+
+
+def _get_backends(group, *, load_and_call=False):
+    """
+    Retrieve NetworkX ``backends`` and ``backend_info`` from the entry points.
+
+    Parameters
+    ----------
+    group : str
+        The entry point group to retrieve.
+    load_and_call : bool, optional
+        If True, load each entry point and call the loaded object. Defaults to False.
+
+    Returns
+    -------
+    dict
+        A dictionary mapping backend names to their respective backend objects.
+
+    Notes
+    -----
+    If a backend is defined more than once, a warning is issued.
+    The "nx_loopback" backend is removed if it exists, as it is only available during testing.
+    A warning is displayed if an error occurs while loading a backend.
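+
+    Examples
+    --------
+    Backends advertise themselves through entry points; e.g. a backend package
+    might declare in its ``pyproject.toml`` (hypothetical package and module
+    names shown)::
+
+        [project.entry-points."networkx.backends"]
+        my_backend = "my_backend_pkg.interface:BackendInterface"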
+ """ + items = entry_points(group=group) + rv = {} + for ep in items: + if ep.name in rv: + warnings.warn( + f"networkx backend defined more than once: {ep.name}", + RuntimeWarning, + stacklevel=2, + ) + elif load_and_call: + try: + rv[ep.name] = ep.load()() + except Exception as exc: + warnings.warn( + f"Error encountered when loading info for backend {ep.name}: {exc}", + RuntimeWarning, + stacklevel=2, + ) + else: + rv[ep.name] = ep + rv.pop("nx_loopback", None) + return rv + + +# Note: "networkx" is in `backend_info` but ignored in `backends` and `config.backends`. +# It is valid to use "networkx" as a backend argument and in `config.backend_priority`. +# If we make "networkx" a "proper" backend, put it in `backends` and `config.backends`. +backends = _get_backends("networkx.backends") + +# Use _set_configs_from_environment() below to fill backend_info dict as +# the last step in importing networkx +backend_info = {} + +# Load and cache backends on-demand +_loaded_backends = {} # type: ignore[var-annotated] +_registered_algorithms = {} + + +# Get default configuration from environment variables at import time +def _comma_sep_to_list(string): + return [x_strip for x in string.strip().split(",") if (x_strip := x.strip())] + + +def _set_configs_from_environment(): + """Initialize ``config.backend_priority``, load backend_info and config. + + This gets default values from environment variables (see ``nx.config`` for details). + This function is run at the very end of importing networkx. It is run at this time + to avoid loading backend_info before the rest of networkx is imported in case a + backend uses networkx for its backend_info (e.g. subclassing the Config class.) + """ + # backend_info is defined above as empty dict. Fill it after import finishes. + backend_info.update(_get_backends("networkx.backend_info", load_and_call=True)) + backend_info.update( + (backend, {}) for backend in backends.keys() - backend_info.keys() + ) + + # set up config based on backend_info and environment + backend_config = {} + for backend, info in backend_info.items(): + if "default_config" not in info: + cfg = Config() + else: + cfg = info["default_config"] + if not isinstance(cfg, Config): + cfg = Config(**cfg) + backend_config[backend] = cfg + backend_config = Config(**backend_config) + # Setting doc of backends_config type is not setting doc of Config + # Config has __new__ method that returns instance with a unique type! + type(backend_config).__doc__ = "All installed NetworkX backends and their configs." 
+ + backend_priority = BackendPriorities(algos=[], generators=[]) + + config = NetworkXConfig( + backend_priority=backend_priority, + backends=backend_config, + cache_converted_graphs=bool( + os.environ.get("NETWORKX_CACHE_CONVERTED_GRAPHS", True) + ), + fallback_to_nx=bool(os.environ.get("NETWORKX_FALLBACK_TO_NX", False)), + warnings_to_ignore=set( + _comma_sep_to_list(os.environ.get("NETWORKX_WARNINGS_TO_IGNORE", "")) + ), + ) + + # Add "networkx" item to backend_info now b/c backend_config is set up + backend_info["networkx"] = {} + + # NETWORKX_BACKEND_PRIORITY is the same as NETWORKX_BACKEND_PRIORITY_ALGOS + priorities = { + key[26:].lower(): val + for key, val in os.environ.items() + if key.startswith("NETWORKX_BACKEND_PRIORITY_") + } + backend_priority = config.backend_priority + backend_priority.algos = ( + _comma_sep_to_list(priorities.pop("algos")) + if "algos" in priorities + else _comma_sep_to_list( + os.environ.get( + "NETWORKX_BACKEND_PRIORITY", + os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", ""), + ) + ) + ) + backend_priority.generators = _comma_sep_to_list(priorities.pop("generators", "")) + for key in sorted(priorities): + backend_priority[key] = _comma_sep_to_list(priorities[key]) + + return config + + +def _do_nothing(): + """This does nothing at all, yet it helps turn ``_dispatchable`` into functions. + + Use this with the ``argmap`` decorator to turn ``self`` into a function. It results + in some small additional overhead compared to calling ``_dispatchable`` directly, + but ``argmap`` has the property that it can stack with other ``argmap`` + decorators "for free". Being a function is better for REPRs and type-checkers. + """ + + +def _always_run(name, args, kwargs): + return True + + +def _load_backend(backend_name): + if backend_name in _loaded_backends: + return _loaded_backends[backend_name] + if backend_name not in backends: + raise ImportError(f"'{backend_name}' backend is not installed") + rv = _loaded_backends[backend_name] = backends[backend_name].load() + if not hasattr(rv, "can_run"): + rv.can_run = _always_run + if not hasattr(rv, "should_run"): + rv.should_run = _always_run + return rv + + +class _dispatchable: + _is_testing = False + + def __new__( + cls, + func=None, + *, + name=None, + graphs="G", + edge_attrs=None, + node_attrs=None, + preserve_edge_attrs=False, + preserve_node_attrs=False, + preserve_graph_attrs=False, + preserve_all_attrs=False, + mutates_input=False, + returns_graph=False, + implemented_by_nx=True, + ): + """A decorator function that is used to redirect the execution of ``func`` + function to its backend implementation. + + This decorator allows the function to dispatch to different backend + implementations based on the input graph types, and also manages the + extra keywords ``backend`` and ``**backend_kwargs``. + Usage can be any of the following decorator forms: + + - ``@_dispatchable`` + - ``@_dispatchable()`` + - ``@_dispatchable(name="override_name")`` + - ``@_dispatchable(graphs="graph_var_name")`` + - ``@_dispatchable(edge_attrs="weight")`` + - ``@_dispatchable(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"})`` + with 0 and 1 giving the position in the signature function for graph + objects. When ``edge_attrs`` is a dict, keys are keyword names and values + are defaults. + + Parameters + ---------- + func : callable, optional (default: None) + The function to be decorated. If None, ``_dispatchable`` returns a + partial object that can be used to decorate a function later. 
If ``func``
+            is a callable, returns a new callable object that dispatches to a backend
+            function based on input graph types.
+
+        name : str, optional (default: name of `func`)
+            The name for the function as used for dispatching. If not provided,
+            the name of ``func`` will be used. ``name`` is useful to avoid name
+            conflicts, as all dispatched functions live in a single namespace.
+            For example, ``nx.tournament.is_strongly_connected`` had a name
+            conflict with the standard ``nx.is_strongly_connected``, so we used
+            ``@_dispatchable(name="tournament_is_strongly_connected")``.
+
+        graphs : str or dict or None, optional (default: "G")
+            If a string, the parameter name of the graph, which must be the first
+            argument of the wrapped function. If more than one graph is required
+            for the function (or if the graph is not the first argument), provide
+            a dict keyed by graph parameter name to that parameter's position in
+            the signature. A question mark in the name indicates an optional argument.
+            For example, ``@_dispatchable(graphs={"G": 0, "auxiliary?": 4})``
+            indicates the 0th parameter ``G`` of the function is a required graph,
+            and the 4th parameter ``auxiliary?`` is an optional graph.
+            To indicate that an argument is a list of graphs, use ``"[graphs]"``.
+            Use ``graphs=None`` if *no* arguments are NetworkX graphs, such as for
+            graph generators, readers, and conversion functions.
+
+        edge_attrs : str or dict, optional (default: None)
+            ``edge_attrs`` holds information about edge attribute arguments
+            and default values for those edge attributes.
+            If a string, ``edge_attrs`` holds the function argument name that
+            indicates a single edge attribute to include in the converted graph.
+            The default value for this attribute is 1. To indicate that an argument
+            is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``.
+            If a dict, ``edge_attrs`` holds a dict keyed by argument names, with
+            values that are either the default value or, if a string, the argument
+            name that indicates the default value.
+            If None, the function does not use edge attributes.
+
+        node_attrs : str or dict, optional
+            Like ``edge_attrs``, but for node attributes.
+
+        preserve_edge_attrs : bool or str or dict, optional (default: False)
+            If bool, whether to preserve all edge attributes.
+            If a string, the parameter name that may indicate (with ``True`` or a
+            callable argument) whether all edge attributes should be preserved
+            when converting graphs to a backend graph type.
+            If a dict of form ``{graph_name: {attr: default}}``, indicate
+            pre-determined edge attributes (and defaults) to preserve for the
+            indicated input graph.
+
+        preserve_node_attrs : bool or str or dict, optional (default: False)
+            Like ``preserve_edge_attrs``, but for node attributes.
+
+        preserve_graph_attrs : bool or set, optional (default: False)
+            If bool, whether to preserve all graph attributes.
+            If a set, the input graph arguments for which to preserve graph attributes.
+
+        preserve_all_attrs : bool, optional (default: False)
+            Whether to preserve all edge, node and graph attributes.
+            If True, this overrides all the other preserve_*_attrs.
+
+        mutates_input : bool or dict, optional (default: False)
+            If bool, whether the function mutates an input graph argument.
+ If dict of ``{arg_name: arg_pos}``, name and position of bool arguments + that indicate whether an input graph will be mutated, and ``arg_name`` + may begin with ``"not "`` to negate the logic (for example, ``"not copy"`` + means we mutate the input graph when the ``copy`` argument is False). + By default, dispatching doesn't convert input graphs to a different + backend for functions that mutate input graphs. + + returns_graph : bool, optional (default: False) + Whether the function can return or yield a graph object. By default, + dispatching doesn't convert input graphs to a different backend for + functions that return graphs. + + implemented_by_nx : bool, optional (default: True) + Whether the function is implemented by NetworkX. If it is not, then the + function is included in NetworkX only as an API to dispatch to backends. + Default is True. + """ + if func is None: + return partial( + _dispatchable, + name=name, + graphs=graphs, + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + preserve_graph_attrs=preserve_graph_attrs, + preserve_all_attrs=preserve_all_attrs, + mutates_input=mutates_input, + returns_graph=returns_graph, + implemented_by_nx=implemented_by_nx, + ) + if isinstance(func, str): + raise TypeError("'name' and 'graphs' must be passed by keyword") from None + # If name not provided, use the name of the function + if name is None: + name = func.__name__ + + self = object.__new__(cls) + + # standard function-wrapping stuff + # __annotations__ not used + self.__name__ = func.__name__ + # self.__doc__ = func.__doc__ # __doc__ handled as cached property + self.__defaults__ = func.__defaults__ + # Add `backend=` keyword argument to allow backend choice at call-time + if func.__kwdefaults__: + self.__kwdefaults__ = {**func.__kwdefaults__, "backend": None} + else: + self.__kwdefaults__ = {"backend": None} + self.__module__ = func.__module__ + self.__qualname__ = func.__qualname__ + self.__dict__.update(func.__dict__) + self.__wrapped__ = func + + # Supplement docstring with backend info; compute and cache when needed + self._orig_doc = func.__doc__ + self._cached_doc = None + + self.orig_func = func + self.name = name + self.edge_attrs = edge_attrs + self.node_attrs = node_attrs + self.preserve_edge_attrs = preserve_edge_attrs or preserve_all_attrs + self.preserve_node_attrs = preserve_node_attrs or preserve_all_attrs + self.preserve_graph_attrs = preserve_graph_attrs or preserve_all_attrs + self.mutates_input = mutates_input + # Keep `returns_graph` private for now, b/c we may extend info on return types + self._returns_graph = returns_graph + + if edge_attrs is not None and not isinstance(edge_attrs, str | dict): + raise TypeError( + f"Bad type for edge_attrs: {type(edge_attrs)}. Expected str or dict." + ) from None + if node_attrs is not None and not isinstance(node_attrs, str | dict): + raise TypeError( + f"Bad type for node_attrs: {type(node_attrs)}. Expected str or dict." + ) from None + if not isinstance(self.preserve_edge_attrs, bool | str | dict): + raise TypeError( + f"Bad type for preserve_edge_attrs: {type(self.preserve_edge_attrs)}." + " Expected bool, str, or dict." + ) from None + if not isinstance(self.preserve_node_attrs, bool | str | dict): + raise TypeError( + f"Bad type for preserve_node_attrs: {type(self.preserve_node_attrs)}." + " Expected bool, str, or dict." 
+ ) from None + if not isinstance(self.preserve_graph_attrs, bool | set): + raise TypeError( + f"Bad type for preserve_graph_attrs: {type(self.preserve_graph_attrs)}." + " Expected bool or set." + ) from None + if not isinstance(self.mutates_input, bool | dict): + raise TypeError( + f"Bad type for mutates_input: {type(self.mutates_input)}." + " Expected bool or dict." + ) from None + if not isinstance(self._returns_graph, bool): + raise TypeError( + f"Bad type for returns_graph: {type(self._returns_graph)}." + " Expected bool." + ) from None + + if isinstance(graphs, str): + graphs = {graphs: 0} + elif graphs is None: + pass + elif not isinstance(graphs, dict): + raise TypeError( + f"Bad type for graphs: {type(graphs)}. Expected str or dict." + ) from None + elif len(graphs) == 0: + raise KeyError("'graphs' must contain at least one variable name") from None + + # This dict comprehension is complicated for better performance; equivalent shown below. + self.optional_graphs = set() + self.list_graphs = set() + if graphs is None: + self.graphs = {} + else: + self.graphs = { + self.optional_graphs.add(val := k[:-1]) or val + if (last := k[-1]) == "?" + else self.list_graphs.add(val := k[1:-1]) or val + if last == "]" + else k: v + for k, v in graphs.items() + } + # The above is equivalent to: + # self.optional_graphs = {k[:-1] for k in graphs if k[-1] == "?"} + # self.list_graphs = {k[1:-1] for k in graphs if k[-1] == "]"} + # self.graphs = {k[:-1] if k[-1] == "?" else k: v for k, v in graphs.items()} + + # Compute and cache the signature on-demand + self._sig = None + + # Which backends implement this function? + self.backends = { + backend + for backend, info in backend_info.items() + if "functions" in info and name in info["functions"] + } + if implemented_by_nx: + self.backends.add("networkx") + + if name in _registered_algorithms: + raise KeyError( + f"Algorithm already exists in dispatch registry: {name}" + ) from None + # Use the `argmap` decorator to turn `self` into a function. This does result + # in small additional overhead compared to calling `_dispatchable` directly, + # but `argmap` has the property that it can stack with other `argmap` + # decorators "for free". Being a function is better for REPRs and type-checkers. + self = argmap(_do_nothing)(self) + _registered_algorithms[name] = self + return self + + @property + def __doc__(self): + """If the cached documentation exists, it is returned. + Otherwise, the documentation is generated using _make_doc() method, + cached, and then returned.""" + + rv = self._cached_doc + if rv is None: + rv = self._cached_doc = self._make_doc() + return rv + + @__doc__.setter + def __doc__(self, val): + """Sets the original documentation to the given value and resets the + cached documentation.""" + + self._orig_doc = val + self._cached_doc = None + + @property + def __signature__(self): + """Return the signature of the original function, with the addition of + the `backend` and `backend_kwargs` parameters.""" + + if self._sig is None: + sig = inspect.signature(self.orig_func) + # `backend` is now a reserved argument used by dispatching. 
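+            # Illustrative (not executed): because of this reserved keyword, any
+            # dispatchable can be forced to a given backend at call time, e.g.
+            #     >>> nx.pagerank(G, backend="some_backend")  # hypothetical backend name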
+ # assert "backend" not in sig.parameters + if not any( + p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values() + ): + sig = sig.replace( + parameters=[ + *sig.parameters.values(), + inspect.Parameter( + "backend", inspect.Parameter.KEYWORD_ONLY, default=None + ), + inspect.Parameter( + "backend_kwargs", inspect.Parameter.VAR_KEYWORD + ), + ] + ) + else: + *parameters, var_keyword = sig.parameters.values() + sig = sig.replace( + parameters=[ + *parameters, + inspect.Parameter( + "backend", inspect.Parameter.KEYWORD_ONLY, default=None + ), + var_keyword, + ] + ) + self._sig = sig + return self._sig + + # Fast, simple path if no backends are installed + def _call_if_no_backends_installed(self, /, *args, backend=None, **kwargs): + """Returns the result of the original function (no backends installed).""" + if backend is not None and backend != "networkx": + raise ImportError(f"'{backend}' backend is not installed") + if "networkx" not in self.backends: + raise NotImplementedError( + f"'{self.name}' is not implemented by 'networkx' backend. " + "This function is included in NetworkX as an API to dispatch to " + "other backends." + ) + return self.orig_func(*args, **kwargs) + + # Dispatch to backends based on inputs, `backend=` arg, or configuration + def _call_if_any_backends_installed(self, /, *args, backend=None, **kwargs): + """Returns the result of the original function, or the backend function if + the backend is specified and that backend implements `func`.""" + # Use `backend_name` in this function instead of `backend`. + # This is purely for aesthetics and to make it easier to search for this + # variable since "backend" is used in many comments and log/error messages. + backend_name = backend + if backend_name is not None and backend_name not in backend_info: + raise ImportError(f"'{backend_name}' backend is not installed") + + graphs_resolved = {} + for gname, pos in self.graphs.items(): + if pos < len(args): + if gname in kwargs: + raise TypeError(f"{self.name}() got multiple values for {gname!r}") + graph = args[pos] + elif gname in kwargs: + graph = kwargs[gname] + elif gname not in self.optional_graphs: + raise TypeError( + f"{self.name}() missing required graph argument: {gname}" + ) + else: + continue + if graph is None: + if gname not in self.optional_graphs: + raise TypeError( + f"{self.name}() required graph argument {gname!r} is None; must be a graph" + ) + else: + graphs_resolved[gname] = graph + + # Alternative to the above that does not check duplicated args or missing required graphs. 
+ # graphs_resolved = { + # gname: graph + # for gname, pos in self.graphs.items() + # if (graph := args[pos] if pos < len(args) else kwargs.get(gname)) is not None + # } + + # Check if any graph comes from a backend + if self.list_graphs: + # Make sure we don't lose values by consuming an iterator + args = list(args) + for gname in self.list_graphs & graphs_resolved.keys(): + list_of_graphs = list(graphs_resolved[gname]) + graphs_resolved[gname] = list_of_graphs + if gname in kwargs: + kwargs[gname] = list_of_graphs + else: + args[self.graphs[gname]] = list_of_graphs + + graph_backend_names = { + getattr(g, "__networkx_backend__", None) + for gname, g in graphs_resolved.items() + if gname not in self.list_graphs + } + for gname in self.list_graphs & graphs_resolved.keys(): + graph_backend_names.update( + getattr(g, "__networkx_backend__", None) + for g in graphs_resolved[gname] + ) + else: + graph_backend_names = { + getattr(g, "__networkx_backend__", None) + for g in graphs_resolved.values() + } + + backend_priority = nx.config.backend_priority.get( + self.name, + nx.config.backend_priority.generators + if self._returns_graph + else nx.config.backend_priority.algos, + ) + fallback_to_nx = nx.config.fallback_to_nx and "networkx" in self.backends + if self._is_testing and backend_priority and backend_name is None: + # Special path if we are running networkx tests with a backend. + # This even runs for (and handles) functions that mutate input graphs. + return self._convert_and_call_for_tests( + backend_priority[0], + args, + kwargs, + fallback_to_nx=fallback_to_nx, + ) + + graph_backend_names.discard(None) + if backend_name is not None: + # Must run with the given backend. + # `can_run` only used for better log and error messages. + # Check `mutates_input` for logging, not behavior. + backend_kwarg_msg = ( + "No other backends will be attempted, because the backend was " + f"specified with the `backend='{backend_name}'` keyword argument." + ) + extra_message = ( + f"'{backend_name}' backend raised NotImplementedError when calling " + f"'{self.name}'. {backend_kwarg_msg}" + ) + if not graph_backend_names or graph_backend_names == {backend_name}: + # All graphs are backend graphs--no need to convert! + if self._can_backend_run(backend_name, args, kwargs): + return self._call_with_backend( + backend_name, args, kwargs, extra_message=extra_message + ) + if self._does_backend_have(backend_name): + extra = " for the given arguments" + else: + extra = "" + raise NotImplementedError( + f"'{self.name}' is not implemented by '{backend_name}' backend" + f"{extra}. {backend_kwarg_msg}" + ) + if self._can_convert(backend_name, graph_backend_names): + if self._can_backend_run(backend_name, args, kwargs): + if self._will_call_mutate_input(args, kwargs): + _logger.debug( + "'%s' will mutate an input graph. This prevents automatic conversion " + "to, and use of, backends listed in `nx.config.backend_priority`. " + "Using backend specified by the " + "`backend='%s'` keyword argument. This may change behavior by not " + "mutating inputs.", + self.name, + backend_name, + ) + mutations = [] + else: + mutations = None + rv = self._convert_and_call( + backend_name, + graph_backend_names, + args, + kwargs, + extra_message=extra_message, + mutations=mutations, + ) + if mutations: + for cache, key in mutations: + # If the call mutates inputs, then remove all inputs gotten + # from cache. We do this after all conversions (and call) so + # that a graph can be gotten from a cache multiple times. 
+ cache.pop(key, None) + return rv + if self._does_backend_have(backend_name): + extra = " for the given arguments" + else: + extra = "" + raise NotImplementedError( + f"'{self.name}' is not implemented by '{backend_name}' backend" + f"{extra}. {backend_kwarg_msg}" + ) + if len(graph_backend_names) == 1: + maybe_s = "" + graph_backend_names = f"'{next(iter(graph_backend_names))}'" + else: + maybe_s = "s" + raise TypeError( + f"'{self.name}' is unable to convert graph from backend{maybe_s} " + f"{graph_backend_names} to '{backend_name}' backend, which was " + f"specified with the `backend='{backend_name}'` keyword argument. " + f"{backend_kwarg_msg}" + ) + + if self._will_call_mutate_input(args, kwargs): + # The current behavior for functions that mutate input graphs: + # + # 1. If backend is specified by `backend=` keyword, use it (done above). + # 2. If inputs are from one backend, try to use it. + # 3. If all input graphs are instances of `nx.Graph`, then run with the + # default "networkx" implementation. + # + # Do not automatically convert if a call will mutate inputs, because doing + # so would change behavior. Hence, we should fail if there are multiple input + # backends or if the input backend does not implement the function. However, + # we offer a way for backends to circumvent this if they do not implement + # this function: we will fall back to the default "networkx" implementation + # without using conversions if all input graphs are subclasses of `nx.Graph`. + mutate_msg = ( + "conversions between backends (if configured) will not be attempted " + "because the original input graph would not be mutated. Using the " + "backend keyword e.g. `backend='some_backend'` will force conversions " + "and not mutate the original input graph." + ) + fallback_msg = ( + "This call will mutate inputs, so fall back to 'networkx' " + "backend (without converting) since all input graphs are " + "instances of nx.Graph and are hopefully compatible." + ) + if len(graph_backend_names) == 1: + [backend_name] = graph_backend_names + msg_template = ( + f"Backend '{backend_name}' does not implement '{self.name}'%s. " + f"This call will mutate an input, so automatic {mutate_msg}" + ) + # `can_run` is only used for better log and error messages + try: + if self._can_backend_run(backend_name, args, kwargs): + return self._call_with_backend( + backend_name, + args, + kwargs, + extra_message=msg_template % " with these arguments", + ) + except NotImplementedError as exc: + if all(isinstance(g, nx.Graph) for g in graphs_resolved.values()): + _logger.debug( + "Backend '%s' raised when calling '%s': %s. %s", + backend_name, + self.name, + exc, + fallback_msg, + ) + else: + raise + else: + if fallback_to_nx and all( + # Consider dropping the `isinstance` check here to allow + # duck-type graphs, but let's wait for a backend to ask us. + isinstance(g, nx.Graph) + for g in graphs_resolved.values() + ): + # Log that we are falling back to networkx + _logger.debug( + "Backend '%s' can't run '%s'. %s", + backend_name, + self.name, + fallback_msg, + ) + else: + if self._does_backend_have(backend_name): + extra = " with these arguments" + else: + extra = "" + raise NotImplementedError(msg_template % extra) + elif fallback_to_nx and all( + # Consider dropping the `isinstance` check here to allow + # duck-type graphs, but let's wait for a backend to ask us. 
+ isinstance(g, nx.Graph) + for g in graphs_resolved.values() + ): + # Log that we are falling back to networkx + _logger.debug( + "'%s' was called with inputs from multiple backends: %s. %s", + self.name, + graph_backend_names, + fallback_msg, + ) + else: + raise RuntimeError( + f"'{self.name}' will mutate an input, but it was called with " + f"inputs from multiple backends: {graph_backend_names}. " + f"Automatic {mutate_msg}" + ) + # At this point, no backends are available to handle the call with + # the input graph types, but if the input graphs are compatible + # nx.Graph instances, fall back to networkx without converting. + return self.orig_func(*args, **kwargs) + + # We may generalize fallback configuration as e.g. `nx.config.backend_fallback` + if fallback_to_nx or not graph_backend_names: + # Use "networkx" by default if there are no inputs from backends. + # For example, graph generators should probably return NetworkX graphs + # instead of raising NotImplementedError. + backend_fallback = ["networkx"] + else: + backend_fallback = [] + + # ########################## + # # How this behaves today # + # ########################## + # + # The prose below describes the implementation and a *possible* way to + # generalize "networkx" as "just another backend". The code is structured + # to perhaps someday support backend-to-backend conversions (including + # simply passing objects from one backend directly to another backend; + # the dispatch machinery does not necessarily need to perform conversions), + # but since backend-to-backend matching is not yet supported, the following + # code is merely a convenient way to implement dispatch behaviors that have + # been carefully developed since NetworkX 3.0 and to include falling back + # to the default NetworkX implementation. + # + # The current behavior for functions that don't mutate input graphs: + # + # 1. If backend is specified by `backend=` keyword, use it (done above). + # 2. If input is from a backend other than "networkx", try to use it. + # - Note: if present, "networkx" graphs will be converted to the backend. + # 3. If input is from "networkx" (or no backend), try to use backends from + # `backend_priority` before running with the default "networkx" implementation. + # 4. If configured, "fall back" and run with the default "networkx" implementation. + # + # ################################################ + # # How this is implemented and may work someday # + # ################################################ + # + # Let's determine the order of backends we should try according + # to `backend_priority`, `backend_fallback`, and input backends. + # There are two† dimensions of priorities to consider: + # backend_priority > unspecified > backend_fallback + # and + # backend of an input > not a backend of an input + # These are combined to form five groups of priorities as such: + # + # input ~input + # +-------+-------+ + # backend_priority | 1 | 2 | + # unspecified | 3 | N/A | (if only 1) + # backend_fallback | 4 | 5 | + # +-------+-------+ + # + # This matches the behaviors we developed in versions 3.0 to 3.2, it + # ought to cover virtually all use cases we expect, and I (@eriknw) don't + # think it can be done any simpler (although it can be generalized further + # and made to be more complicated to capture 100% of *possible* use cases). + # Some observations: + # + # 1. If an input is in `backend_priority`, it will be used before trying a + # backend that is higher priority in `backend_priority` and not an input. + # 2. 
To prioritize converting from one backend to another even if both implement + # a function, list one in `backend_priority` and one in `backend_fallback`. + # 3. To disable conversions, set `backend_priority` and `backend_fallback` to []. + # + # †: There is actually a third dimension of priorities: + # should_run == True > should_run == False + # Backends with `can_run == True` and `should_run == False` are tried last. + # + seen = set() + group1 = [] # In backend_priority, and an input + group2 = [] # In backend_priority, but not an input + for name in backend_priority: + if name in seen: + continue + seen.add(name) + if name in graph_backend_names: + group1.append(name) + else: + group2.append(name) + group4 = [] # In backend_fallback, and an input + group5 = [] # In backend_fallback, but not an input + for name in backend_fallback: + if name in seen: + continue + seen.add(name) + if name in graph_backend_names: + group4.append(name) + else: + group5.append(name) + # An input, but not in backend_priority or backend_fallback. + group3 = graph_backend_names - seen + if len(group3) > 1: + # `group3` backends are not configured for automatic conversion or fallback. + # There are at least two issues if this group contains multiple backends: + # + # 1. How should we prioritize them? We have no good way to break ties. + # Although we could arbitrarily choose alphabetical or left-most, + # let's follow the Zen of Python and refuse the temptation to guess. + # 2. We probably shouldn't automatically convert to these backends, + # because we are not configured to do so. + # + # (2) is important to allow disabling all conversions by setting both + # `nx.config.backend_priority` and `nx.config.backend_fallback` to []. + # + # If there is a single backend in `group3`, then giving it priority over + # the fallback backends is what is generally expected. For example, this + # allows input graphs of `backend_fallback` backends (such as "networkx") + # to be converted to, and run with, the unspecified backend. + _logger.debug( + "Call to '%s' has inputs from multiple backends, %s, that " + "have no priority set in `nx.config.backend_priority`, " + "so automatic conversions to " + "these backends will not be attempted.", + self.name, + group3, + ) + group3 = () + + try_order = list(itertools.chain(group1, group2, group3, group4, group5)) + if len(try_order) > 1: + # Should we consider adding an option for more verbose logging? + # For example, we could explain the order of `try_order` in detail. 
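+            # Worked example (illustrative, hypothetical backend names): with
+            # backend_priority=["b1", "b2"], backend_fallback=["networkx"], and
+            # inputs from {"b2", "b3"} backends, the groups above come out as
+            # group1=["b2"], group2=["b1"], group3={"b3"}, group4=[], and
+            # group5=["networkx"], so try_order is ["b2", "b1", "b3", "networkx"].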
+ _logger.debug( + "Call to '%s' has inputs from %s backends, and will try to use " + "backends in the following order: %s", + self.name, + graph_backend_names or "no", + try_order, + ) + backends_to_try_again = [] + for is_not_first, backend_name in enumerate(try_order): + if is_not_first: + _logger.debug("Trying next backend: '%s'", backend_name) + try: + if not graph_backend_names or graph_backend_names == {backend_name}: + if self._can_backend_run(backend_name, args, kwargs): + return self._call_with_backend(backend_name, args, kwargs) + elif self._can_convert( + backend_name, graph_backend_names + ) and self._can_backend_run(backend_name, args, kwargs): + if self._should_backend_run(backend_name, args, kwargs): + rv = self._convert_and_call( + backend_name, graph_backend_names, args, kwargs + ) + if ( + self._returns_graph + and graph_backend_names + and backend_name not in graph_backend_names + ): + # If the function has graph inputs and graph output, we try + # to make it so the backend of the return type will match the + # backend of the input types. In case this is not possible, + # let's tell the user that the backend of the return graph + # has changed. Perhaps we could try to convert back, but + # "fallback" backends for graph generators should typically + # be compatible with NetworkX graphs. + _logger.debug( + "Call to '%s' is returning a graph from a different " + "backend! It has inputs from %s backends, but ran with " + "'%s' backend and is returning graph from '%s' backend", + self.name, + graph_backend_names, + backend_name, + backend_name, + ) + return rv + # `should_run` is False, but `can_run` is True, so try again later + backends_to_try_again.append(backend_name) + except NotImplementedError as exc: + _logger.debug( + "Backend '%s' raised when calling '%s': %s", + backend_name, + self.name, + exc, + ) + + # We are about to fail. Let's try backends with can_run=True and should_run=False. + # This is unlikely to help today since we try to run with "networkx" before this. + for backend_name in backends_to_try_again: + _logger.debug( + "Trying backend: '%s' (ignoring `should_run=False`)", backend_name + ) + try: + rv = self._convert_and_call( + backend_name, graph_backend_names, args, kwargs + ) + if ( + self._returns_graph + and graph_backend_names + and backend_name not in graph_backend_names + ): + _logger.debug( + "Call to '%s' is returning a graph from a different " + "backend! It has inputs from %s backends, but ran with " + "'%s' backend and is returning graph from '%s' backend", + self.name, + graph_backend_names, + backend_name, + backend_name, + ) + return rv + except NotImplementedError as exc: + _logger.debug( + "Backend '%s' raised when calling '%s': %s", + backend_name, + self.name, + exc, + ) + # As a final effort, we could try to convert and run with `group3` backends + # that we discarded when `len(group3) > 1`, but let's not consider doing + # so until there is a reasonable request for it. + + if len(unspecified_backends := graph_backend_names - seen) > 1: + raise TypeError( + f"Unable to convert inputs from {graph_backend_names} backends and " + f"run '{self.name}'. NetworkX is configured to automatically convert " + f"to {try_order} backends. To remedy this, you may enable automatic " + f"conversion to {unspecified_backends} backends by adding them to " + "`nx.config.backend_priority`, or you " + "may specify a backend to use with the `backend=` keyword argument." 
+ ) + if "networkx" not in self.backends: + extra = ( + " This function is included in NetworkX as an API to dispatch to " + "other backends." + ) + else: + extra = "" + raise NotImplementedError( + f"'{self.name}' is not implemented by {try_order} backends. To remedy " + "this, you may enable automatic conversion to more backends (including " + "'networkx') by adding them to `nx.config.backend_priority`, " + "or you may specify a backend to use with " + f"the `backend=` keyword argument.{extra}" + ) + + # Dispatch only if there exist any installed backend(s) + __call__: typing.Callable = ( + _call_if_any_backends_installed if backends else _call_if_no_backends_installed + ) + + def _will_call_mutate_input(self, args, kwargs): + # Fairly few nx functions mutate the input graph. Most that do, always do. + # So a boolean input indicates "always" or "never". + if isinstance((mutates_input := self.mutates_input), bool): + return mutates_input + + # The ~10 other nx functions either use "copy=True" to control mutation or + # an arg naming an edge/node attribute to mutate (None means no mutation). + # Now `mutates_input` is a dict keyed by arg_name to its func-sig position. + # The `copy=` args are keyed as "not copy" to mean "negate the copy argument". + # Keys w/o "not " mean the call mutates only when the arg value `is not None`. + # + # This section might need different code if new functions mutate in new ways. + # + # NetworkX doesn't have any `mutates_input` dicts with more than 1 item. + # But we treat it like it might have more than 1 item for generality. + n = len(args) + return any( + (args[arg_pos] if n > arg_pos else kwargs.get(arg_name)) is not None + if not arg_name.startswith("not ") + # This assumes that e.g. `copy=True` is the default + else not (args[arg_pos] if n > arg_pos else kwargs.get(arg_name[4:], True)) + for arg_name, arg_pos in mutates_input.items() + ) + + def _can_convert(self, backend_name, graph_backend_names): + # Backend-to-backend conversion not supported yet. + # We can only convert to and from networkx. + rv = backend_name == "networkx" or graph_backend_names.issubset( + {"networkx", backend_name} + ) + if not rv: + _logger.debug( + "Unable to convert from %s backends to '%s' backend", + graph_backend_names, + backend_name, + ) + return rv + + def _does_backend_have(self, backend_name): + """Does the specified backend have this algorithm?""" + if backend_name == "networkx": + return "networkx" in self.backends + # Inspect the backend; don't trust metadata used to create `self.backends` + backend = _load_backend(backend_name) + return hasattr(backend, self.name) + + def _can_backend_run(self, backend_name, args, kwargs): + """Can the specified backend run this algorithm with these arguments?""" + if backend_name == "networkx": + return "networkx" in self.backends + backend = _load_backend(backend_name) + # `backend.can_run` and `backend.should_run` may return strings that describe + # why they can't or shouldn't be run. 
+ if not hasattr(backend, self.name): + _logger.debug( + "Backend '%s' does not implement '%s'", backend_name, self.name + ) + return False + can_run = backend.can_run(self.name, args, kwargs) + if isinstance(can_run, str) or not can_run: + reason = f", because: {can_run}" if isinstance(can_run, str) else "" + _logger.debug( + "Backend '%s' can't run `%s` with arguments: %s%s", + backend_name, + self.name, + _LazyArgsRepr(self, args, kwargs), + reason, + ) + return False + return True + + def _should_backend_run(self, backend_name, args, kwargs): + """Should the specified backend run this algorithm with these arguments? + + Note that this does not check ``backend.can_run``. + """ + # `backend.can_run` and `backend.should_run` may return strings that describe + # why they can't or shouldn't be run. + # `_should_backend_run` may assume that `_can_backend_run` returned True. + if backend_name == "networkx": + return True + backend = _load_backend(backend_name) + should_run = backend.should_run(self.name, args, kwargs) + if isinstance(should_run, str) or not should_run: + reason = f", because: {should_run}" if isinstance(should_run, str) else "" + _logger.debug( + "Backend '%s' shouldn't run `%s` with arguments: %s%s", + backend_name, + self.name, + _LazyArgsRepr(self, args, kwargs), + reason, + ) + return False + return True + + def _convert_arguments(self, backend_name, args, kwargs, *, use_cache, mutations): + """Convert graph arguments to the specified backend. + + Returns + ------- + args tuple and kwargs dict + """ + bound = self.__signature__.bind(*args, **kwargs) + bound.apply_defaults() + if not self.graphs: + bound_kwargs = bound.kwargs + del bound_kwargs["backend"] + return bound.args, bound_kwargs + if backend_name == "networkx": + # `backend_interface.convert_from_nx` preserves everything + preserve_edge_attrs = preserve_node_attrs = preserve_graph_attrs = True + else: + preserve_edge_attrs = self.preserve_edge_attrs + preserve_node_attrs = self.preserve_node_attrs + preserve_graph_attrs = self.preserve_graph_attrs + edge_attrs = self.edge_attrs + node_attrs = self.node_attrs + # Convert graphs into backend graph-like object + # Include the edge and/or node labels if provided to the algorithm + if preserve_edge_attrs is False: + # e.g. `preserve_edge_attrs=False` + pass + elif preserve_edge_attrs is True: + # e.g. `preserve_edge_attrs=True` + edge_attrs = None + elif isinstance(preserve_edge_attrs, str): + if bound.arguments[preserve_edge_attrs] is True or callable( + bound.arguments[preserve_edge_attrs] + ): + # e.g. `preserve_edge_attrs="attr"` and `func(attr=True)` + # e.g. `preserve_edge_attrs="attr"` and `func(attr=myfunc)` + preserve_edge_attrs = True + edge_attrs = None + elif bound.arguments[preserve_edge_attrs] is False and ( + isinstance(edge_attrs, str) + and edge_attrs == preserve_edge_attrs + or isinstance(edge_attrs, dict) + and preserve_edge_attrs in edge_attrs + ): + # e.g. `preserve_edge_attrs="attr"` and `func(attr=False)` + # Treat `False` argument as meaning "preserve_edge_data=False" + # and not `False` as the edge attribute to use. + preserve_edge_attrs = False + edge_attrs = None + else: + # e.g. `preserve_edge_attrs="attr"` and `func(attr="weight")` + preserve_edge_attrs = False + # Else: e.g. `preserve_edge_attrs={"G": {"weight": 1}}` + + if edge_attrs is None: + # May have been set to None above b/c all attributes are preserved + pass + elif isinstance(edge_attrs, str): + if edge_attrs[0] == "[": + # e.g. 
`edge_attrs="[edge_attributes]"` (argument of list of attributes) + # e.g. `func(edge_attributes=["foo", "bar"])` + edge_attrs = dict.fromkeys(bound.arguments[edge_attrs[1:-1]], 1) + elif callable(bound.arguments[edge_attrs]): + # e.g. `edge_attrs="weight"` and `func(weight=myfunc)` + preserve_edge_attrs = True + edge_attrs = None + elif bound.arguments[edge_attrs] is not None: + # e.g. `edge_attrs="weight"` and `func(weight="foo")` (default of 1) + edge_attrs = {bound.arguments[edge_attrs]: 1} + elif self.name == "to_numpy_array" and hasattr( + bound.arguments["dtype"], "names" + ): + # Custom handling: attributes may be obtained from `dtype` + edge_attrs = dict.fromkeys(bound.arguments["dtype"].names, 1) + else: + # e.g. `edge_attrs="weight"` and `func(weight=None)` + edge_attrs = None + else: + # e.g. `edge_attrs={"attr": "default"}` and `func(attr="foo", default=7)` + # e.g. `edge_attrs={"attr": 0}` and `func(attr="foo")` + edge_attrs = { + edge_attr: bound.arguments.get(val, 1) if isinstance(val, str) else val + for key, val in edge_attrs.items() + if (edge_attr := bound.arguments[key]) is not None + } + + if preserve_node_attrs is False: + # e.g. `preserve_node_attrs=False` + pass + elif preserve_node_attrs is True: + # e.g. `preserve_node_attrs=True` + node_attrs = None + elif isinstance(preserve_node_attrs, str): + if bound.arguments[preserve_node_attrs] is True or callable( + bound.arguments[preserve_node_attrs] + ): + # e.g. `preserve_node_attrs="attr"` and `func(attr=True)` + # e.g. `preserve_node_attrs="attr"` and `func(attr=myfunc)` + preserve_node_attrs = True + node_attrs = None + elif bound.arguments[preserve_node_attrs] is False and ( + isinstance(node_attrs, str) + and node_attrs == preserve_node_attrs + or isinstance(node_attrs, dict) + and preserve_node_attrs in node_attrs + ): + # e.g. `preserve_node_attrs="attr"` and `func(attr=False)` + # Treat `False` argument as meaning "preserve_node_data=False" + # and not `False` as the node attribute to use. Is this used? + preserve_node_attrs = False + node_attrs = None + else: + # e.g. `preserve_node_attrs="attr"` and `func(attr="weight")` + preserve_node_attrs = False + # Else: e.g. `preserve_node_attrs={"G": {"pos": None}}` + + if node_attrs is None: + # May have been set to None above b/c all attributes are preserved + pass + elif isinstance(node_attrs, str): + if node_attrs[0] == "[": + # e.g. `node_attrs="[node_attributes]"` (argument of list of attributes) + # e.g. `func(node_attributes=["foo", "bar"])` + node_attrs = dict.fromkeys(bound.arguments[node_attrs[1:-1]]) + elif callable(bound.arguments[node_attrs]): + # e.g. `node_attrs="weight"` and `func(weight=myfunc)` + preserve_node_attrs = True + node_attrs = None + elif bound.arguments[node_attrs] is not None: + # e.g. `node_attrs="weight"` and `func(weight="foo")` + node_attrs = {bound.arguments[node_attrs]: None} + else: + # e.g. `node_attrs="weight"` and `func(weight=None)` + node_attrs = None + else: + # e.g. `node_attrs={"attr": "default"}` and `func(attr="foo", default=7)` + # e.g. `node_attrs={"attr": 0}` and `func(attr="foo")` + node_attrs = { + node_attr: bound.arguments.get(val) if isinstance(val, str) else val + for key, val in node_attrs.items() + if (node_attr := bound.arguments[key]) is not None + } + + # It should be safe to assume that we either have networkx graphs or backend graphs. + # Future work: allow conversions between backends. 
+ for gname in self.graphs: + if gname in self.list_graphs: + bound.arguments[gname] = [ + self._convert_graph( + backend_name, + g, + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + preserve_graph_attrs=preserve_graph_attrs, + graph_name=gname, + use_cache=use_cache, + mutations=mutations, + ) + if getattr(g, "__networkx_backend__", "networkx") != backend_name + else g + for g in bound.arguments[gname] + ] + else: + graph = bound.arguments[gname] + if graph is None: + if gname in self.optional_graphs: + continue + raise TypeError( + f"Missing required graph argument `{gname}` in {self.name} function" + ) + if isinstance(preserve_edge_attrs, dict): + preserve_edges = False + edges = preserve_edge_attrs.get(gname, edge_attrs) + else: + preserve_edges = preserve_edge_attrs + edges = edge_attrs + if isinstance(preserve_node_attrs, dict): + preserve_nodes = False + nodes = preserve_node_attrs.get(gname, node_attrs) + else: + preserve_nodes = preserve_node_attrs + nodes = node_attrs + if isinstance(preserve_graph_attrs, set): + preserve_graph = gname in preserve_graph_attrs + else: + preserve_graph = preserve_graph_attrs + if getattr(graph, "__networkx_backend__", "networkx") != backend_name: + bound.arguments[gname] = self._convert_graph( + backend_name, + graph, + edge_attrs=edges, + node_attrs=nodes, + preserve_edge_attrs=preserve_edges, + preserve_node_attrs=preserve_nodes, + preserve_graph_attrs=preserve_graph, + graph_name=gname, + use_cache=use_cache, + mutations=mutations, + ) + bound_kwargs = bound.kwargs + del bound_kwargs["backend"] + return bound.args, bound_kwargs + + def _convert_graph( + self, + backend_name, + graph, + *, + edge_attrs, + node_attrs, + preserve_edge_attrs, + preserve_node_attrs, + preserve_graph_attrs, + graph_name, + use_cache, + mutations, + ): + nx_cache = getattr(graph, "__networkx_cache__", None) if use_cache else None + if nx_cache is not None: + cache = nx_cache.setdefault("backends", {}).setdefault(backend_name, {}) + key = _get_cache_key( + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + preserve_graph_attrs=preserve_graph_attrs, + ) + compat_key, rv = _get_from_cache(cache, key, mutations=mutations) + if rv is not None: + if "cache" not in nx.config.warnings_to_ignore: + warnings.warn( + "Note: conversions to backend graphs are saved to cache " + "(`G.__networkx_cache__` on the original graph) by default." + "\n\nThis warning means the cached graph is being used " + f"for the {backend_name!r} backend in the " + f"call to {self.name}.\n\nFor the cache to be consistent " + "(i.e., correct), the input graph must not have been " + "manually mutated since the cached graph was created. " + "Examples of manually mutating the graph data structures " + "resulting in an inconsistent cache include:\n\n" + " >>> G[u][v][key] = val\n\n" + "and\n\n" + " >>> for u, v, d in G.edges(data=True):\n" + " ... d[key] = val\n\n" + "Using methods such as `G.add_edge(u, v, weight=val)` " + "will correctly clear the cache to keep it consistent. " + "You may also use `G.__networkx_cache__.clear()` to " + "manually clear the cache, or set `G.__networkx_cache__` " + "to None to disable caching for G. 
Enable or disable caching " + "globally via `nx.config.cache_converted_graphs` config.\n\n" + "To disable this warning:\n\n" + ' >>> nx.config.warnings_to_ignore.add("cache")\n' + ) + if rv == FAILED_TO_CONVERT: + # NotImplementedError is reasonable to use since the backend doesn't + # implement this conversion. However, this will be different than + # the original exception that the backend raised when it failed. + # Using NotImplementedError allows the next backend to be attempted. + raise NotImplementedError( + "Graph conversion aborted: unable to convert graph to " + f"'{backend_name}' backend in call to `{self.name}', " + "because this conversion has previously failed." + ) + _logger.debug( + "Using cached converted graph (from '%s' to '%s' backend) " + "in call to '%s' for '%s' argument", + getattr(graph, "__networkx_backend__", None), + backend_name, + self.name, + graph_name, + ) + return rv + + if backend_name == "networkx": + # Perhaps we should check that "__networkx_backend__" attribute exists + # and return the original object if not. + if not hasattr(graph, "__networkx_backend__"): + _logger.debug( + "Unable to convert input to 'networkx' backend in call to '%s' for " + "'%s argument, because it is not from a backend (i.e., it does not " + "have `G.__networkx_backend__` attribute). Using the original " + "object: %s", + self.name, + graph_name, + graph, + ) + # This may fail, but let it fail in the networkx function + return graph + backend = _load_backend(graph.__networkx_backend__) + try: + rv = backend.convert_to_nx(graph) + except Exception: + if nx_cache is not None: + _set_to_cache(cache, key, FAILED_TO_CONVERT) + raise + else: + backend = _load_backend(backend_name) + try: + rv = backend.convert_from_nx( + graph, + edge_attrs=edge_attrs, + node_attrs=node_attrs, + preserve_edge_attrs=preserve_edge_attrs, + preserve_node_attrs=preserve_node_attrs, + # Always preserve graph attrs when we are caching b/c this should be + # cheap and may help prevent extra (unnecessary) conversions. Because + # we do this, we don't need `preserve_graph_attrs` in the cache key. + preserve_graph_attrs=preserve_graph_attrs or nx_cache is not None, + name=self.name, + graph_name=graph_name, + ) + except Exception: + if nx_cache is not None: + _set_to_cache(cache, key, FAILED_TO_CONVERT) + raise + if nx_cache is not None: + _set_to_cache(cache, key, rv) + _logger.debug( + "Caching converted graph (from '%s' to '%s' backend) " + "in call to '%s' for '%s' argument", + getattr(graph, "__networkx_backend__", None), + backend_name, + self.name, + graph_name, + ) + + return rv + + def _call_with_backend(self, backend_name, args, kwargs, *, extra_message=None): + """Call this dispatchable function with a backend without converting inputs.""" + if backend_name == "networkx": + return self.orig_func(*args, **kwargs) + backend = _load_backend(backend_name) + _logger.debug( + "Using backend '%s' for call to '%s' with arguments: %s", + backend_name, + self.name, + _LazyArgsRepr(self, args, kwargs), + ) + try: + return getattr(backend, self.name)(*args, **kwargs) + except NotImplementedError as exc: + if extra_message is not None: + _logger.debug( + "Backend '%s' raised when calling '%s': %s", + backend_name, + self.name, + exc, + ) + raise NotImplementedError(extra_message) from exc + raise + + def _convert_and_call( + self, + backend_name, + input_backend_names, + args, + kwargs, + *, + extra_message=None, + mutations=None, + ): + """Call this dispatchable function with a backend after converting inputs. 
+ + Parameters + ---------- + backend_name : str + input_backend_names : set[str] + args : arguments tuple + kwargs : keywords dict + extra_message : str, optional + Additional message to log if NotImplementedError is raised by backend. + mutations : list, optional + Used to clear objects gotten from cache if inputs will be mutated. + """ + if backend_name == "networkx": + func = self.orig_func + else: + backend = _load_backend(backend_name) + func = getattr(backend, self.name) + other_backend_names = input_backend_names - {backend_name} + _logger.debug( + "Converting input graphs from %s backend%s to '%s' backend for call to '%s'", + other_backend_names + if len(other_backend_names) > 1 + else f"'{next(iter(other_backend_names))}'", + "s" if len(other_backend_names) > 1 else "", + backend_name, + self.name, + ) + try: + converted_args, converted_kwargs = self._convert_arguments( + backend_name, + args, + kwargs, + use_cache=nx.config.cache_converted_graphs, + mutations=mutations, + ) + except NotImplementedError as exc: + # Only log the exception if we are adding an extra message + # because we don't want to lose any information. + _logger.debug( + "Failed to convert graphs from %s to '%s' backend for call to '%s'" + + ("" if extra_message is None else ": %s"), + input_backend_names, + backend_name, + self.name, + *(() if extra_message is None else (exc,)), + ) + if extra_message is not None: + raise NotImplementedError(extra_message) from exc + raise + if backend_name != "networkx": + _logger.debug( + "Using backend '%s' for call to '%s' with arguments: %s", + backend_name, + self.name, + _LazyArgsRepr(self, converted_args, converted_kwargs), + ) + try: + return func(*converted_args, **converted_kwargs) + except NotImplementedError as exc: + if extra_message is not None: + _logger.debug( + "Backend '%s' raised when calling '%s': %s", + backend_name, + self.name, + exc, + ) + raise NotImplementedError(extra_message) from exc + raise + + def _convert_and_call_for_tests( + self, backend_name, args, kwargs, *, fallback_to_nx=False + ): + """Call this dispatchable function with a backend; for use with testing.""" + backend = _load_backend(backend_name) + if not self._can_backend_run(backend_name, args, kwargs): + if fallback_to_nx or not self.graphs: + if fallback_to_nx: + _logger.debug( + "Falling back to use 'networkx' instead of '%s' backend " + "for call to '%s' with arguments: %s", + backend_name, + self.name, + _LazyArgsRepr(self, args, kwargs), + ) + return self.orig_func(*args, **kwargs) + + import pytest + + msg = f"'{self.name}' not implemented by {backend_name}" + if hasattr(backend, self.name): + msg += " with the given arguments" + pytest.xfail(msg) + + from collections.abc import Iterable, Iterator, Mapping + from copy import copy, deepcopy + from io import BufferedReader, BytesIO, StringIO, TextIOWrapper + from itertools import tee + from random import Random + + import numpy as np + from numpy.random import Generator, RandomState + from scipy.sparse import sparray + + # We sometimes compare the backend result (or input graphs) to the + # original result (or input graphs), so we need two sets of arguments. 
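+        # For example (illustrative; `node_iter` is a hypothetical argument): an
+        # iterator argument is duplicated below via `itertools.tee` so the backend
+        # call and the reference "networkx" call each consume their own copy:
+        #     node_iter_backend, node_iter_nx = tee(node_iter)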
+ compare_result_to_nx = ( + self._returns_graph + and "networkx" in self.backends + and self.name + not in { + # Has graphs as node values (unable to compare) + "quotient_graph", + # We don't handle tempfile.NamedTemporaryFile arguments + "read_gml", + "read_graph6", + "read_sparse6", + # We don't handle io.BufferedReader or io.TextIOWrapper arguments + "bipartite_read_edgelist", + "read_adjlist", + "read_edgelist", + "read_graphml", + "read_multiline_adjlist", + "read_pajek", + "from_pydot", + "pydot_read_dot", + "agraph_read_dot", + # graph comparison fails b/c of nan values + "read_gexf", + } + ) + compare_inputs_to_nx = ( + "networkx" in self.backends and self._will_call_mutate_input(args, kwargs) + ) + + # Tee iterators and copy random state so that they may be used twice. + if not args or not compare_result_to_nx and not compare_inputs_to_nx: + args_to_convert = args_nx = args + else: + args_to_convert, args_nx = zip( + *( + (arg, deepcopy(arg)) + if isinstance(arg, RandomState) + else (arg, copy(arg)) + if isinstance(arg, BytesIO | StringIO | Random | Generator) + else tee(arg) + if isinstance(arg, Iterator) + and not isinstance(arg, BufferedReader | TextIOWrapper) + else (arg, arg) + for arg in args + ) + ) + if not kwargs or not compare_result_to_nx and not compare_inputs_to_nx: + kwargs_to_convert = kwargs_nx = kwargs + else: + kwargs_to_convert, kwargs_nx = zip( + *( + ((k, v), (k, deepcopy(v))) + if isinstance(v, RandomState) + else ((k, v), (k, copy(v))) + if isinstance(v, BytesIO | StringIO | Random | Generator) + else ((k, (teed := tee(v))[0]), (k, teed[1])) + if isinstance(v, Iterator) + and not isinstance(v, BufferedReader | TextIOWrapper) + else ((k, v), (k, v)) + for k, v in kwargs.items() + ) + ) + kwargs_to_convert = dict(kwargs_to_convert) + kwargs_nx = dict(kwargs_nx) + + try: + converted_args, converted_kwargs = self._convert_arguments( + backend_name, + args_to_convert, + kwargs_to_convert, + use_cache=False, + mutations=None, + ) + except NotImplementedError as exc: + if fallback_to_nx: + _logger.debug( + "Graph conversion failed; falling back to use 'networkx' instead " + "of '%s' backend for call to '%s'", + backend_name, + self.name, + ) + return self.orig_func(*args_nx, **kwargs_nx) + import pytest + + pytest.xfail( + exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}" + ) + + if compare_inputs_to_nx: + # Ensure input graphs are different if the function mutates an input graph. 
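+            # Bind both argument sets so each graph argument can be looked up by
+            # name, then copy any graph object shared between the two sets so the
+            # reference networkx call below mutates its own copy.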
+            bound_backend = self.__signature__.bind(*converted_args, **converted_kwargs)
+            bound_backend.apply_defaults()
+            bound_nx = self.__signature__.bind(*args_nx, **kwargs_nx)
+            bound_nx.apply_defaults()
+            for gname in self.graphs:
+                graph_nx = bound_nx.arguments[gname]
+                if bound_backend.arguments[gname] is graph_nx is not None:
+                    bound_nx.arguments[gname] = graph_nx.copy()
+            args_nx = bound_nx.args
+            kwargs_nx = bound_nx.kwargs
+            kwargs_nx.pop("backend", None)
+
+        _logger.debug(
+            "Using backend '%s' for call to '%s' with arguments: %s",
+            backend_name,
+            self.name,
+            _LazyArgsRepr(self, converted_args, converted_kwargs),
+        )
+        try:
+            result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
+        except NotImplementedError as exc:
+            if fallback_to_nx:
+                _logger.debug(
+                    "Backend '%s' raised when calling '%s': %s; "
+                    "falling back to use 'networkx' instead.",
+                    backend_name,
+                    self.name,
+                    exc,
+                )
+                return self.orig_func(*args_nx, **kwargs_nx)
+            import pytest
+
+            pytest.xfail(
+                exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}"
+            )
+
+        # Verify that `self._returns_graph` is correct. This compares the return type
+        # to the type expected from `self._returns_graph`. This handles tuple and list
+        # return types, but *does not* catch functions that yield graphs.
+        if (
+            self._returns_graph
+            != (
+                isinstance(result, nx.Graph)
+                or hasattr(result, "__networkx_backend__")
+                or isinstance(result, tuple | list)
+                and any(
+                    isinstance(x, nx.Graph) or hasattr(x, "__networkx_backend__")
+                    for x in result
+                )
+            )
+            and not (
+                # May return Graph or None
+                self.name in {"check_planarity", "check_planarity_recursive"}
+                and any(x is None for x in result)
+            )
+            and not (
+                # May return Graph or dict
+                self.name in {"held_karp_ascent"}
+                and any(isinstance(x, dict) for x in result)
+            )
+            and self.name
+            not in {
+                # yields graphs
+                "all_triads",
+                "general_k_edge_subgraphs",
+                # yields graphs or arrays
+                "nonisomorphic_trees",
+            }
+        ):
+            raise RuntimeError(f"`returns_graph` is incorrect for {self.name}")
+
+        def check_result(val, depth=0):
+            if isinstance(val, np.number):
+                raise RuntimeError(
+                    f"{self.name} returned a numpy scalar {val} ({type(val)}, depth={depth})"
+                )
+            if isinstance(val, np.ndarray | sparray):
+                return
+            if isinstance(val, nx.Graph):
+                check_result(val._node, depth=depth + 1)
+                check_result(val._adj, depth=depth + 1)
+                return
+            if isinstance(val, Iterator):
+                raise NotImplementedError
+            if isinstance(val, Iterable) and not isinstance(val, str):
+                for x in val:
+                    check_result(x, depth=depth + 1)
+            if isinstance(val, Mapping):
+                for x in val.values():
+                    check_result(x, depth=depth + 1)
+
+        def check_iterator(it):
+            for val in it:
+                try:
+                    check_result(val)
+                except RuntimeError as exc:
+                    raise RuntimeError(
+                        f"{self.name} returned a numpy scalar {val} ({type(val)})"
+                    ) from exc
+                yield val
+
+        if self.name in {"from_edgelist"}:
+            # numpy scalars are explicitly given as values in some tests
+            pass
+        elif isinstance(result, Iterator):
+            result = check_iterator(result)
+        else:
+            try:
+                check_result(result)
+            except RuntimeError as exc:
+                raise RuntimeError(
+                    f"{self.name} returned a numpy scalar {result} ({type(result)})"
+                ) from exc
+
+        def assert_graphs_equal(G1, G2, strict=True):
+            assert G1.number_of_nodes() == G2.number_of_nodes()
+            assert G1.number_of_edges() == G2.number_of_edges()
+            assert G1.is_directed() is G2.is_directed()
+            assert G1.is_multigraph() is G2.is_multigraph()
+            if strict:
+                assert G1.graph == G2.graph
+                assert G1._node == G2._node
+                assert G1._adj == G2._adj
+            else:
+                assert set(G1) == set(G2)
+                assert set(G1.edges) == set(G2.edges)
+
+        if compare_inputs_to_nx:
+            # Special-case algorithms that mutate input graphs
+            result_nx = self.orig_func(*args_nx, **kwargs_nx)
+            for gname in self.graphs:
+                G0 = bound_backend.arguments[gname]
+                G1 = bound_nx.arguments[gname]
+                if G0 is not None or G1 is not None:
+                    G1 = backend.convert_to_nx(G1)
+                    assert_graphs_equal(G0, G1, strict=False)
+
+        converted_result = backend.convert_to_nx(result)
+        if compare_result_to_nx and isinstance(converted_result, nx.Graph):
+            # For graph return types (e.g. generators), we compare that results are
+            # the same between the backend and networkx, then return the original
+            # networkx result so the iteration order will be consistent in tests.
+            if compare_inputs_to_nx:
+                G = result_nx
+            else:
+                G = self.orig_func(*args_nx, **kwargs_nx)
+            assert_graphs_equal(G, converted_result)
+            return G
+
+        return converted_result
+
+    def _make_doc(self):
+        """Generate the "Backends" section at the end of the docstring for functions
+        that have one or more alternate backend implementations, using the
+        `backend_info` entry-point."""
+
+        if self.backends == {"networkx"}:
+            return self._orig_doc
+        # Add "Backends" section to the bottom of the docstring (if there are backends)
+        lines = [
+            "Backends",
+            "--------",
+        ]
+        for backend in sorted(self.backends - {"networkx"}):
+            info = backend_info[backend]
+            if "short_summary" in info:
+                lines.append(f"{backend} : {info['short_summary']}")
+            else:
+                lines.append(backend)
+            if "functions" not in info or self.name not in info["functions"]:
+                lines.append("")
+                continue
+
+            func_info = info["functions"][self.name]
+
+            # Renaming extra_docstring to additional_docs
+            if func_docs := (
+                func_info.get("additional_docs") or func_info.get("extra_docstring")
+            ):
+                lines.extend(
+                    f"  {line}" if line else line for line in func_docs.split("\n")
+                )
+                add_gap = True
+            else:
+                add_gap = False
+
+            # Renaming extra_parameters to additional_parameters
+            if extra_parameters := (
+                func_info.get("extra_parameters")
+                or func_info.get("additional_parameters")
+            ):
+                if add_gap:
+                    lines.append("")
+                lines.append("  Additional parameters:")
+                for param in sorted(extra_parameters):
+                    lines.append(f"    {param}")
+                    if desc := extra_parameters[param]:
+                        lines.append(f"      {desc}")
+                    lines.append("")
+            else:
+                lines.append("")
+
+            if func_url := func_info.get("url"):
+                lines.append(f"[`Source <{func_url}>`_]")
+                lines.append("")
+
+        # We assume the docstrings are indented by four spaces (true for now)
+        new_doc = self._orig_doc or ""
+        if not new_doc.rstrip():
+            new_doc = f"The original docstring for {self.name} was empty."
+        if self.backends:
+            lines.pop()  # Remove last empty line
+            to_add = "\n    ".join(lines)
+            new_doc = f"{new_doc.rstrip()}\n\n    {to_add}"
+
+        # For backend-only funcs, add "Attention" admonishment after the one line summary
+        if "networkx" not in self.backends:
+            lines = new_doc.split("\n")
+            index = 0
+            while not lines[index].strip():
+                index += 1
+            while index < len(lines) and lines[index].strip():
+                index += 1
+            backends = sorted(self.backends)
+            if len(backends) == 0:
+                example = ""
+            elif len(backends) == 1:
+                example = f' such as "{backends[0]}"'
+            elif len(backends) == 2:
+                example = f' such as "{backends[0]}" or "{backends[1]}"'
+            else:
+                example = (
+                    " such as "
+                    + ", ".join(f'"{x}"' for x in backends[:-1])
+                    + f', or "{backends[-1]}"'  # Oxford comma
+                )
+            to_add = (
+                "\n    .. 
attention:: This function does not have a default NetworkX implementation.\n" + " It may only be run with an installable :doc:`backend ` that\n" + f" supports it{example}.\n\n" + " Hint: use ``backend=...`` keyword argument to specify a backend or add\n" + " backends to ``nx.config.backend_priority``." + ) + lines.insert(index, to_add) + new_doc = "\n".join(lines) + return new_doc + + def __reduce__(self): + """Allow this object to be serialized with pickle. + + This uses the global registry `_registered_algorithms` to deserialize. + """ + return _restore_dispatchable, (self.name,) + + +def _restore_dispatchable(name): + return _registered_algorithms[name].__wrapped__ + + +def _get_cache_key( + *, + edge_attrs, + node_attrs, + preserve_edge_attrs, + preserve_node_attrs, + preserve_graph_attrs, +): + """Return key used by networkx caching given arguments for ``convert_from_nx``.""" + # edge_attrs: dict | None + # node_attrs: dict | None + # preserve_edge_attrs: bool (False if edge_attrs is not None) + # preserve_node_attrs: bool (False if node_attrs is not None) + return ( + frozenset(edge_attrs.items()) + if edge_attrs is not None + else preserve_edge_attrs, + frozenset(node_attrs.items()) + if node_attrs is not None + else preserve_node_attrs, + ) + + +def _get_from_cache(cache, key, *, backend_name=None, mutations=None): + """Search the networkx cache for a graph that is compatible with ``key``. + + Parameters + ---------- + cache : dict + If ``backend_name`` is given, then this is treated as ``G.__networkx_cache__``, + but if ``backend_name`` is None, then this is treated as the resolved inner + cache such as ``G.__networkx_cache__["backends"][backend_name]``. + key : tuple + Cache key from ``_get_cache_key``. + backend_name : str, optional + Name of the backend to control how ``cache`` is interpreted. + mutations : list, optional + Used internally to clear objects gotten from cache if inputs will be mutated. + + Returns + ------- + tuple or None + The key of the compatible graph found in the cache. + graph or "FAILED_TO_CONVERT" or None + A compatible graph if possible. "FAILED_TO_CONVERT" indicates that a previous + conversion attempt failed for this cache key. + """ + if backend_name is not None: + cache = cache.get("backends", {}).get(backend_name, {}) + if not cache: + return None, None + + # Do a simple search for a cached graph with compatible data. + # For example, if we need a single attribute, then it's okay + # to use a cached graph that preserved all attributes. + # This looks for an exact match first. + edge_key, node_key = key + for compat_key in itertools.product( + (edge_key, True) if edge_key is not True else (True,), + (node_key, True) if node_key is not True else (True,), + ): + if (rv := cache.get(compat_key)) is not None and ( + rv != FAILED_TO_CONVERT or key == compat_key + ): + if mutations is not None: + # Remove this item from the cache (after all conversions) if + # the call to this dispatchable function will mutate an input. + mutations.append((cache, compat_key)) + return compat_key, rv + + # Iterate over the items in `cache` to see if any are compatible. + # For example, if no edge attributes are needed, then a graph + # with any edge attribute will suffice. We use the same logic + # below (but switched) to clear unnecessary items from the cache. + # Use `list(cache.items())` to be thread-safe. 
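+    # Cache keys are (edge_key, node_key) pairs, where each element is True
+    # (all attributes were preserved), False (no attributes are needed), or a
+    # frozenset of (attr, default) items from `_get_cache_key`. A cached graph
+    # is compatible when the attributes it preserved are a superset of those
+    # required by `key`.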
+ for (ekey, nkey), graph in list(cache.items()): + if graph == FAILED_TO_CONVERT: + # Return FAILED_TO_CONVERT if any cache key that requires a subset + # of the edge/node attributes of the given cache key has previously + # failed to convert. This logic is similar to `_set_to_cache`. + if ekey is False or edge_key is True: + pass + elif ekey is True or edge_key is False or not ekey.issubset(edge_key): + continue + if nkey is False or node_key is True: # or nkey == node_key: + pass + elif nkey is True or node_key is False or not nkey.issubset(node_key): + continue + # Save to cache for faster subsequent lookups + cache[key] = FAILED_TO_CONVERT + elif edge_key is False or ekey is True: + pass # Cache works for edge data! + elif edge_key is True or ekey is False or not edge_key.issubset(ekey): + continue # Cache missing required edge data; does not work + if node_key is False or nkey is True: + pass # Cache works for node data! + elif node_key is True or nkey is False or not node_key.issubset(nkey): + continue # Cache missing required node data; does not work + if mutations is not None: + # Remove this item from the cache (after all conversions) if + # the call to this dispatchable function will mutate an input. + mutations.append((cache, (ekey, nkey))) + return (ekey, nkey), graph + + return None, None + + +def _set_to_cache(cache, key, graph, *, backend_name=None): + """Set a backend graph to the cache, and remove unnecessary cached items. + + Parameters + ---------- + cache : dict + If ``backend_name`` is given, then this is treated as ``G.__networkx_cache__``, + but if ``backend_name`` is None, then this is treated as the resolved inner + cache such as ``G.__networkx_cache__["backends"][backend_name]``. + key : tuple + Cache key from ``_get_cache_key``. + graph : graph or "FAILED_TO_CONVERT" + Setting value to "FAILED_TO_CONVERT" prevents this conversion from being + attempted in future calls. + backend_name : str, optional + Name of the backend to control how ``cache`` is interpreted. + + Returns + ------- + dict + The items that were removed from the cache. + """ + if backend_name is not None: + cache = cache.setdefault("backends", {}).setdefault(backend_name, {}) + # Remove old cached items that are no longer necessary since they + # are dominated/subsumed/outdated by what was just calculated. + # This uses the same logic as above, but with keys switched. + # Also, don't update the cache here if the call will mutate an input. 
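+    # For example, caching a conversion that preserved all edge and node
+    # attributes makes any cached conversion that preserved only a subset of
+    # those attributes redundant, so such entries are removed below.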
+ removed = {} + edge_key, node_key = key + cache[key] = graph # Set at beginning to be thread-safe + if graph == FAILED_TO_CONVERT: + return removed + for cur_key in list(cache): + if cur_key == key: + continue + ekey, nkey = cur_key + if ekey is False or edge_key is True: + pass + elif ekey is True or edge_key is False or not ekey.issubset(edge_key): + continue + if nkey is False or node_key is True: + pass + elif nkey is True or node_key is False or not nkey.issubset(node_key): + continue + # Use pop instead of del to try to be thread-safe + if (graph := cache.pop(cur_key, None)) is not None: + removed[cur_key] = graph + return removed + + +class _LazyArgsRepr: + """Simple wrapper to display arguments of dispatchable functions in logging calls.""" + + def __init__(self, func, args, kwargs): + self.func = func + self.args = args + self.kwargs = kwargs + self.value = None + + def __repr__(self): + if self.value is None: + bound = self.func.__signature__.bind_partial(*self.args, **self.kwargs) + inner = ", ".join(f"{key}={val!r}" for key, val in bound.arguments.items()) + self.value = f"({inner})" + return self.value + + +if os.environ.get("_NETWORKX_BUILDING_DOCS_"): + # When building docs with Sphinx, use the original function with the + # dispatched __doc__, b/c Sphinx renders normal Python functions better. + # This doesn't show e.g. `*, backend=None, **backend_kwargs` in the + # signatures, which is probably okay. It does allow the docstring to be + # updated based on the installed backends. + _orig_dispatchable = _dispatchable + + def _dispatchable(func=None, **kwargs): # type: ignore[no-redef] + if func is None: + return partial(_dispatchable, **kwargs) + dispatched_func = _orig_dispatchable(func, **kwargs) + func.__doc__ = dispatched_func.__doc__ + return func + + _dispatchable.__doc__ = _orig_dispatchable.__new__.__doc__ # type: ignore[method-assign,assignment] + _sig = inspect.signature(_orig_dispatchable.__new__) + _dispatchable.__signature__ = _sig.replace( # type: ignore[method-assign,assignment] + parameters=[v for k, v in _sig.parameters.items() if k != "cls"] + ) diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/configs.py b/.venv/lib/python3.12/site-packages/networkx/utils/configs.py new file mode 100644 index 0000000..bbd9792 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/configs.py @@ -0,0 +1,391 @@ +import collections +import typing +from dataclasses import dataclass + +__all__ = ["Config"] + + +@dataclass(init=False, eq=False, slots=True, kw_only=True, match_args=False) +class Config: + """The base class for NetworkX configuration. + + There are two ways to use this to create configurations. The recommended way + is to subclass ``Config`` with docs and annotations. + + >>> class MyConfig(Config): + ... '''Breakfast!''' + ... + ... eggs: int + ... spam: int + ... + ... def _on_setattr(self, key, value): + ... assert isinstance(value, int) and value >= 0 + ... return value + >>> cfg = MyConfig(eggs=1, spam=5) + + Another way is to simply pass the initial configuration as keyword arguments to + the ``Config`` instance: + + >>> cfg1 = Config(eggs=1, spam=5) + >>> cfg1 + Config(eggs=1, spam=5) + + Once defined, config items may be modified, but can't be added or deleted by default. 
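+
+    Attempting to add an unknown config item to a strict config raises
+    ``AttributeError`` (continuing the ``MyConfig`` example from above):
+
+    >>> cfg.toast = 1
+    Traceback (most recent call last):
+        ...
+    AttributeError: Invalid config name: 'toast'
+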
+ ``Config`` is a ``Mapping``, and can get and set configs via attributes or brackets: + + >>> cfg.eggs = 2 + >>> cfg.eggs + 2 + >>> cfg["spam"] = 42 + >>> cfg["spam"] + 42 + + For convenience, it can also set configs within a context with the "with" statement: + + >>> with cfg(spam=3): + ... print("spam (in context):", cfg.spam) + spam (in context): 3 + >>> print("spam (after context):", cfg.spam) + spam (after context): 42 + + Subclasses may also define ``_on_setattr`` (as done in the example above) + to ensure the value being assigned is valid: + + >>> cfg.spam = -1 + Traceback (most recent call last): + ... + AssertionError + + If a more flexible configuration object is needed that allows adding and deleting + configurations, then pass ``strict=False`` when defining the subclass: + + >>> class FlexibleConfig(Config, strict=False): + ... default_greeting: str = "Hello" + >>> flexcfg = FlexibleConfig() + >>> flexcfg.name = "Mr. Anderson" + >>> flexcfg + FlexibleConfig(default_greeting='Hello', name='Mr. Anderson') + """ + + def __init_subclass__(cls, strict=True): + cls._strict = strict + + def __new__(cls, **kwargs): + orig_class = cls + if cls is Config: + # Enable the "simple" case of accepting config definition as keywords + cls = type( + cls.__name__, + (cls,), + {"__annotations__": dict.fromkeys(kwargs, typing.Any)}, + ) + cls = dataclass( + eq=False, + repr=cls._strict, + slots=cls._strict, + kw_only=True, + match_args=False, + )(cls) + if not cls._strict: + cls.__repr__ = _flexible_repr + cls._orig_class = orig_class # Save original class so we can pickle + cls._prev = None # Stage previous configs to enable use as context manager + cls._context_stack = [] # Stack of previous configs when used as context + instance = object.__new__(cls) + instance.__init__(**kwargs) + return instance + + def _on_setattr(self, key, value): + """Process config value and check whether it is valid. Useful for subclasses.""" + return value + + def _on_delattr(self, key): + """Callback for when a config item is being deleted. Useful for subclasses.""" + + # Control behavior of attributes + def __dir__(self): + return self.__dataclass_fields__.keys() + + def __setattr__(self, key, value): + if self._strict and key not in self.__dataclass_fields__: + raise AttributeError(f"Invalid config name: {key!r}") + value = self._on_setattr(key, value) + object.__setattr__(self, key, value) + self.__class__._prev = None + + def __delattr__(self, key): + if self._strict: + raise TypeError( + f"Configuration items can't be deleted (can't delete {key!r})." 
+            )
+        self._on_delattr(key)
+        object.__delattr__(self, key)
+        self.__class__._prev = None
+
+    # Be a `collections.abc.Collection`
+    def __contains__(self, key):
+        return (
+            key in self.__dataclass_fields__ if self._strict else key in self.__dict__
+        )
+
+    def __iter__(self):
+        return iter(self.__dataclass_fields__ if self._strict else self.__dict__)
+
+    def __len__(self):
+        return len(self.__dataclass_fields__ if self._strict else self.__dict__)
+
+    def __reversed__(self):
+        return reversed(self.__dataclass_fields__ if self._strict else self.__dict__)
+
+    # Add dunder methods for `collections.abc.Mapping`
+    def __getitem__(self, key):
+        try:
+            return getattr(self, key)
+        except AttributeError as err:
+            raise KeyError(*err.args) from None
+
+    def __setitem__(self, key, value):
+        try:
+            self.__setattr__(key, value)
+        except AttributeError as err:
+            raise KeyError(*err.args) from None
+
+    def __delitem__(self, key):
+        try:
+            self.__delattr__(key)
+        except AttributeError as err:
+            raise KeyError(*err.args) from None
+
+    _ipython_key_completions_ = __dir__  # config["
+
+    # Go ahead and make it a `collections.abc.Mapping`
+    def get(self, key, default=None):
+        return getattr(self, key, default)
+
+    def items(self):
+        return collections.abc.ItemsView(self)
+
+    def keys(self):
+        return collections.abc.KeysView(self)
+
+    def values(self):
+        return collections.abc.ValuesView(self)
+
+    # dataclass can define __eq__ for us, but do it here so it works after pickling
+    def __eq__(self, other):
+        if not isinstance(other, Config):
+            return NotImplemented
+        return self._orig_class == other._orig_class and self.items() == other.items()
+
+    # Make pickle work
+    def __reduce__(self):
+        return self._deserialize, (self._orig_class, dict(self))
+
+    @staticmethod
+    def _deserialize(cls, kwargs):
+        return cls(**kwargs)
+
+    # Allow use as a context manager
+    def __call__(self, **kwargs):
+        kwargs = {key: self._on_setattr(key, val) for key, val in kwargs.items()}
+        prev = dict(self)
+        for key, val in kwargs.items():
+            setattr(self, key, val)
+        self.__class__._prev = prev
+        return self
+
+    def __enter__(self):
+        if self.__class__._prev is None:
+            raise RuntimeError(
+                "Config being used as a context manager without config items being set. "
+                "Set config items via keyword arguments when calling the config object. "
+                "For example, using config as a context manager should be like:\n\n"
+                '    >>> with cfg(breakfast="spam"):\n'
+                "    ...     ...  # Do stuff\n"
+            )
+        self.__class__._context_stack.append(self.__class__._prev)
+        self.__class__._prev = None
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        prev = self.__class__._context_stack.pop()
+        for key, val in prev.items():
+            setattr(self, key, val)
+
+
+def _flexible_repr(self):
+    return (
+        f"{self.__class__.__qualname__}("
+        + ", ".join(f"{key}={val!r}" for key, val in self.__dict__.items())
+        + ")"
+    )
+
+
+# Register, b/c `Mapping.__subclasshook__` returns `NotImplemented`
+collections.abc.Mapping.register(Config)
+
+
+class BackendPriorities(Config, strict=False):
+    """Configuration to control automatic conversion to and calling of backends.
+
+    Priority is given to backends listed earlier.
+
+    Parameters
+    ----------
+    algos : list of backend names
+        This controls "algorithms" such as ``nx.pagerank`` that don't return a graph.
+    generators : list of backend names
+        This controls "generators" such as ``nx.from_pandas_edgelist`` that return a graph.
+ kwargs : variadic keyword arguments of function name to list of backend names + This allows each function to be configured separately and will override the config + in ``algos`` or ``generators`` if present. The dispatchable function name may be + gotten from the ``.name`` attribute such as ``nx.pagerank.name`` (it's typically + the same as the name of the function). + """ + + algos: list[str] + generators: list[str] + + def _on_setattr(self, key, value): + from .backends import _registered_algorithms, backend_info + + if key in {"algos", "generators"}: + pass + elif key not in _registered_algorithms: + raise AttributeError( + f"Invalid config name: {key!r}. Expected 'algos', 'generators', or a name " + "of a dispatchable function (e.g. `.name` attribute of the function)." + ) + if not (isinstance(value, list) and all(isinstance(x, str) for x in value)): + raise TypeError( + f"{key!r} config must be a list of backend names; got {value!r}" + ) + if missing := {x for x in value if x not in backend_info}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + return value + + def _on_delattr(self, key): + if key in {"algos", "generators"}: + raise TypeError(f"{key!r} configuration item can't be deleted.") + + +class NetworkXConfig(Config): + """Configuration for NetworkX that controls behaviors such as how to use backends. + + Attribute and bracket notation are supported for getting and setting configurations:: + + >>> nx.config.backend_priority == nx.config["backend_priority"] + True + + Parameters + ---------- + backend_priority : list of backend names or dict or BackendPriorities + Enable automatic conversion of graphs to backend graphs for functions + implemented by the backend. Priority is given to backends listed earlier. + This is a nested configuration with keys ``algos``, ``generators``, and, + optionally, function names. Setting this value to a list of backend names + will set ``nx.config.backend_priority.algos``. For more information, see + ``help(nx.config.backend_priority)``. Default is empty list. + + backends : Config mapping of backend names to backend Config + The keys of the Config mapping are names of all installed NetworkX backends, + and the values are their configurations as Config mappings. + + cache_converted_graphs : bool + If True, then save converted graphs to the cache of the input graph. Graph + conversion may occur when automatically using a backend from `backend_priority` + or when using the `backend=` keyword argument to a function call. Caching can + improve performance by avoiding repeated conversions, but it uses more memory. + Care should be taken to not manually mutate a graph that has cached graphs; for + example, ``G[u][v][k] = val`` changes the graph, but does not clear the cache. + Using methods such as ``G.add_edge(u, v, weight=val)`` will clear the cache to + keep it consistent. ``G.__networkx_cache__.clear()`` manually clears the cache. + Default is True. + + fallback_to_nx : bool + If True, then "fall back" and run with the default "networkx" implementation + for dispatchable functions not implemented by backends of input graphs. When a + backend graph is passed to a dispatchable function, the default behavior is to + use the implementation from that backend if possible and raise if not. Enabling + ``fallback_to_nx`` makes the networkx implementation the fallback to use instead + of raising, and will convert the backend graph to a networkx-compatible graph. + Default is False. 
+ + warnings_to_ignore : set of strings + Control which warnings from NetworkX are not emitted. Valid elements: + + - `"cache"`: when a cached value is used from ``G.__networkx_cache__``. + + Notes + ----- + Environment variables may be used to control some default configurations: + + - ``NETWORKX_BACKEND_PRIORITY``: set ``backend_priority.algos`` from comma-separated names. + - ``NETWORKX_CACHE_CONVERTED_GRAPHS``: set ``cache_converted_graphs`` to True if nonempty. + - ``NETWORKX_FALLBACK_TO_NX``: set ``fallback_to_nx`` to True if nonempty. + - ``NETWORKX_WARNINGS_TO_IGNORE``: set `warnings_to_ignore` from comma-separated names. + + and can be used for finer control of ``backend_priority`` such as: + + - ``NETWORKX_BACKEND_PRIORITY_ALGOS``: same as ``NETWORKX_BACKEND_PRIORITY`` + to set ``backend_priority.algos``. + + This is a global configuration. Use with caution when using from multiple threads. + """ + + backend_priority: BackendPriorities + backends: Config + cache_converted_graphs: bool + fallback_to_nx: bool + warnings_to_ignore: set[str] + + def _on_setattr(self, key, value): + from .backends import backend_info + + if key == "backend_priority": + if isinstance(value, list): + # `config.backend_priority = [backend]` sets `backend_priority.algos` + value = BackendPriorities( + **dict( + self.backend_priority, + algos=self.backend_priority._on_setattr("algos", value), + ) + ) + elif isinstance(value, dict): + kwargs = value + value = BackendPriorities(algos=[], generators=[]) + for key, val in kwargs.items(): + setattr(value, key, val) + elif not isinstance(value, BackendPriorities): + raise TypeError( + f"{key!r} config must be a dict of lists of backend names; got {value!r}" + ) + elif key == "backends": + if not ( + isinstance(value, Config) + and all(isinstance(key, str) for key in value) + and all(isinstance(val, Config) for val in value.values()) + ): + raise TypeError( + f"{key!r} config must be a Config of backend configs; got {value!r}" + ) + if missing := {x for x in value if x not in backend_info}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError(f"Unknown backend when setting {key!r}: {missing}") + elif key in {"cache_converted_graphs", "fallback_to_nx"}: + if not isinstance(value, bool): + raise TypeError(f"{key!r} config must be True or False; got {value!r}") + elif key == "warnings_to_ignore": + if not (isinstance(value, set) and all(isinstance(x, str) for x in value)): + raise TypeError( + f"{key!r} config must be a set of warning names; got {value!r}" + ) + known_warnings = {"cache"} + if missing := {x for x in value if x not in known_warnings}: + missing = ", ".join(map(repr, sorted(missing))) + raise ValueError( + f"Unknown warning when setting {key!r}: {missing}. 
Valid entries: "
+                    + ", ".join(sorted(known_warnings))
+                )
+        return value
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/decorators.py b/.venv/lib/python3.12/site-packages/networkx/utils/decorators.py
new file mode 100644
index 0000000..f222744
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/decorators.py
@@ -0,0 +1,1233 @@
+import bz2
+import collections
+import gzip
+import inspect
+import itertools
+import re
+from collections import defaultdict
+from os.path import splitext
+from pathlib import Path
+
+import networkx as nx
+from networkx.utils import create_py_random_state, create_random_state
+
+__all__ = [
+    "not_implemented_for",
+    "open_file",
+    "nodes_or_number",
+    "np_random_state",
+    "py_random_state",
+    "argmap",
+]
+
+
+def not_implemented_for(*graph_types):
+    """Decorator to mark algorithms as not implemented
+
+    Parameters
+    ----------
+    graph_types : container of strings
+        Entries must be one of "directed", "undirected", "multigraph", or "graph".
+
+    Returns
+    -------
+    _require : function
+        The decorated function.
+
+    Raises
+    ------
+    NetworkXNotImplemented
+        If the decorated function is called with a graph whose type matches
+        ``graph_types``.
+
+    Notes
+    -----
+    Multiple types are joined logically with "and".
+    For "or" use multiple @not_implemented_for() lines.
+
+    Examples
+    --------
+    Decorate functions like this::
+
+        @not_implemented_for("directed")
+        def sp_function(G):
+            pass
+
+
+        # rule out MultiDiGraph
+        @not_implemented_for("directed", "multigraph")
+        def sp_np_function(G):
+            pass
+
+
+        # rule out all except DiGraph
+        @not_implemented_for("undirected")
+        @not_implemented_for("multigraph")
+        def sp_np_function(G):
+            pass
+    """
+    if ("directed" in graph_types) and ("undirected" in graph_types):
+        raise ValueError("Function not implemented on directed AND undirected graphs?")
+    if ("multigraph" in graph_types) and ("graph" in graph_types):
+        raise ValueError("Function not implemented on graph AND multigraphs?")
+    if not set(graph_types) < {"directed", "undirected", "multigraph", "graph"}:
+        raise KeyError(
+            "use one or more of directed, undirected, multigraph, graph.  "
+            f"You used {graph_types}"
+        )
+
+    # 3-way logic: True if "directed" input, False if "undirected" input, else None
+    dval = ("directed" in graph_types) or "undirected" not in graph_types and None
+    mval = ("multigraph" in graph_types) or "graph" not in graph_types and None
+    errmsg = f"not implemented for {' '.join(graph_types)} type"
+
+    def _not_implemented_for(g):
+        if (mval is None or mval == g.is_multigraph()) and (
+            dval is None or dval == g.is_directed()
+        ):
+            raise nx.NetworkXNotImplemented(errmsg)
+
+        return g
+
+    return argmap(_not_implemented_for, 0)
+
+
+# To handle new extensions, define a function accepting a `path` and `mode`.
+# Then add the extension to _dispatch_dict.
+fopeners = {
+    ".gz": gzip.open,
+    ".gzip": gzip.open,
+    ".bz2": bz2.BZ2File,
+}
+_dispatch_dict = defaultdict(lambda: open, **fopeners)
+
+
+def open_file(path_arg, mode="r"):
+    """Decorator to ensure clean opening and closing of files.
+
+    Parameters
+    ----------
+    path_arg : string or int
+        Name or index of the argument that is a path.
+
+    mode : str
+        String for opening mode.
+
+    Returns
+    -------
+    _open_file : function
+        Function which cleanly executes the I/O.
+
+    Examples
+    --------
+    Decorate functions like this::
+
+        @open_file(0, "r")
+        def read_function(pathname):
+            pass
+
+
+        @open_file(1, "w")
+        def write_function(G, pathname):
+            pass
+
+
+        @open_file(1, "w")
+        def write_function(G, pathname="graph.dot"):
+            pass
+
+
+        @open_file("pathname", "w")
+        def write_function(G, pathname="graph.dot"):
+            pass
+
+
+        @open_file("path", "w+")
+        def another_function(arg, **kwargs):
+            path = kwargs["path"]
+            pass
+
+    Notes
+    -----
+    Note that this decorator solves the problem when a path argument is
+    specified as a string, but it does not handle the situation when the
+    function wants to accept a default of None (and then handle it).
+
+    Here is an example of how to handle this case::
+
+        @open_file("path")
+        def some_function(arg1, arg2, path=None):
+            if path is None:
+                fobj = tempfile.NamedTemporaryFile(delete=False)
+            else:
+                # `path` could have been a string or file object or something
+                # similar. In any event, the decorator has given us a file object
+                # and it will close it for us, if it should.
+                fobj = path
+
+            try:
+                fobj.write("blah")
+            finally:
+                if path is None:
+                    fobj.close()
+
+    Normally, we'd want to use "with" to ensure that fobj gets closed.
+    However, the decorator will make `path` a file object for us,
+    and using "with" would undesirably close that file object.
+    Instead, we use a try block, as shown above.
+    When we exit the function, fobj will be closed, if it should be, by the decorator.
+    """
+
+    def _open_file(path):
+        # Now we have the path_arg. There are two types of input to consider:
+        #   1) string representing a path that should be opened
+        #   2) an already opened file object
+        if isinstance(path, str):
+            ext = splitext(path)[1]
+        elif isinstance(path, Path):
+            # path is a pathlib reference to a filename
+            ext = path.suffix
+            path = str(path)
+        else:
+            # could be None, or a file handle, in which case the algorithm will deal with it
+            return path, lambda: None
+
+        fobj = _dispatch_dict[ext](path, mode=mode)
+        return fobj, lambda: fobj.close()
+
+    return argmap(_open_file, path_arg, try_finally=True)
+
+
+def nodes_or_number(which_args):
+    """Decorator to allow number of nodes or container of nodes.
+
+    With this decorator, the specified argument can be either a number or a container
+    of nodes. If it is a number, the nodes used are `range(n)`.
+    This allows `nx.complete_graph(50)` in place of `nx.complete_graph(list(range(50)))`.
+    And it also allows `nx.complete_graph(any_list_of_nodes)`.
+
+    Parameters
+    ----------
+    which_args : string or int or sequence of strings or ints
+        If string, the name of the argument to be treated.
+        If int, the index of the argument to be treated.
+        If more than one node argument is allowed, can be a list of locations.
+
+    Returns
+    -------
+    _nodes_or_numbers : function
+        Function which replaces int args with ranges.
+
+    Examples
+    --------
+    Decorate functions like this::
+
+        @nodes_or_number("nodes")
+        def empty_graph(nodes):
+            # nodes is converted to a list of nodes
+
+        @nodes_or_number(0)
+        def empty_graph(nodes):
+            # nodes is converted to a list of nodes
+
+        @nodes_or_number(["m1", "m2"])
+        def grid_2d_graph(m1, m2, periodic=False):
+            # m1 and m2 are each converted to a list of nodes
+
+        @nodes_or_number([0, 1])
+        def grid_2d_graph(m1, m2, periodic=False):
+            # m1 and m2 are each converted to a list of nodes
+
+        @nodes_or_number(1)
+        def full_rary_tree(r, n):
+            # presumably r is a number. It is not handled by this decorator.
+            # n is converted to a list of nodes
+    """
+
+    def _nodes_or_number(n):
+        try:
+            nodes = list(range(n))
+        except TypeError:
+            nodes = tuple(n)
+        else:
+            if n < 0:
+                raise nx.NetworkXError(f"Negative number of nodes not valid: {n}")
+        return (n, nodes)
+
+    try:
+        iter_wa = iter(which_args)
+    except TypeError:
+        iter_wa = (which_args,)
+
+    return argmap(_nodes_or_number, *iter_wa)
+
+
+def np_random_state(random_state_argument):
+    """Decorator to generate a numpy RandomState or Generator instance.
+
+    The decorator processes the argument indicated by `random_state_argument`
+    using :func:`nx.utils.create_random_state`.
+    The argument value can be a seed (integer), or a `numpy.random.RandomState`
+    or `numpy.random.Generator` instance or (`None` or `numpy.random`).
+    The latter two options use the global random number generator for `numpy.random`.
+
+    The returned instance is a `numpy.random.RandomState` or `numpy.random.Generator`.
+
+    Parameters
+    ----------
+    random_state_argument : string or int
+        The name or index of the argument to be converted
+        to a `numpy.random.RandomState` instance.
+
+    Returns
+    -------
+    _random_state : function
+        Function whose random_state keyword argument is a RandomState instance.
+
+    Examples
+    --------
+    Decorate functions like this::
+
+        @np_random_state("seed")
+        def random_float(seed=None):
+            return seed.rand()
+
+
+        @np_random_state(0)
+        def random_float(rng=None):
+            return rng.rand()
+
+
+        @np_random_state(1)
+        def random_array(dims, random_state=1):
+            return random_state.rand(*dims)
+
+    See Also
+    --------
+    py_random_state
+    """
+    return argmap(create_random_state, random_state_argument)
+
+
+def py_random_state(random_state_argument):
+    """Decorator to generate a random.Random instance (or equiv).
+
+    This decorator processes `random_state_argument` using
+    :func:`nx.utils.create_py_random_state`.
+    The input value can be a seed (integer), or a random number generator::
+
+        If int, return a random.Random instance set with seed=int.
+        If random.Random instance, return it.
+        If None or the `random` package, return the global random number
+        generator used by `random`.
+        If np.random package, or the default numpy RandomState instance,
+        return the default numpy random number generator wrapped in a
+        `PythonRandomViaNumpyBits` class.
+        If np.random.Generator instance, return it wrapped in a
+        `PythonRandomViaNumpyBits` class.
+
+        # Legacy options
+        If np.random.RandomState instance, return it wrapped in a
+        `PythonRandomInterface` class.
+        If a `PythonRandomInterface` instance, return it
+
+    Parameters
+    ----------
+    random_state_argument : string or int
+        The name of the argument or the index of the argument in args that is
+        to be converted to the random.Random instance or numpy.random.RandomState
+        instance that mimics basic methods of random.Random.
+
+    Returns
+    -------
+    _random_state : function
+        Function whose random_state_argument is converted to a Random instance.
+
+    Examples
+    --------
+    Decorate functions like this::
+
+        @py_random_state("random_state")
+        def random_float(random_state=None):
+            return random_state.rand()
+
+
+        @py_random_state(0)
+        def random_float(rng=None):
+            return rng.rand()
+
+
+        @py_random_state(1)
+        def random_array(dims, seed=12345):
+            return seed.rand(*dims)
+
+    See Also
+    --------
+    np_random_state
+    """
+
+    return argmap(create_py_random_state, random_state_argument)
+
+
+class argmap:
+    """A decorator to apply a map to arguments before calling the function
+
+    This class provides a decorator that maps (transforms) arguments of the function
+    before the function is called. Thus for example, we have similar code
+    in many functions to determine whether an argument is the number of nodes
+    to be created, or a list of nodes to be handled. The decorator provides
+    the code to accept either -- transforming the indicated argument into a
+    list of nodes before the actual function is called.
+
+    This decorator class allows us to process single or multiple arguments.
+    The arguments to be processed can be specified by string, naming the argument,
+    or by index, specifying the item in the args list.
+
+    Parameters
+    ----------
+    func : callable
+        The function to apply to arguments
+
+    *args : iterable of (int, str or tuple)
+        A list of parameters, specified either as strings (their names), ints
+        (numerical indices) or tuples, which may contain ints, strings, and
+        (recursively) tuples. Each indicates which parameters the decorator
+        should map. Tuples indicate that the map function takes (and returns)
+        multiple parameters in the same order and nested structure as indicated
+        here.
+
+    try_finally : bool (default: False)
+        When True, wrap the function call in a try-finally block with code
+        for the finally block created by `func`. This is used when the map
+        function constructs an object (like a file handle) that requires
+        post-processing (like closing).
+
+        Note: try_finally decorators cannot be used to decorate generator
+        functions.
+
+    Examples
+    --------
+    Most of these examples use `@argmap(...)` to apply the decorator to
+    the function defined on the next line.
+    In the NetworkX codebase however, `argmap` is used within a function to
+    construct a decorator. That is, the decorator defines a mapping function
+    and then uses `argmap` to build and return a decorated function.
+    A simple example is a decorator that specifies in which currency to report
+    money. The decorator (named `convert_to`) would be used like::
+
+        @convert_to("US_Dollars", "income")
+        def show_me_the_money(name, income):
+            print(f"{name} : {income}")
+
+    And the code to create the decorator might be::
+
+        def convert_to(currency, which_arg):
+            def _convert(amount):
+                if amount.currency != currency:
+                    amount = amount.to_currency(currency)
+                return amount
+
+            return argmap(_convert, which_arg)
+
+    Despite this common idiom for argmap, most of the following examples
+    use the `@argmap(...)` idiom to save space.
+
+    Here's an example use of argmap to sum the elements of two of the
+    function's arguments.
The decorated function::
+
+        @argmap(sum, "xlist", "zlist")
+        def foo(xlist, y, zlist):
+            return xlist - y + zlist
+
+    is syntactic sugar for::
+
+        def foo(xlist, y, zlist):
+            x = sum(xlist)
+            z = sum(zlist)
+            return x - y + z
+
+    and is equivalent to (using argument indexes)::
+
+        @argmap(sum, "xlist", 2)
+        def foo(xlist, y, zlist):
+            return xlist - y + zlist
+
+    or::
+
+        @argmap(sum, "zlist", 0)
+        def foo(xlist, y, zlist):
+            return xlist - y + zlist
+
+    Transforming functions can be applied to multiple arguments, such as::
+
+        def swap(x, y):
+            return y, x
+
+
+        # the 2-tuple tells argmap that the map `swap` has 2 inputs/outputs.
+        @argmap(swap, ("a", "b"))
+        def foo(a, b, c):
+            return a / b * c
+
+    is equivalent to::
+
+        def foo(a, b, c):
+            a, b = swap(a, b)
+            return a / b * c
+
+    More generally, the applied arguments can be nested tuples of strings or ints.
+    The syntax `@argmap(some_func, ("a", ("b", "c")))` would expect `some_func` to
+    accept 2 inputs with the second expected to be a 2-tuple. It should then return
+    2 outputs with the second a 2-tuple. The return values would replace inputs "a",
+    "b" and "c" respectively. Similarly for `@argmap(some_func, (0, ("b", 2)))`.
+
+    Also, note that an index larger than the number of named parameters is allowed
+    for variadic functions. For example::
+
+        def double(a):
+            return 2 * a
+
+
+        @argmap(double, 3)
+        def overflow(a, *args):
+            return a, args
+
+
+        print(overflow(1, 2, 3, 4, 5, 6))  # output is 1, (2, 3, 8, 5, 6)
+
+    **Try Finally**
+
+    Additionally, this `argmap` class can be used to create a decorator that
+    initiates a try...finally block. The decorator must be written to return
+    both the transformed argument and a closing function.
+    This feature was included to enable the `open_file` decorator which might
+    need to close the file or not depending on whether it had to open that file.
+    This feature uses the keyword-only `try_finally` argument to `@argmap`.
+
+    For example this map opens a file and then makes sure it is closed::
+
+        def open_file(fn):
+            f = open(fn)
+            return f, lambda: f.close()
+
+    The decorator applies that to the function `foo`::
+
+        @argmap(open_file, "file", try_finally=True)
+        def foo(file):
+            print(file.read())
+
+    is syntactic sugar for::
+
+        def foo(file):
+            file, close_file = open_file(file)
+            try:
+                print(file.read())
+            finally:
+                close_file()
+
+    and is equivalent to (using indexes)::
+
+        @argmap(open_file, 0, try_finally=True)
+        def foo(file):
+            print(file.read())
+
+    Here's an example of the try_finally feature used to create a decorator::
+
+        def my_closing_decorator(which_arg):
+            def _opener(path):
+                if isinstance(path, str):
+                    path = open(path)
+                    fclose = path.close
+                else:
+                    # assume `path` handles the closing
+                    fclose = lambda: None
+                return path, fclose
+
+            return argmap(_opener, which_arg, try_finally=True)
+
+    which can then be used as::
+
+        @my_closing_decorator("file")
+        def fancy_reader(file=None):
+            # this code doesn't need to worry about closing the file
+            print(file.read())
+
+    Decorators with try_finally = True cannot be used with generator functions,
+    because the `finally` block is evaluated before the generator is exhausted::
+
+        @argmap(open_file, "file", try_finally=True)
+        def file_to_lines(file):
+            for line in file.readlines():
+                yield line
+
+    is equivalent to::
+
+        def file_to_lines_wrapped(file):
+            for line in file.readlines():
+                yield line
+
+
+        def file_to_lines_wrapper(file):
+            try:
+                file = open_file(file)
+                return file_to_lines_wrapped(file)
+            finally:
+                file.close()
+
+    which behaves similarly to::
+
+        def file_to_lines_whoops(file):
+            file = open_file(file)
+            file.close()
+            for line in file.readlines():
+                yield line
+
+    because the `finally` block of `file_to_lines_wrapper` is executed before
+    the caller has a chance to exhaust the iterator.
+
+    Notes
+    -----
+    An object of this class is callable and intended to be used when
+    defining a decorator. Generally, a decorator takes a function as input
+    and constructs a function as output. Specifically, an `argmap` object
+    returns the input function decorated/wrapped so that specified arguments
+    are mapped (transformed) to new values before the decorated function is called.
+
+    As an overview, the argmap object returns a new function with all the
+    dunder values of the original function (like `__doc__`, `__name__`, etc).
+    Code for this decorated function is built based on the original function's
+    signature. It starts by mapping the input arguments to potentially new
+    values. Then it calls the decorated function with these new values in place
+    of the indicated arguments that have been mapped. The return value of the
+    original function is then returned. This new function is the function that
+    is actually called by the user.
+
+    Three additional features are provided.
+        1) The code is lazily compiled. That is, the new function is returned
+        as an object without the code compiled, but with all information
+        needed so it can be compiled upon its first invocation. This saves
+        time on import at the cost of additional time on the first call of
+        the function. Subsequent calls are then just as fast as normal.
+
+        2) If the "try_finally" keyword-only argument is True, a try block
+        follows each mapped argument, matched on the other side of the wrapped
+        call, by a finally block closing that mapping. We expect func to return
+        a 2-tuple: the mapped value and a function to be called in the finally
+        clause.
This feature was included so the `open_file` decorator could
+        provide a file handle to the decorated function and close the file handle
+        after the function call. It even keeps track of whether to close the file
+        handle or not based on whether it had to open the file or the input was
+        already open. So, the decorated function does not need to include any
+        code to open or close files.
+
+        3) The maps applied can process multiple arguments. For example,
+        you could swap two arguments using a mapping, or transform
+        them to their sum and their difference. This was included to allow
+        a decorator in the `quality.py` module that checks that an input
+        `partition` is a valid partition of the nodes of the input graph `G`.
+        In this example, the map has inputs `(G, partition)`. After checking
+        for a valid partition, the map either raises an exception or leaves
+        the inputs unchanged. Thus many functions that make this check can
+        use the decorator rather than copy the checking code into each function.
+        More complicated nested argument structures are described below.
+
+    The remaining notes describe the code structure and methods for this
+    class in broad terms to aid in understanding how to use it.
+
+    Instantiating an `argmap` object simply stores the mapping function and
+    the input identifiers of which arguments to map. The resulting decorator
+    is ready to use this map to decorate any function. When that object is
+    called (`argmap.__call__`, usually done via `@my_decorator`), a lazily
+    compiled thin wrapper of the decorated function is constructed and
+    wrapped with the necessary function dunder attributes like `__doc__`
+    and `__name__`. That thinly wrapped function is returned as the
+    decorated function. When that decorated function is called, the thin
+    wrapper of code calls `argmap._lazy_compile` which compiles the decorated
+    function (using `argmap.compile`) and replaces the code of the thin
+    wrapper with the newly compiled code. This saves the compilation step
+    on every import of networkx, at the cost of compiling upon the first call
+    to the decorated function.
+
+    When the decorated function is compiled, the code is recursively assembled
+    using the `argmap.assemble` method. The recursive nature is needed in
+    case of nested decorators. The result of the assembly is a number of
+    useful objects.
+
+      sig : the function signature of the original decorated function as
+          constructed by :func:`argmap.signature`. This is constructed
+          using `inspect.signature` but enhanced with attribute
+          strings `sig_def` and `sig_call`, and other information
+          specific to mapping arguments of this function.
+          This information is used to construct a string of code defining
+          the new decorated function.
+
+      wrapped_name : a unique internally used name constructed by argmap
+          for the decorated function.
+
+      functions : a dict of the functions used inside the code of this
+          decorated function, to be used as `globals` in `exec`.
+          This dict is recursively updated to allow for nested decorating.
+
+      mapblock : code (as a list of strings) to map the incoming argument
+          values to their mapped values.
+
+      finallys : code (as a list of strings) to provide the possibly nested
+          set of finally clauses if needed.
+
+      mutable_args : a bool indicating whether the `sig.args` tuple should be
+          converted to a list so mutation can occur.
+
+    After this recursive assembly process, the `argmap.compile` method
+    constructs code (as strings) to convert the tuple `sig.args` to a list
+    if needed.
It joins the defining code with appropriate indents and
+    compiles the result. Finally, this code is evaluated and the original
+    wrapper's implementation is replaced with the compiled version (see
+    `argmap._lazy_compile` for more details).
+
+    Other `argmap` methods include `_name` and `_count` which allow internally
+    generated names to be unique within a python session.
+    The methods `_flatten` and `_indent` process the nested lists of strings
+    into properly indented python code ready to be compiled.
+
+    More complicated nested tuples of arguments are also allowed, though
+    usually not used. For the simple 2 argument case, the argmap
+    input ("a", "b") implies the mapping function will take 2 arguments
+    and return a 2-tuple of mapped values. A more complicated example
+    with argmap input `("a", ("b", "c"))` requires the mapping function
+    take 2 inputs, with the second being a 2-tuple. It then must output
+    the 3 mapped values in the same nested structure `(newa, (newb, newc))`.
+    This level of generality is not often needed, but was convenient
+    to implement when handling the multiple arguments.
+
+    See Also
+    --------
+    not_implemented_for
+    open_file
+    nodes_or_number
+    py_random_state
+    networkx.algorithms.community.quality.require_partition
+
+    """
+
+    def __init__(self, func, *args, try_finally=False):
+        self._func = func
+        self._args = args
+        self._finally = try_finally
+
+    @staticmethod
+    def _lazy_compile(func):
+        """Compile the source of a wrapped function
+
+        Assemble and compile the decorated function, and intrusively replace its
+        code with the compiled version's. The thinly wrapped function becomes
+        the decorated function.
+
+        Parameters
+        ----------
+        func : callable
+            A function returned by argmap.__call__ which is in the process
+            of being called for the first time.
+
+        Returns
+        -------
+        func : callable
+            The same function, with a new __code__ object.
+
+        Notes
+        -----
+        It was observed in NetworkX issue #4732 [1] that the import time of
+        NetworkX was significantly bloated by the use of decorators: over half
+        of the import time was being spent decorating functions. This was
+        somewhat improved by a change made to the `decorator` library, at the
+        cost of a relatively heavy-weight call to `inspect.Signature.bind`
+        for each call to the decorated function.
+
+        The workaround we arrived at is to do minimal work at the time of
+        decoration. When the decorated function is called for the first time,
+        we compile a function with the same function signature as the wrapped
+        function. The resulting decorated function is faster than one made by
+        the `decorator` library, so that the overhead of the first call is
+        'paid off' after a small number of calls.
+
+        References
+        ----------
+
+        [1] https://github.com/networkx/networkx/issues/4732
+
+        """
+        real_func = func.__argmap__.compile(func.__wrapped__)
+        func.__code__ = real_func.__code__
+        func.__globals__.update(real_func.__globals__)
+        func.__dict__.update(real_func.__dict__)
+        return func
+
+    def __call__(self, f):
+        """Construct a lazily decorated wrapper of f.
+
+        The decorated function will be compiled when it is called for the first time,
+        and it will replace its own __code__ object so subsequent calls are fast.
+
+        Parameters
+        ----------
+        f : callable
+            A function to be decorated.
+
+        Returns
+        -------
+        func : callable
+            The decorated function.
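+
+        For example (a minimal sketch), mapping the first positional argument
+        through a doubling function::
+
+            >>> @argmap(lambda x: 2 * x, 0)
+            ... def double_input(x):
+            ...     return x
+            >>> double_input(3)
+            6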
+ + See Also + -------- + argmap._lazy_compile + """ + + def func(*args, __wrapper=None, **kwargs): + return argmap._lazy_compile(__wrapper)(*args, **kwargs) + + # standard function-wrapping stuff + func.__name__ = f.__name__ + func.__doc__ = f.__doc__ + func.__defaults__ = f.__defaults__ + func.__kwdefaults__.update(f.__kwdefaults__ or {}) + func.__module__ = f.__module__ + func.__qualname__ = f.__qualname__ + func.__dict__.update(f.__dict__) + func.__wrapped__ = f + + # now that we've wrapped f, we may have picked up some __dict__ or + # __kwdefaults__ items that were set by a previous argmap. Thus, we set + # these values after those update() calls. + + # If we attempt to access func from within itself, that happens through + # a closure -- which trips an error when we replace func.__code__. The + # standard workaround for functions which can't see themselves is to use + # a Y-combinator, as we do here. + func.__kwdefaults__["_argmap__wrapper"] = func + + # this self-reference is here because functools.wraps preserves + # everything in __dict__, and we don't want to mistake a non-argmap + # wrapper for an argmap wrapper + func.__self__ = func + + # this is used to variously call self.assemble and self.compile + func.__argmap__ = self + + if hasattr(f, "__argmap__"): + func.__is_generator = f.__is_generator + else: + func.__is_generator = inspect.isgeneratorfunction(f) + + if self._finally and func.__is_generator: + raise nx.NetworkXError("argmap cannot decorate generators with try_finally") + + return func + + __count = 0 + + @classmethod + def _count(cls): + """Maintain a globally-unique identifier for function names and "file" names + + Note that this counter is a class method reporting a class variable + so the count is unique within a Python session. It could differ from + session to session for a specific decorator depending on the order + that the decorators are created. But that doesn't disrupt `argmap`. + + This is used in two places: to construct unique variable names + in the `_name` method and to construct unique fictitious filenames + in the `_compile` method. + + Returns + ------- + count : int + An integer unique to this Python session (simply counts from zero) + """ + cls.__count += 1 + return cls.__count + + _bad_chars = re.compile("[^a-zA-Z0-9_]") + + @classmethod + def _name(cls, f): + """Mangle the name of a function to be unique but somewhat human-readable + + The names are unique within a Python session and set using `_count`. + + Parameters + ---------- + f : str or object + + Returns + ------- + name : str + The mangled version of `f.__name__` (if `f.__name__` exists) or `f` + + """ + f = f.__name__ if hasattr(f, "__name__") else f + fname = re.sub(cls._bad_chars, "_", f) + return f"argmap_{fname}_{cls._count()}" + + def compile(self, f): + """Compile the decorated function. + + Called once for a given decorated function -- collects the code from all + argmap decorators in the stack, and compiles the decorated function. + + Much of the work done here uses the `assemble` method to allow recursive + treatment of multiple argmap decorators on a single decorated function. + That flattens the argmap decorators, collects the source code to construct + a single decorated function, then compiles/executes/returns that function. + + The source code for the decorated function is stored as an attribute + `_code` on the function object itself. 
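+
+        Because compilation is lazy, `_code` only appears on the public wrapper
+        after the decorated function has been called at least once (see
+        `argmap._lazy_compile`, which copies it over when it swaps in the
+        compiled `__code__`).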
+
+        Note that Python's `compile` function requires a filename, but this
+        code is constructed without a file, so a fictitious filename is used
+        to describe where the function comes from. The name is something like:
+        "argmap compilation 4".
+
+        Parameters
+        ----------
+        f : callable
+            The function to be decorated
+
+        Returns
+        -------
+        func : callable
+            The decorated function
+
+        """
+        sig, wrapped_name, functions, mapblock, finallys, mutable_args = self.assemble(
+            f
+        )
+
+        call = f"{sig.call_sig.format(wrapped_name)}#"
+        mut_args = f"{sig.args} = list({sig.args})" if mutable_args else ""
+        body = argmap._indent(sig.def_sig, mut_args, mapblock, call, finallys)
+        code = "\n".join(body)
+
+        locl = {}
+        globl = dict(functions.values())
+        filename = f"{self.__class__} compilation {self._count()}"
+        compiled = compile(code, filename, "exec")
+        exec(compiled, globl, locl)
+        func = locl[sig.name]
+        func._code = code
+        return func
+
+    def assemble(self, f):
+        """Collects components of the source for the decorated function wrapping f.
+
+        If `f` has multiple argmap decorators, we recursively assemble the stack of
+        decorators into a single flattened function.
+
+        This method is part of the `compile` method's process, yet separated
+        from that method to allow recursive processing. The outputs are
+        strings, dictionaries and lists that collect needed info to
+        flatten any nested argmap-decoration.
+
+        Parameters
+        ----------
+        f : callable
+            The function to be decorated. If f is argmapped, we assemble it.
+
+        Returns
+        -------
+        sig : argmap.Signature
+            The function signature as an `argmap.Signature` object.
+        wrapped_name : str
+            The mangled name used to represent the wrapped function in the code
+            being assembled.
+        functions : dict
+            A dictionary mapping id(g) -> (mangled_name(g), g) for functions g
+            referred to in the code being assembled. These need to be present
+            in the ``globals`` scope of ``exec`` when defining the decorated
+            function.
+        mapblock : list of lists and/or strings
+            Code that implements mapping of parameters including any try blocks
+            if needed. This code will precede the decorated function call.
+        finallys : list of lists and/or strings
+            Code that implements the finally blocks to post-process the
+            arguments (usually close any files if needed) after the
+            decorated function is called.
+        mutable_args : bool
+            True if the decorator needs to modify positional arguments
+            via their indices. The compile method then turns the argument
+            tuple into a list so that the arguments can be modified.
+        """
+
+        # first, we check if f is already argmapped -- if that's the case,
+        # build up the function recursively.
+        # > mapblock is generally a list of function calls of the sort
+        #     arg = func(arg)
+        # in addition to some try-blocks if needed.
+        # > finallys is a recursive list of finally blocks of the sort
+        #     finally:
+        #         close_func_1()
+        #         finally:
+        #             close_func_2()
+        # > functions is a dict of functions used in the scope of our decorated
+        # function. It will be used to construct globals used in compilation.
+        # We make functions[id(f)] = name_of_f, f to ensure that a given
+        # function is stored and named exactly once even if called by
+        # nested decorators.
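+        # As a sketch (with hypothetical mangled names), a single decorator
+        # mapping parameter `G` assembles pieces that compile to roughly:
+        #
+        #     def argmap_f_1(G):
+        #         G = argmap_mapper_2(G)
+        #         return argmap_f_wrapped_3(G)
+        #
+        # and, with `try_finally=True`, the mapper also returns a cleanup
+        # function which lands in a finally block:
+        #
+        #     def argmap_f_1(G):
+        #         G, argmap_G_4 = argmap_mapper_2(G)
+        #         try:
+        #             return argmap_f_wrapped_3(G)
+        #         finally:
+        #             argmap_G_4()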
+ if hasattr(f, "__argmap__") and f.__self__ is f: + ( + sig, + wrapped_name, + functions, + mapblock, + finallys, + mutable_args, + ) = f.__argmap__.assemble(f.__wrapped__) + functions = dict(functions) # shallow-copy just in case + else: + sig = self.signature(f) + wrapped_name = self._name(f) + mapblock, finallys = [], [] + functions = {id(f): (wrapped_name, f)} + mutable_args = False + + if id(self._func) in functions: + fname, _ = functions[id(self._func)] + else: + fname, _ = functions[id(self._func)] = self._name(self._func), self._func + + # this is a bit complicated -- we can call functions with a variety of + # nested arguments, so long as their input and output are tuples with + # the same nested structure. e.g. ("a", "b") maps arguments a and b. + # A more complicated nesting like (0, (3, 4)) maps arguments 0, 3, 4 + # expecting the mapping to output new values in the same nested shape. + # The ability to argmap multiple arguments was necessary for + # the decorator `nx.algorithms.community.quality.require_partition`, and + # while we're not taking full advantage of the ability to handle + # multiply-nested tuples, it was convenient to implement this in + # generality because the recursive call to `get_name` is necessary in + # any case. + applied = set() + + def get_name(arg, first=True): + nonlocal mutable_args + if isinstance(arg, tuple): + name = ", ".join(get_name(x, False) for x in arg) + return name if first else f"({name})" + if arg in applied: + raise nx.NetworkXError(f"argument {arg} is specified multiple times") + applied.add(arg) + if arg in sig.names: + return sig.names[arg] + elif isinstance(arg, str): + if sig.kwargs is None: + raise nx.NetworkXError( + f"name {arg} is not a named parameter and this function doesn't have kwargs" + ) + return f"{sig.kwargs}[{arg!r}]" + else: + if sig.args is None: + raise nx.NetworkXError( + f"index {arg} not a parameter index and this function doesn't have args" + ) + mutable_args = True + return f"{sig.args}[{arg - sig.n_positional}]" + + if self._finally: + # here's where we handle try_finally decorators. Such a decorator + # returns a mapped argument and a function to be called in a + # finally block. This feature was required by the open_file + # decorator. The below generates the code + # + # name, final = func(name) #<--append to mapblock + # try: #<--append to mapblock + # ... more argmapping and try blocks + # return WRAPPED_FUNCTION(...) + # ... more finally blocks + # finally: #<--prepend to finallys + # final() #<--prepend to finallys + # + for a in self._args: + name = get_name(a) + final = self._name(name) + mapblock.append(f"{name}, {final} = {fname}({name})") + mapblock.append("try:") + finallys = ["finally:", f"{final}()#", "#", finallys] + else: + mapblock.extend( + f"{name} = {fname}({name})" for name in map(get_name, self._args) + ) + + return sig, wrapped_name, functions, mapblock, finallys, mutable_args + + @classmethod + def signature(cls, f): + r"""Construct a Signature object describing `f` + + Compute a Signature so that we can write a function wrapping f with + the same signature and call-type. 
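+
+        As a sketch of what gets recorded (for a hypothetical function): given
+        ``def f(a, b=1, *args, c, **kw)``, the Signature holds roughly
+        ``def_sig = "def argmap_f_N(a, b, *args, c, **kw):"``,
+        ``call_sig = "return {}(a, b, *args, c = c, **kw)"``,
+        ``names = {0: "a", "a": "a", 1: "b", "b": "b", 3: "c", "c": "c"}``,
+        ``n_positional = 2``, ``args = "args"`` and ``kwargs = "kw"``.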
+ + Parameters + ---------- + f : callable + A function to be decorated + + Returns + ------- + sig : argmap.Signature + The Signature of f + + Notes + ----- + The Signature is a namedtuple with names: + + name : a unique version of the name of the decorated function + signature : the inspect.signature of the decorated function + def_sig : a string used as code to define the new function + call_sig : a string used as code to call the decorated function + names : a dict keyed by argument name and index to the argument's name + n_positional : the number of positional arguments in the signature + args : the name of the VAR_POSITIONAL argument if any, i.e. \*theseargs + kwargs : the name of the VAR_KEYWORDS argument if any, i.e. \*\*kwargs + + These named attributes of the signature are used in `assemble` and `compile` + to construct a string of source code for the decorated function. + + """ + sig = inspect.signature(f, follow_wrapped=False) + def_sig = [] + call_sig = [] + names = {} + + kind = None + args = None + kwargs = None + npos = 0 + for i, param in enumerate(sig.parameters.values()): + # parameters can be position-only, keyword-or-position, keyword-only + # in any combination, but only in the order as above. we do edge + # detection to add the appropriate punctuation + prev = kind + kind = param.kind + if prev == param.POSITIONAL_ONLY != kind: + # the last token was position-only, but this one isn't + def_sig.append("/") + if ( + param.VAR_POSITIONAL + != prev + != param.KEYWORD_ONLY + == kind + != param.VAR_POSITIONAL + ): + # param is the first keyword-only arg and isn't starred + def_sig.append("*") + + # star arguments as appropriate + if kind == param.VAR_POSITIONAL: + name = "*" + param.name + args = param.name + count = 0 + elif kind == param.VAR_KEYWORD: + name = "**" + param.name + kwargs = param.name + count = 0 + else: + names[i] = names[param.name] = param.name + name = param.name + count = 1 + + # assign to keyword-only args in the function call + if kind == param.KEYWORD_ONLY: + call_sig.append(f"{name} = {name}") + else: + npos += count + call_sig.append(name) + + def_sig.append(name) + + fname = cls._name(f) + def_sig = f"def {fname}({', '.join(def_sig)}):" + + call_sig = f"return {{}}({', '.join(call_sig)})" + + return cls.Signature(fname, sig, def_sig, call_sig, names, npos, args, kwargs) + + Signature = collections.namedtuple( + "Signature", + [ + "name", + "signature", + "def_sig", + "call_sig", + "names", + "n_positional", + "args", + "kwargs", + ], + ) + + @staticmethod + def _flatten(nestlist, visited): + """flattens a recursive list of lists that doesn't have cyclic references + + Parameters + ---------- + nestlist : iterable + A recursive list of objects to be flattened into a single iterable + + visited : set + A set of object ids which have been walked -- initialize with an + empty set + + Yields + ------ + Non-list objects contained in nestlist + + """ + for thing in nestlist: + if isinstance(thing, list): + if id(thing) in visited: + raise ValueError("A cycle was found in nestlist. Be a tree.") + else: + visited.add(id(thing)) + yield from argmap._flatten(thing, visited) + else: + yield thing + + _tabs = " " * 64 + + @staticmethod + def _indent(*lines): + """Indent list of code lines to make executable Python code + + Indents a tree-recursive list of strings, following the rule that one + space is added to the tab after a line that ends in a colon, and one is + removed after a line that ends in an hashmark. 
+
+        Parameters
+        ----------
+        *lines : lists and/or strings
+            A recursive list of strings to be assembled into properly indented
+            code.
+
+        Returns
+        -------
+        code : str
+
+        Examples
+        --------
+
+            argmap._indent(*["try:", "try:", "pass#", "finally:", "pass#", "#",
+                             "finally:", "pass#"])
+
+        renders to
+
+            '''try:
+             try:
+              pass#
+             finally:
+              pass#
+             #
+            finally:
+             pass#'''
+        """
+        depth = 0
+        for line in argmap._flatten(lines, set()):
+            yield f"{argmap._tabs[:depth]}{line}"
+            depth += (line[-1:] == ":") - (line[-1:] == "#")
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/heaps.py b/.venv/lib/python3.12/site-packages/networkx/utils/heaps.py
new file mode 100644
index 0000000..2d67dfd
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/heaps.py
@@ -0,0 +1,338 @@
+"""
+Min-heaps.
+"""
+
+from heapq import heappop, heappush
+from itertools import count
+
+import networkx as nx
+
+__all__ = ["MinHeap", "PairingHeap", "BinaryHeap"]
+
+
+class MinHeap:
+    """Base class for min-heaps.
+
+    A MinHeap stores a collection of key-value pairs ordered by their values.
+    It supports querying the minimum pair, inserting a new pair, decreasing the
+    value in an existing pair and deleting the minimum pair.
+    """
+
+    class _Item:
+        """Used by subclasses to represent a key-value pair."""
+
+        __slots__ = ("key", "value")
+
+        def __init__(self, key, value):
+            self.key = key
+            self.value = value
+
+        def __repr__(self):
+            return repr((self.key, self.value))
+
+    def __init__(self):
+        """Initialize a new min-heap."""
+        self._dict = {}
+
+    def min(self):
+        """Query the minimum key-value pair.
+
+        Returns
+        -------
+        key, value : tuple
+            The key-value pair with the minimum value in the heap.
+
+        Raises
+        ------
+        NetworkXError
+            If the heap is empty.
+        """
+        raise NotImplementedError
+
+    def pop(self):
+        """Delete the minimum pair in the heap.
+
+        Returns
+        -------
+        key, value : tuple
+            The key-value pair with the minimum value in the heap.
+
+        Raises
+        ------
+        NetworkXError
+            If the heap is empty.
+        """
+        raise NotImplementedError
+
+    def get(self, key, default=None):
+        """Returns the value associated with a key.
+
+        Parameters
+        ----------
+        key : hashable object
+            The key to be looked up.
+
+        default : object
+            Default value to return if the key is not present in the heap.
+            Default value: None.
+
+        Returns
+        -------
+        value : object.
+            The value associated with the key.
+        """
+        raise NotImplementedError
+
+    def insert(self, key, value, allow_increase=False):
+        """Insert a new key-value pair or modify the value in an existing
+        pair.
+
+        Parameters
+        ----------
+        key : hashable object
+            The key.
+
+        value : object comparable with existing values.
+            The value.
+
+        allow_increase : bool
+            Whether the value is allowed to increase. If False, attempts to
+            increase an existing value have no effect. Default value: False.
+
+        Returns
+        -------
+        decreased : bool
+            True if a pair is inserted or the existing value is decreased.
+        """
+        raise NotImplementedError
+
+    def __nonzero__(self):
+        """Returns whether the heap is nonempty."""
+        return bool(self._dict)
+
+    def __bool__(self):
+        """Returns whether the heap is nonempty."""
+        return bool(self._dict)
+
+    def __len__(self):
+        """Returns the number of key-value pairs in the heap."""
+        return len(self._dict)
+
+    def __contains__(self, key):
+        """Returns whether a key exists in the heap.
+
+        Parameters
+        ----------
+        key : any hashable object.
+            The key to be looked up.
+ """ + return key in self._dict + + +class PairingHeap(MinHeap): + """A pairing heap.""" + + class _Node(MinHeap._Item): + """A node in a pairing heap. + + A tree in a pairing heap is stored using the left-child, right-sibling + representation. + """ + + __slots__ = ("left", "next", "prev", "parent") + + def __init__(self, key, value): + super().__init__(key, value) + # The leftmost child. + self.left = None + # The next sibling. + self.next = None + # The previous sibling. + self.prev = None + # The parent. + self.parent = None + + def __init__(self): + """Initialize a pairing heap.""" + super().__init__() + self._root = None + + def min(self): + if self._root is None: + raise nx.NetworkXError("heap is empty.") + return (self._root.key, self._root.value) + + def pop(self): + if self._root is None: + raise nx.NetworkXError("heap is empty.") + min_node = self._root + self._root = self._merge_children(self._root) + del self._dict[min_node.key] + return (min_node.key, min_node.value) + + def get(self, key, default=None): + node = self._dict.get(key) + return node.value if node is not None else default + + def insert(self, key, value, allow_increase=False): + node = self._dict.get(key) + root = self._root + if node is not None: + if value < node.value: + node.value = value + if node is not root and value < node.parent.value: + self._cut(node) + self._root = self._link(root, node) + return True + elif allow_increase and value > node.value: + node.value = value + child = self._merge_children(node) + # Nonstandard step: Link the merged subtree with the root. See + # below for the standard step. + if child is not None: + self._root = self._link(self._root, child) + # Standard step: Perform a decrease followed by a pop as if the + # value were the smallest in the heap. Then insert the new + # value into the heap. + # if node is not root: + # self._cut(node) + # if child is not None: + # root = self._link(root, child) + # self._root = self._link(root, node) + # else: + # self._root = (self._link(node, child) + # if child is not None else node) + return False + else: + # Insert a new key. + node = self._Node(key, value) + self._dict[key] = node + self._root = self._link(root, node) if root is not None else node + return True + + def _link(self, root, other): + """Link two nodes, making the one with the smaller value the parent of + the other. + """ + if other.value < root.value: + root, other = other, root + next = root.left + other.next = next + if next is not None: + next.prev = other + other.prev = None + root.left = other + other.parent = root + return root + + def _merge_children(self, root): + """Merge the subtrees of the root using the standard two-pass method. + The resulting subtree is detached from the root. + """ + node = root.left + root.left = None + if node is not None: + link = self._link + # Pass 1: Merge pairs of consecutive subtrees from left to right. + # At the end of the pass, only the prev pointers of the resulting + # subtrees have meaningful values. The other pointers will be fixed + # in pass 2. + prev = None + while True: + next = node.next + if next is None: + node.prev = prev + break + next_next = next.next + node = link(node, next) + node.prev = prev + prev = node + if next_next is None: + break + node = next_next + # Pass 2: Successively merge the subtrees produced by pass 1 from + # right to left with the rightmost one. + prev = node.prev + while prev is not None: + prev_prev = prev.prev + node = link(prev, node) + prev = prev_prev + # Now node can become the new root. 
It has no parent or siblings.
+            node.prev = None
+            node.next = None
+            node.parent = None
+        return node
+
+    def _cut(self, node):
+        """Cut a node from its parent."""
+        prev = node.prev
+        next = node.next
+        if prev is not None:
+            prev.next = next
+        else:
+            node.parent.left = next
+        node.prev = None
+        if next is not None:
+            next.prev = prev
+        node.next = None
+        node.parent = None
+
+
+class BinaryHeap(MinHeap):
+    """A binary heap."""
+
+    def __init__(self):
+        """Initialize a binary heap."""
+        super().__init__()
+        self._heap = []
+        self._count = count()
+
+    def min(self):
+        dict = self._dict
+        if not dict:
+            raise nx.NetworkXError("heap is empty")
+        heap = self._heap
+        # Repeatedly remove stale key-value pairs until an up-to-date one is
+        # found.
+        while True:
+            value, _, key = heap[0]
+            if key in dict and value == dict[key]:
+                break
+            heappop(heap)
+        return (key, value)
+
+    def pop(self):
+        dict = self._dict
+        if not dict:
+            raise nx.NetworkXError("heap is empty")
+        heap = self._heap
+        # Repeatedly remove stale key-value pairs until an up-to-date one is
+        # found.
+        while True:
+            value, _, key = heap[0]
+            heappop(heap)
+            if key in dict and value == dict[key]:
+                break
+        del dict[key]
+        return (key, value)
+
+    def get(self, key, default=None):
+        return self._dict.get(key, default)
+
+    def insert(self, key, value, allow_increase=False):
+        dict = self._dict
+        if key in dict:
+            old_value = dict[key]
+            if value < old_value or (allow_increase and value > old_value):
+                # Since there is no way to efficiently obtain the location of a
+                # key-value pair in the heap, insert a new pair even if ones
+                # with the same key may already be present. Deem the old ones
+                # as stale and skip them when the minimum pair is queried.
+                dict[key] = value
+                heappush(self._heap, (value, next(self._count), key))
+                return value < old_value
+            return False
+        else:
+            dict[key] = value
+            heappush(self._heap, (value, next(self._count), key))
+            return True
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/mapped_queue.py b/.venv/lib/python3.12/site-packages/networkx/utils/mapped_queue.py
new file mode 100644
index 0000000..0dcea36
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/mapped_queue.py
@@ -0,0 +1,297 @@
+"""Priority queue class with updatable priorities."""
+
+import heapq
+
+__all__ = ["MappedQueue"]
+
+
+class _HeapElement:
+    """This proxy class separates the heap element from its priority.
+
+    The idea is that using a 2-tuple (priority, element) works
+    for sorting, but not for dict lookup because priorities are
+    often floating point values so round-off can mess up equality.
+
+    So, we need inequalities to look at the priority (for sorting)
+    and equality (and hash) to look at the element to enable
+    updates to the priority.
+
+    Unfortunately, this class can be tricky to work with if you forget that
+    `__lt__` compares the priority while `__eq__` compares the element.
+    In `greedy_modularity_communities()` the following code is
+    used to check that two _HeapElements differ in either element or priority:
+
+        if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
+
+    If the priorities are the same, this implementation uses the element
+    as a tiebreaker. This provides compatibility with older systems that
+    use tuples to combine priority and elements.
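+
+    As a minimal sketch of these semantics (values chosen for illustration):
+
+        >>> a = _HeapElement(1, "x")
+        >>> b = _HeapElement(2, "x")
+        >>> a < b  # ordering compares priorities
+        True
+        >>> a == b  # equality compares elements
+        True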
+ """ + + __slots__ = ["priority", "element", "_hash"] + + def __init__(self, priority, element): + self.priority = priority + self.element = element + self._hash = hash(element) + + def __lt__(self, other): + try: + other_priority = other.priority + except AttributeError: + return self.priority < other + # assume comparing to another _HeapElement + if self.priority == other_priority: + try: + return self.element < other.element + except TypeError as err: + raise TypeError( + "Consider using a tuple, with a priority value that can be compared." + ) + return self.priority < other_priority + + def __gt__(self, other): + try: + other_priority = other.priority + except AttributeError: + return self.priority > other + # assume comparing to another _HeapElement + if self.priority == other_priority: + try: + return self.element > other.element + except TypeError as err: + raise TypeError( + "Consider using a tuple, with a priority value that can be compared." + ) + return self.priority > other_priority + + def __eq__(self, other): + try: + return self.element == other.element + except AttributeError: + return self.element == other + + def __hash__(self): + return self._hash + + def __getitem__(self, indx): + return self.priority if indx == 0 else self.element[indx - 1] + + def __iter__(self): + yield self.priority + try: + yield from self.element + except TypeError: + yield self.element + + def __repr__(self): + return f"_HeapElement({self.priority}, {self.element})" + + +class MappedQueue: + """The MappedQueue class implements a min-heap with removal and update-priority. + + The min heap uses heapq as well as custom written _siftup and _siftdown + methods to allow the heap positions to be tracked by an additional dict + keyed by element to position. The smallest element can be popped in O(1) time, + new elements can be pushed in O(log n) time, and any element can be removed + or updated in O(log n) time. The queue cannot contain duplicate elements + and an attempt to push an element already in the queue will have no effect. + + MappedQueue complements the heapq package from the python standard + library. While MappedQueue is designed for maximum compatibility with + heapq, it adds element removal, lookup, and priority update. + + Parameters + ---------- + data : dict or iterable + + Examples + -------- + + A `MappedQueue` can be created empty, or optionally, given a dictionary + of initial elements and priorities. The methods `push`, `pop`, + `remove`, and `update` operate on the queue. + + >>> colors_nm = {"red": 665, "blue": 470, "green": 550} + >>> q = MappedQueue(colors_nm) + >>> q.remove("red") + >>> q.update("green", "violet", 400) + >>> q.push("indigo", 425) + True + >>> [q.pop().element for i in range(len(q.heap))] + ['violet', 'indigo', 'blue'] + + A `MappedQueue` can also be initialized with a list or other iterable. The priority is assumed + to be the sort order of the items in the list. + + >>> q = MappedQueue([916, 50, 4609, 493, 237]) + >>> q.remove(493) + >>> q.update(237, 1117) + >>> [q.pop() for i in range(len(q.heap))] + [50, 916, 1117, 4609] + + An exception is raised if the elements are not comparable. + + >>> q = MappedQueue([100, "a"]) + Traceback (most recent call last): + ... + TypeError: '<' not supported between instances of 'int' and 'str' + + To avoid the exception, use a dictionary to assign priorities to the elements. + + >>> q = MappedQueue({100: 0, "a": 1}) + + References + ---------- + .. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001). 
+ Introduction to algorithms second edition. + .. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3). + Pearson Education. + """ + + def __init__(self, data=None): + """Priority queue class with updatable priorities.""" + if data is None: + self.heap = [] + elif isinstance(data, dict): + self.heap = [_HeapElement(v, k) for k, v in data.items()] + else: + self.heap = list(data) + self.position = {} + self._heapify() + + def _heapify(self): + """Restore heap invariant and recalculate map.""" + heapq.heapify(self.heap) + self.position = {elt: pos for pos, elt in enumerate(self.heap)} + if len(self.heap) != len(self.position): + raise AssertionError("Heap contains duplicate elements") + + def __len__(self): + return len(self.heap) + + def push(self, elt, priority=None): + """Add an element to the queue.""" + if priority is not None: + elt = _HeapElement(priority, elt) + # If element is already in queue, do nothing + if elt in self.position: + return False + # Add element to heap and dict + pos = len(self.heap) + self.heap.append(elt) + self.position[elt] = pos + # Restore invariant by sifting down + self._siftdown(0, pos) + return True + + def pop(self): + """Remove and return the smallest element in the queue.""" + # Remove smallest element + elt = self.heap[0] + del self.position[elt] + # If elt is last item, remove and return + if len(self.heap) == 1: + self.heap.pop() + return elt + # Replace root with last element + last = self.heap.pop() + self.heap[0] = last + self.position[last] = 0 + # Restore invariant by sifting up + self._siftup(0) + # Return smallest element + return elt + + def update(self, elt, new, priority=None): + """Replace an element in the queue with a new one.""" + if priority is not None: + new = _HeapElement(priority, new) + # Replace + pos = self.position[elt] + self.heap[pos] = new + del self.position[elt] + self.position[new] = pos + # Restore invariant by sifting up + self._siftup(pos) + + def remove(self, elt): + """Remove an element from the queue.""" + # Find and remove element + try: + pos = self.position[elt] + del self.position[elt] + except KeyError: + # Not in queue + raise + # If elt is last item, remove and return + if pos == len(self.heap) - 1: + self.heap.pop() + return + # Replace elt with last element + last = self.heap.pop() + self.heap[pos] = last + self.position[last] = pos + # Restore invariant by sifting up + self._siftup(pos) + + def _siftup(self, pos): + """Move smaller child up until hitting a leaf. + + Built to mimic code for heapq._siftup + only updating position dict too. + """ + heap, position = self.heap, self.position + end_pos = len(heap) + startpos = pos + newitem = heap[pos] + # Shift up the smaller child until hitting a leaf + child_pos = (pos << 1) + 1 # start with leftmost child position + while child_pos < end_pos: + # Set child_pos to index of smaller child. + child = heap[child_pos] + right_pos = child_pos + 1 + if right_pos < end_pos: + right = heap[right_pos] + if not child < right: + child = right + child_pos = right_pos + # Move the smaller child up. + heap[pos] = child + position[child] = pos + pos = child_pos + child_pos = (pos << 1) + 1 + # pos is a leaf position. Put newitem there, and bubble it up + # to its final resting place (by sifting its parents down). 
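+        # For orientation, the implicit binary-tree indexing used here:
+        # children of position p sit at 2*p + 1 and 2*p + 2 (hence
+        # (pos << 1) + 1), and the parent of p sits at (p - 1) // 2
+        # (hence (pos - 1) >> 1); e.g. p = 3 has children 7, 8 and parent 1.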
+ while pos > 0: + parent_pos = (pos - 1) >> 1 + parent = heap[parent_pos] + if not newitem < parent: + break + heap[pos] = parent + position[parent] = pos + pos = parent_pos + heap[pos] = newitem + position[newitem] = pos + + def _siftdown(self, start_pos, pos): + """Restore invariant. keep swapping with parent until smaller. + + Built to mimic code for heapq._siftdown + only updating position dict too. + """ + heap, position = self.heap, self.position + newitem = heap[pos] + # Follow the path to the root, moving parents down until finding a place + # newitem fits. + while pos > start_pos: + parent_pos = (pos - 1) >> 1 + parent = heap[parent_pos] + if not newitem < parent: + break + heap[pos] = parent + position[parent] = pos + pos = parent_pos + heap[pos] = newitem + position[newitem] = pos diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/misc.py b/.venv/lib/python3.12/site-packages/networkx/utils/misc.py new file mode 100644 index 0000000..73b5065 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/misc.py @@ -0,0 +1,651 @@ +""" +Miscellaneous Helpers for NetworkX. + +These are not imported into the base networkx namespace but +can be accessed, for example, as + +>>> import networkx as nx +>>> nx.utils.make_list_of_ints({1, 2, 3}) +[1, 2, 3] +>>> nx.utils.arbitrary_element({5, 1, 7}) # doctest: +SKIP +1 +""" + +import random +import warnings +from collections import defaultdict +from collections.abc import Iterable, Iterator, Sized +from itertools import chain, tee + +import networkx as nx + +__all__ = [ + "flatten", + "make_list_of_ints", + "dict_to_numpy_array", + "arbitrary_element", + "pairwise", + "groups", + "create_random_state", + "create_py_random_state", + "PythonRandomInterface", + "PythonRandomViaNumpyBits", + "nodes_equal", + "edges_equal", + "graphs_equal", + "_clear_cache", +] + + +# some cookbook stuff +# used in deciding whether something is a bunch of nodes, edges, etc. +# see G.add_nodes and others in Graph Class in networkx/base.py + + +def flatten(obj, result=None): + """Return flattened version of (possibly nested) iterable object.""" + if not isinstance(obj, Iterable | Sized) or isinstance(obj, str): + return obj + if result is None: + result = [] + for item in obj: + if not isinstance(item, Iterable | Sized) or isinstance(item, str): + result.append(item) + else: + flatten(item, result) + return tuple(result) + + +def make_list_of_ints(sequence): + """Return list of ints from sequence of integral numbers. + + All elements of the sequence must satisfy int(element) == element + or a ValueError is raised. Sequence is iterated through once. + + If sequence is a list, the non-int values are replaced with ints. + So, no new list is created + """ + if not isinstance(sequence, list): + result = [] + for i in sequence: + errmsg = f"sequence is not all integers: {i}" + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + result.append(ii) + return result + # original sequence is a list... 
in-place conversion to ints + for indx, i in enumerate(sequence): + errmsg = f"sequence is not all integers: {i}" + if isinstance(i, int): + continue + try: + ii = int(i) + except ValueError: + raise nx.NetworkXError(errmsg) from None + if ii != i: + raise nx.NetworkXError(errmsg) + sequence[indx] = ii + return sequence + + +def dict_to_numpy_array(d, mapping=None): + """Convert a dictionary of dictionaries to a numpy array + with optional mapping.""" + try: + return _dict_to_numpy_array2(d, mapping) + except (AttributeError, TypeError): + # AttributeError is when no mapping was provided and v.keys() fails. + # TypeError is when a mapping was provided and d[k1][k2] fails. + return _dict_to_numpy_array1(d, mapping) + + +def _dict_to_numpy_array2(d, mapping=None): + """Convert a dictionary of dictionaries to a 2d numpy array + with optional mapping. + + """ + import numpy as np + + if mapping is None: + s = set(d.keys()) + for k, v in d.items(): + s.update(v.keys()) + mapping = dict(zip(s, range(len(s)))) + n = len(mapping) + a = np.zeros((n, n)) + for k1, i in mapping.items(): + for k2, j in mapping.items(): + try: + a[i, j] = d[k1][k2] + except KeyError: + pass + return a + + +def _dict_to_numpy_array1(d, mapping=None): + """Convert a dictionary of numbers to a 1d numpy array with optional mapping.""" + import numpy as np + + if mapping is None: + s = set(d.keys()) + mapping = dict(zip(s, range(len(s)))) + n = len(mapping) + a = np.zeros(n) + for k1, i in mapping.items(): + i = mapping[k1] + a[i] = d[k1] + return a + + +def arbitrary_element(iterable): + """Returns an arbitrary element of `iterable` without removing it. + + This is most useful for "peeking" at an arbitrary element of a set, + but can be used for any list, dictionary, etc., as well. + + Parameters + ---------- + iterable : `abc.collections.Iterable` instance + Any object that implements ``__iter__``, e.g. set, dict, list, tuple, + etc. + + Returns + ------- + The object that results from ``next(iter(iterable))`` + + Raises + ------ + ValueError + If `iterable` is an iterator (because the current implementation of + this function would consume an element from the iterator). + + Examples + -------- + Arbitrary elements from common Iterable objects: + + >>> nx.utils.arbitrary_element([1, 2, 3]) # list + 1 + >>> nx.utils.arbitrary_element((1, 2, 3)) # tuple + 1 + >>> nx.utils.arbitrary_element({1, 2, 3}) # set + 1 + >>> d = {k: v for k, v in zip([1, 2, 3], [3, 2, 1])} + >>> nx.utils.arbitrary_element(d) # dict_keys + 1 + >>> nx.utils.arbitrary_element(d.values()) # dict values + 3 + + `str` is also an Iterable: + + >>> nx.utils.arbitrary_element("hello") + 'h' + + :exc:`ValueError` is raised if `iterable` is an iterator: + + >>> iterator = iter([1, 2, 3]) # Iterator, *not* Iterable + >>> nx.utils.arbitrary_element(iterator) + Traceback (most recent call last): + ... + ValueError: cannot return an arbitrary item from an iterator + + Notes + ----- + This function does not return a *random* element. If `iterable` is + ordered, sequential calls will return the same value:: + + >>> l = [1, 2, 3] + >>> nx.utils.arbitrary_element(l) + 1 + >>> nx.utils.arbitrary_element(l) + 1 + + """ + if isinstance(iterable, Iterator): + raise ValueError("cannot return an arbitrary item from an iterator") + # Another possible implementation is ``for x in iterable: return x``. + return next(iter(iterable)) + + +# Recipe from the itertools documentation. +def pairwise(iterable, cyclic=False): + "s -> (s0, s1), (s1, s2), (s2, s3), ..." 
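+    # e.g. pairwise("abc") -> ("a", "b"), ("b", "c");
+    # with cyclic=True a wrap-around pair ("c", "a") is appended.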
+ a, b = tee(iterable) + first = next(b, None) + if cyclic is True: + return zip(a, chain(b, (first,))) + return zip(a, b) + + +def groups(many_to_one): + """Converts a many-to-one mapping into a one-to-many mapping. + + `many_to_one` must be a dictionary whose keys and values are all + :term:`hashable`. + + The return value is a dictionary mapping values from `many_to_one` + to sets of keys from `many_to_one` that have that value. + + Examples + -------- + >>> from networkx.utils import groups + >>> many_to_one = {"a": 1, "b": 1, "c": 2, "d": 3, "e": 3} + >>> groups(many_to_one) # doctest: +SKIP + {1: {'a', 'b'}, 2: {'c'}, 3: {'e', 'd'}} + """ + one_to_many = defaultdict(set) + for v, k in many_to_one.items(): + one_to_many[k].add(v) + return dict(one_to_many) + + +def create_random_state(random_state=None): + """Returns a numpy.random.RandomState or numpy.random.Generator instance + depending on input. + + Parameters + ---------- + random_state : int or NumPy RandomState or Generator instance, optional (default=None) + If int, return a numpy.random.RandomState instance set with seed=int. + if `numpy.random.RandomState` instance, return it. + if `numpy.random.Generator` instance, return it. + if None or numpy.random, return the global random number generator used + by numpy.random. + """ + import numpy as np + + if random_state is None or random_state is np.random: + return np.random.mtrand._rand + if isinstance(random_state, np.random.RandomState): + return random_state + if isinstance(random_state, int): + return np.random.RandomState(random_state) + if isinstance(random_state, np.random.Generator): + return random_state + msg = ( + f"{random_state} cannot be used to create a numpy.random.RandomState or\n" + "numpy.random.Generator instance" + ) + raise ValueError(msg) + + +class PythonRandomViaNumpyBits(random.Random): + """Provide the random.random algorithms using a numpy.random bit generator + + The intent is to allow people to contribute code that uses Python's random + library, but still allow users to provide a single easily controlled random + bit-stream for all work with NetworkX. This implementation is based on helpful + comments and code from Robert Kern on NumPy's GitHub Issue #24458. + + This implementation supersedes that of `PythonRandomInterface` which rewrote + methods to account for subtle differences in API between `random` and + `numpy.random`. Instead this subclasses `random.Random` and overwrites + the methods `random`, `getrandbits`, `getstate`, `setstate` and `seed`. + It makes them use the rng values from an input numpy `RandomState` or `Generator`. + Those few methods allow the rest of the `random.Random` methods to provide + the API interface of `random.random` while using randomness generated by + a numpy generator. + """ + + def __init__(self, rng=None): + try: + import numpy as np + except ImportError: + msg = "numpy not found, only random.random available." + warnings.warn(msg, ImportWarning) + + if rng is None: + self._rng = np.random.mtrand._rand + else: + self._rng = rng + + # Not necessary, given our overriding of gauss() below, but it's + # in the superclass and nominally public, so initialize it here. + self.gauss_next = None + + def random(self): + """Get the next random number in the range 0.0 <= X < 1.0.""" + return self._rng.random() + + def getrandbits(self, k): + """getrandbits(k) -> x. 
Generates an int with k random bits.""" + if k < 0: + raise ValueError("number of bits must be non-negative") + numbytes = (k + 7) // 8 # bits / 8 and rounded up + x = int.from_bytes(self._rng.bytes(numbytes), "big") + return x >> (numbytes * 8 - k) # trim excess bits + + def getstate(self): + return self._rng.__getstate__() + + def setstate(self, state): + self._rng.__setstate__(state) + + def seed(self, *args, **kwds): + "Do nothing override method." + raise NotImplementedError("seed() not implemented in PythonRandomViaNumpyBits") + + +################################################################## +class PythonRandomInterface: + """PythonRandomInterface is included for backward compatibility + New code should use PythonRandomViaNumpyBits instead. + """ + + def __init__(self, rng=None): + try: + import numpy as np + except ImportError: + msg = "numpy not found, only random.random available." + warnings.warn(msg, ImportWarning) + + if rng is None: + self._rng = np.random.mtrand._rand + else: + self._rng = rng + + def random(self): + return self._rng.random() + + def uniform(self, a, b): + return a + (b - a) * self._rng.random() + + def randrange(self, a, b=None): + import numpy as np + + if b is None: + a, b = 0, a + if b > 9223372036854775807: # from np.iinfo(np.int64).max + tmp_rng = PythonRandomViaNumpyBits(self._rng) + return tmp_rng.randrange(a, b) + + if isinstance(self._rng, np.random.Generator): + return self._rng.integers(a, b) + return self._rng.randint(a, b) + + # NOTE: the numpy implementations of `choice` don't support strings, so + # this cannot be replaced with self._rng.choice + def choice(self, seq): + import numpy as np + + if isinstance(self._rng, np.random.Generator): + idx = self._rng.integers(0, len(seq)) + else: + idx = self._rng.randint(0, len(seq)) + return seq[idx] + + def gauss(self, mu, sigma): + return self._rng.normal(mu, sigma) + + def shuffle(self, seq): + return self._rng.shuffle(seq) + + # Some methods don't match API for numpy RandomState. + # Commented out versions are not used by NetworkX + + def sample(self, seq, k): + return self._rng.choice(list(seq), size=(k,), replace=False) + + def randint(self, a, b): + import numpy as np + + if b > 9223372036854775807: # from np.iinfo(np.int64).max + tmp_rng = PythonRandomViaNumpyBits(self._rng) + return tmp_rng.randint(a, b) + + if isinstance(self._rng, np.random.Generator): + return self._rng.integers(a, b + 1) + return self._rng.randint(a, b + 1) + + # exponential as expovariate with 1/argument, + def expovariate(self, scale): + return self._rng.exponential(1 / scale) + + # pareto as paretovariate with 1/argument, + def paretovariate(self, shape): + return self._rng.pareto(shape) + + +# weibull as weibullvariate multiplied by beta, +# def weibullvariate(self, alpha, beta): +# return self._rng.weibull(alpha) * beta +# +# def triangular(self, low, high, mode): +# return self._rng.triangular(low, mode, high) +# +# def choices(self, seq, weights=None, cum_weights=None, k=1): +# return self._rng.choice(seq + + +def create_py_random_state(random_state=None): + """Returns a random.Random instance depending on input. + + Parameters + ---------- + random_state : int or random number generator or None (default=None) + - If int, return a `random.Random` instance set with seed=int. + - If `random.Random` instance, return it. + - If None or the `np.random` package, return the global random number + generator used by `np.random`. 
+        - If an `np.random.Generator` instance, or the `np.random` package, or
+          the global numpy random number generator, then return it
+          wrapped in a `PythonRandomViaNumpyBits` class.
+        - If a `PythonRandomViaNumpyBits` instance, return it.
+        - If a `PythonRandomInterface` instance, return it.
+        - If a `np.random.RandomState` instance and not the global numpy default,
+          return it wrapped in `PythonRandomInterface` for backward bit-stream
+          matching with legacy code.
+
+    Notes
+    -----
+    - A diagram intending to illustrate the relationships behind our support
+      for numpy random numbers is called
+      `NetworkX Numpy Random Numbers `_.
+    - More discussion about this support also appears in
+      `gh-6869#comment `_.
+    - Wrappers of numpy.random number generators allow them to mimic the Python random
+      number generation algorithms. For example, Python can create arbitrarily large
+      random ints, and the wrappers use Numpy bit-streams with CPython's random module
+      to choose arbitrarily large random integers too.
+    - We provide two wrapper classes:
+      `PythonRandomViaNumpyBits` is usually what you want and is always used for
+      `np.random.Generator` instances. But for users who need to recreate random numbers
+      produced in NetworkX 3.2 or earlier, we maintain the `PythonRandomInterface`
+      wrapper as well. We use it only if passed a (non-default) `np.random.RandomState`
+      instance pre-initialized from a seed. Otherwise the newer wrapper is used.
+    """
+    if random_state is None or random_state is random:
+        return random._inst
+    if isinstance(random_state, random.Random):
+        return random_state
+    if isinstance(random_state, int):
+        return random.Random(random_state)
+
+    try:
+        import numpy as np
+    except ImportError:
+        pass
+    else:
+        if isinstance(random_state, PythonRandomInterface | PythonRandomViaNumpyBits):
+            return random_state
+        if isinstance(random_state, np.random.Generator):
+            return PythonRandomViaNumpyBits(random_state)
+        if random_state is np.random:
+            return PythonRandomViaNumpyBits(np.random.mtrand._rand)
+
+        if isinstance(random_state, np.random.RandomState):
+            if random_state is np.random.mtrand._rand:
+                return PythonRandomViaNumpyBits(random_state)
+            # Only need older interface if specially constructed RandomState used
+            return PythonRandomInterface(random_state)
+
+    msg = f"{random_state} cannot be used to generate a random.Random instance"
+    raise ValueError(msg)
+
+
+def nodes_equal(nodes1, nodes2):
+    """Check if nodes are equal.
+
+    Equality here means equal as Python objects.
+    Node data must match if included.
+    The order of nodes is not relevant.
+
+    Parameters
+    ----------
+    nodes1, nodes2 : iterables of nodes, or (node, datadict) tuples
+
+    Returns
+    -------
+    bool
+        True if nodes are equal, False otherwise.
+    """
+    nlist1 = list(nodes1)
+    nlist2 = list(nodes2)
+    try:
+        d1 = dict(nlist1)
+        d2 = dict(nlist2)
+    except (ValueError, TypeError):
+        d1 = dict.fromkeys(nlist1)
+        d2 = dict.fromkeys(nlist2)
+    return d1 == d2
+
+
+def edges_equal(edges1, edges2):
+    """Check if edges are equal.
+
+    Equality here means equal as Python objects.
+    Edge data must match if included.
+    The order of the edges is not relevant.
+
+    Parameters
+    ----------
+    edges1, edges2 : iterables of edges
+        Edges given as tuples (u, v), or
+        edge tuples with data dicts (u, v, d), or
+        edge tuples with keys and data dicts (u, v, k, d)
+
+    Returns
+    -------
+    bool
+        True if edges are equal, False otherwise.
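+
+    For illustration (the order and direction of the 2-tuples is ignored,
+    but any data dicts must match):
+
+        >>> edges_equal([(0, 1), (1, 2)], [(2, 1), (1, 0)])
+        True
+        >>> edges_equal([(0, 1, {"weight": 1})], [(0, 1, {"weight": 2})])
+        False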
+ """ + from collections import defaultdict + + d1 = defaultdict(dict) + d2 = defaultdict(dict) + c1 = 0 + for c1, e in enumerate(edges1): + u, v = e[0], e[1] + data = [e[2:]] + if v in d1[u]: + data = d1[u][v] + data + d1[u][v] = data + d1[v][u] = data + c2 = 0 + for c2, e in enumerate(edges2): + u, v = e[0], e[1] + data = [e[2:]] + if v in d2[u]: + data = d2[u][v] + data + d2[u][v] = data + d2[v][u] = data + if c1 != c2: + return False + # can check one direction because lengths are the same. + for n, nbrdict in d1.items(): + for nbr, datalist in nbrdict.items(): + if n not in d2: + return False + if nbr not in d2[n]: + return False + d2datalist = d2[n][nbr] + for data in datalist: + if datalist.count(data) != d2datalist.count(data): + return False + return True + + +def graphs_equal(graph1, graph2): + """Check if graphs are equal. + + Equality here means equal as Python objects (not isomorphism). + Node, edge and graph data must match. + + Parameters + ---------- + graph1, graph2 : graph + + Returns + ------- + bool + True if graphs are equal, False otherwise. + """ + return ( + graph1.adj == graph2.adj + and graph1.nodes == graph2.nodes + and graph1.graph == graph2.graph + ) + + +def _clear_cache(G): + """Clear the cache of a graph (currently stores converted graphs). + + Caching is controlled via ``nx.config.cache_converted_graphs`` configuration. + """ + if cache := getattr(G, "__networkx_cache__", None): + cache.clear() + + +def check_create_using(create_using, *, directed=None, multigraph=None, default=None): + """Assert that create_using has good properties + + This checks for desired directedness and multi-edge properties. + It returns `create_using` unless that is `None` when it returns + the optionally specified default value. + + Parameters + ---------- + create_using : None, graph class or instance + The input value of create_using for a function. + directed : None or bool + Whether to check `create_using.is_directed() == directed`. + If None, do not assert directedness. + multigraph : None or bool + Whether to check `create_using.is_multigraph() == multigraph`. + If None, do not assert multi-edge property. + default : None or graph class + The graph class to return if create_using is None. + + Returns + ------- + create_using : graph class or instance + The provided graph class or instance, or if None, the `default` value. + + Raises + ------ + NetworkXError + When `create_using` doesn't match the properties specified by `directed` + or `multigraph` parameters. 
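+
+    For illustration (hypothetical calls):
+
+        >>> check_create_using(None, directed=False, default=nx.Graph) is nx.Graph
+        True
+        >>> check_create_using(nx.DiGraph(), directed=True).is_directed()
+        True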
+    """
+    if default is None:
+        default = nx.Graph
+    G = create_using if create_using is not None else default
+
+    G_directed = G.is_directed(None) if isinstance(G, type) else G.is_directed()
+    G_multigraph = G.is_multigraph(None) if isinstance(G, type) else G.is_multigraph()
+
+    if directed is not None:
+        if directed and not G_directed:
+            raise nx.NetworkXError("create_using must be directed")
+        if not directed and G_directed:
+            raise nx.NetworkXError("create_using must not be directed")
+
+    if multigraph is not None:
+        if multigraph and not G_multigraph:
+            raise nx.NetworkXError("create_using must be a multi-graph")
+        if not multigraph and G_multigraph:
+            raise nx.NetworkXError("create_using must not be a multi-graph")
+    return G
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/random_sequence.py b/.venv/lib/python3.12/site-packages/networkx/utils/random_sequence.py
new file mode 100644
index 0000000..20a7b5e
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/random_sequence.py
@@ -0,0 +1,164 @@
+"""
+Utilities for generating random numbers, random sequences, and
+random selections.
+"""
+
+import networkx as nx
+from networkx.utils import py_random_state
+
+__all__ = [
+    "powerlaw_sequence",
+    "zipf_rv",
+    "cumulative_distribution",
+    "discrete_sequence",
+    "random_weighted_sample",
+    "weighted_choice",
+]
+
+
+# These helpers choose random sequences from given distributions.
+# They use Python's random module:
+# https://docs.python.org/3/library/random.html
+
+
+@py_random_state(2)
+def powerlaw_sequence(n, exponent=2.0, seed=None):
+    """
+    Return sample sequence of length n from a power law distribution.
+    """
+    return [seed.paretovariate(exponent - 1) for i in range(n)]
+
+
+@py_random_state(2)
+def zipf_rv(alpha, xmin=1, seed=None):
+    r"""Returns a random value chosen from the Zipf distribution.
+
+    The return value is an integer drawn from the probability distribution
+
+    .. math::
+
+        p(x)=\frac{x^{-\alpha}}{\zeta(\alpha, x_{\min})},
+
+    where $\zeta(\alpha, x_{\min})$ is the Hurwitz zeta function.
+
+    Parameters
+    ----------
+    alpha : float
+        Exponent value of the distribution
+    xmin : int
+        Minimum value
+    seed : integer, random_state, or None (default)
+        Indicator of random number generation state.
+        See :ref:`Randomness`.
+
+    Returns
+    -------
+    x : int
+        Random value from Zipf distribution
+
+    Raises
+    ------
+    ValueError:
+        If xmin < 1 or
+        If alpha <= 1
+
+    Notes
+    -----
+    The rejection algorithm generates random values for the power-law
+    distribution in uniformly bounded expected time dependent on
+    parameters. See [1]_ for details on its operation.
+
+    Examples
+    --------
+    >>> nx.utils.zipf_rv(alpha=2, xmin=3, seed=42)
+    8
+
+    References
+    ----------
+    .. [1] Luc Devroye, Non-Uniform Random Variate Generation,
+       Springer-Verlag, New York, 1986.
+ """ + if xmin < 1: + raise ValueError("xmin < 1") + if alpha <= 1: + raise ValueError("a <= 1.0") + a1 = alpha - 1.0 + b = 2**a1 + while True: + u = 1.0 - seed.random() # u in (0,1] + v = seed.random() # v in [0,1) + x = int(xmin * u ** -(1.0 / a1)) + t = (1.0 + (1.0 / x)) ** a1 + if v * x * (t - 1.0) / (b - 1.0) <= t / b: + break + return x + + +def cumulative_distribution(distribution): + """Returns normalized cumulative distribution from discrete distribution.""" + + cdf = [0.0] + psum = sum(distribution) + for i in range(len(distribution)): + cdf.append(cdf[i] + distribution[i] / psum) + return cdf + + +@py_random_state(3) +def discrete_sequence(n, distribution=None, cdistribution=None, seed=None): + """ + Return sample sequence of length n from a given discrete distribution + or discrete cumulative distribution. + + One of the following must be specified. + + distribution = histogram of values, will be normalized + + cdistribution = normalized discrete cumulative distribution + + """ + import bisect + + if cdistribution is not None: + cdf = cdistribution + elif distribution is not None: + cdf = cumulative_distribution(distribution) + else: + raise nx.NetworkXError( + "discrete_sequence: distribution or cdistribution missing" + ) + + # get a uniform random number + inputseq = [seed.random() for i in range(n)] + + # choose from CDF + seq = [bisect.bisect_left(cdf, s) - 1 for s in inputseq] + return seq + + +@py_random_state(2) +def random_weighted_sample(mapping, k, seed=None): + """Returns k items without replacement from a weighted sample. + + The input is a dictionary of items with weights as values. + """ + if k > len(mapping): + raise ValueError("sample larger than population") + sample = set() + while len(sample) < k: + sample.add(weighted_choice(mapping, seed)) + return list(sample) + + +@py_random_state(1) +def weighted_choice(mapping, seed=None): + """Returns a single element from a weighted sample. + + The input is a dictionary of items with weights as values. + """ + # use roulette method + rnd = seed.random() * sum(mapping.values()) + for k, w in mapping.items(): + rnd -= w + if rnd < 0: + return k diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/rcm.py b/.venv/lib/python3.12/site-packages/networkx/utils/rcm.py new file mode 100644 index 0000000..7465c50 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/rcm.py @@ -0,0 +1,159 @@ +""" +Cuthill-McKee ordering of graph nodes to produce sparse matrices +""" + +from collections import deque +from operator import itemgetter + +import networkx as nx + +from ..utils import arbitrary_element + +__all__ = ["cuthill_mckee_ordering", "reverse_cuthill_mckee_ordering"] + + +def cuthill_mckee_ordering(G, heuristic=None): + """Generate an ordering (permutation) of the graph nodes to make + a sparse matrix. + + Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_. + + Parameters + ---------- + G : graph + A NetworkX graph + + heuristic : function, optional + Function to choose starting node for RCM algorithm. If None + a node from a pseudo-peripheral pair is used. A user-defined function + can be supplied that takes a graph object and returns a single node. + + Returns + ------- + nodes : generator + Generator of nodes in Cuthill-McKee ordering. 
+
+    Examples
+    --------
+    >>> from networkx.utils import cuthill_mckee_ordering
+    >>> G = nx.path_graph(4)
+    >>> rcm = list(cuthill_mckee_ordering(G))
+    >>> A = nx.adjacency_matrix(G, nodelist=rcm)
+
+    Smallest degree node as heuristic function:
+
+    >>> def smallest_degree(G):
+    ...     return min(G, key=G.degree)
+    >>> rcm = list(cuthill_mckee_ordering(G, heuristic=smallest_degree))
+
+
+    See Also
+    --------
+    reverse_cuthill_mckee_ordering
+
+    Notes
+    -----
+    Finding the optimal ordering for bandwidth reduction is NP-complete [2]_.
+
+
+    References
+    ----------
+    .. [1] E. Cuthill and J. McKee.
+       Reducing the bandwidth of sparse symmetric matrices,
+       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
+       http://doi.acm.org/10.1145/800195.805928
+    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
+       Springer-Verlag New York, Inc., New York, NY, USA.
+    """
+    for c in nx.connected_components(G):
+        yield from connected_cuthill_mckee_ordering(G.subgraph(c), heuristic)
+
+
+def reverse_cuthill_mckee_ordering(G, heuristic=None):
+    """Generate an ordering (permutation) of the graph nodes to make
+    a sparse matrix.
+
+    Uses the reverse Cuthill-McKee heuristic (based on breadth-first search)
+    [1]_.
+
+    Parameters
+    ----------
+    G : graph
+        A NetworkX graph
+
+    heuristic : function, optional
+        Function to choose starting node for RCM algorithm. If None
+        a node from a pseudo-peripheral pair is used. A user-defined function
+        can be supplied that takes a graph object and returns a single node.
+
+    Returns
+    -------
+    nodes : generator
+        Generator of nodes in reverse Cuthill-McKee ordering.
+
+    Examples
+    --------
+    >>> from networkx.utils import reverse_cuthill_mckee_ordering
+    >>> G = nx.path_graph(4)
+    >>> rcm = list(reverse_cuthill_mckee_ordering(G))
+    >>> A = nx.adjacency_matrix(G, nodelist=rcm)
+
+    Smallest degree node as heuristic function:
+
+    >>> def smallest_degree(G):
+    ...     return min(G, key=G.degree)
+    >>> rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
+
+
+    See Also
+    --------
+    cuthill_mckee_ordering
+
+    Notes
+    -----
+    Finding the optimal ordering for bandwidth reduction is NP-complete [2]_.
+
+    References
+    ----------
+    .. [1] E. Cuthill and J. McKee.
+       Reducing the bandwidth of sparse symmetric matrices,
+       In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969.
+       http://doi.acm.org/10.1145/800195.805928
+    .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual.
+       Springer-Verlag New York, Inc., New York, NY, USA.
+ """ + return reversed(list(cuthill_mckee_ordering(G, heuristic=heuristic))) + + +def connected_cuthill_mckee_ordering(G, heuristic=None): + # the cuthill mckee algorithm for connected graphs + if heuristic is None: + start = pseudo_peripheral_node(G) + else: + start = heuristic(G) + visited = {start} + queue = deque([start]) + while queue: + parent = queue.popleft() + yield parent + nd = sorted(G.degree(set(G[parent]) - visited), key=itemgetter(1)) + children = [n for n, d in nd] + visited.update(children) + queue.extend(children) + + +def pseudo_peripheral_node(G): + # helper for cuthill-mckee to find a node in a "pseudo peripheral pair" + # to use as good starting node + u = arbitrary_element(G) + lp = 0 + v = u + while True: + spl = nx.shortest_path_length(G, v) + l = max(spl.values()) + if l <= lp: + break + lp = l + farthest = (n for n, dist in spl.items() if dist == l) + v, deg = min(G.degree(farthest), key=itemgetter(1)) + return v diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__init__.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..0f195dd Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-312.pyc new file mode 100644 index 0000000..5840011 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test__init.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-312.pyc new file mode 100644 index 0000000..604e6e3 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_backends.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-312.pyc new file mode 100644 index 0000000..cf7a7df Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_config.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-312.pyc new file mode 100644 index 0000000..d33aa84 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_decorators.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-312.pyc new file mode 100644 index 0000000..0e293a0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_heaps.cpython-312.pyc differ diff --git 
a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-312.pyc new file mode 100644 index 0000000..8a7d46e Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_mapped_queue.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-312.pyc new file mode 100644 index 0000000..e4bccd0 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_misc.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-312.pyc new file mode 100644 index 0000000..d922019 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_random_sequence.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-312.pyc new file mode 100644 index 0000000..81bc370 Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_rcm.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_unionfind.cpython-312.pyc b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_unionfind.cpython-312.pyc new file mode 100644 index 0000000..ca6111d Binary files /dev/null and b/.venv/lib/python3.12/site-packages/networkx/utils/tests/__pycache__/test_unionfind.cpython-312.pyc differ diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test__init.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test__init.py new file mode 100644 index 0000000..ecbcce3 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test__init.py @@ -0,0 +1,11 @@ +import pytest + + +def test_utils_namespace(): + """Ensure objects are not unintentionally exposed in utils namespace.""" + with pytest.raises(ImportError): + from networkx.utils import nx + with pytest.raises(ImportError): + from networkx.utils import sys + with pytest.raises(ImportError): + from networkx.utils import defaultdict, deque diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_backends.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_backends.py new file mode 100644 index 0000000..fb0309c --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_backends.py @@ -0,0 +1,187 @@ +import pickle + +import pytest + +import networkx as nx + +sp = pytest.importorskip("scipy") +pytest.importorskip("numpy") + + +@nx._dispatchable(implemented_by_nx=False) +def _stub_func(G): + raise NotImplementedError("_stub_func is a stub") + + +def test_dispatch_kwds_vs_args(): + G = nx.path_graph(4) + nx.pagerank(G) + nx.pagerank(G=G) + with pytest.raises(TypeError): + nx.pagerank() + + +def test_pickle(): + count = 0 + for name, func in nx.utils.backends._registered_algorithms.items(): + pickled = pickle.dumps(func.__wrapped__) + assert pickle.loads(pickled) is func.__wrapped__ + try: + # Some functions can't be pickled, but it's not 
b/c of _dispatchable + pickled = pickle.dumps(func) + except pickle.PicklingError: + continue + assert pickle.loads(pickled) is func + count += 1 + assert count > 0 + assert pickle.loads(pickle.dumps(nx.inverse_line_graph)) is nx.inverse_line_graph + + +@pytest.mark.skipif( + "not nx.config.backend_priority.algos " + "or nx.config.backend_priority.algos[0] != 'nx_loopback'" +) +def test_graph_converter_needs_backend(): + # When testing, `nx.from_scipy_sparse_array` will *always* call the backend + # implementation if it's implemented. If `backend=` isn't given, then the result + # will be converted back to NetworkX via `convert_to_nx`. + # If not testing, then calling `nx.from_scipy_sparse_array` w/o `backend=` will + # always call the original version. `backend=` is *required* to call the backend. + from networkx.classes.tests.dispatch_interface import ( + LoopbackBackendInterface, + LoopbackGraph, + ) + + A = sp.sparse.coo_array([[0, 3, 2], [3, 0, 1], [2, 1, 0]]) + + side_effects = [] + + def from_scipy_sparse_array(self, *args, **kwargs): + side_effects.append(1) # Just to prove this was called + return self.convert_from_nx( + self.__getattr__("from_scipy_sparse_array")(*args, **kwargs), + preserve_edge_attrs=True, + preserve_node_attrs=True, + preserve_graph_attrs=True, + ) + + @staticmethod + def convert_to_nx(obj, *, name=None): + if type(obj) is nx.Graph: + return obj + return nx.Graph(obj) + + # *This mutates LoopbackBackendInterface!* + orig_convert_to_nx = LoopbackBackendInterface.convert_to_nx + LoopbackBackendInterface.convert_to_nx = convert_to_nx + LoopbackBackendInterface.from_scipy_sparse_array = from_scipy_sparse_array + + try: + assert side_effects == [] + assert type(nx.from_scipy_sparse_array(A)) is nx.Graph + assert side_effects == [1] + assert ( + type(nx.from_scipy_sparse_array(A, backend="nx_loopback")) is LoopbackGraph + ) + assert side_effects == [1, 1] + # backend="networkx" is default implementation + assert type(nx.from_scipy_sparse_array(A, backend="networkx")) is nx.Graph + assert side_effects == [1, 1] + finally: + LoopbackBackendInterface.convert_to_nx = staticmethod(orig_convert_to_nx) + del LoopbackBackendInterface.from_scipy_sparse_array + with pytest.raises(ImportError, match="backend is not installed"): + nx.from_scipy_sparse_array(A, backend="bad-backend-name") + + +@pytest.mark.skipif( + "not nx.config.backend_priority.algos " + "or nx.config.backend_priority.algos[0] != 'nx_loopback'" +) +def test_networkx_backend(): + """Test using `backend="networkx"` in a dispatchable function.""" + # (Implementing this test is harder than it should be) + from networkx.classes.tests.dispatch_interface import ( + LoopbackBackendInterface, + LoopbackGraph, + ) + + G = LoopbackGraph() + G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4)]) + + @staticmethod + def convert_to_nx(obj, *, name=None): + if isinstance(obj, LoopbackGraph): + new_graph = nx.Graph() + new_graph.__dict__.update(obj.__dict__) + return new_graph + return obj + + # *This mutates LoopbackBackendInterface!* + # This uses the same trick as in the previous test. 
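+    # (That trick: temporarily replace LoopbackBackendInterface.convert_to_nx
+    # with a version that rebuilds an nx.Graph, then restore the original in
+    # the `finally` block below.)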
+ orig_convert_to_nx = LoopbackBackendInterface.convert_to_nx + LoopbackBackendInterface.convert_to_nx = convert_to_nx + try: + G2 = nx.ego_graph(G, 0, backend="networkx") + assert type(G2) is nx.Graph + finally: + LoopbackBackendInterface.convert_to_nx = staticmethod(orig_convert_to_nx) + + +def test_dispatchable_are_functions(): + assert type(nx.pagerank) is type(nx.pagerank.orig_func) + + +@pytest.mark.skipif("not nx.utils.backends.backends") +def test_mixing_backend_graphs(): + from networkx.classes.tests import dispatch_interface + + G = nx.Graph() + G.add_edge(1, 2) + G.add_edge(2, 3) + H = nx.Graph() + H.add_edge(2, 3) + rv = nx.intersection(G, H) + assert set(nx.intersection(G, H)) == {2, 3} + G2 = dispatch_interface.convert(G) + H2 = dispatch_interface.convert(H) + if "nx_loopback" in nx.config.backend_priority: + # Auto-convert + assert set(nx.intersection(G2, H)) == {2, 3} + assert set(nx.intersection(G, H2)) == {2, 3} + elif not nx.config.backend_priority and "nx_loopback" not in nx.config.backends: + # G2 and H2 are backend objects for a backend that is not registered! + with pytest.raises(ImportError, match="backend is not installed"): + nx.intersection(G2, H) + with pytest.raises(ImportError, match="backend is not installed"): + nx.intersection(G, H2) + # It would be nice to test passing graphs from *different* backends, + # but we are not set up to do this yet. + + +def test_bad_backend_name(): + """Using `backend=` raises with unknown backend even if there are no backends.""" + with pytest.raises( + ImportError, match="'this_backend_does_not_exist' backend is not installed" + ): + nx.null_graph(backend="this_backend_does_not_exist") + + +def test_not_implemented_by_nx(): + assert "networkx" in nx.pagerank.backends + assert "networkx" not in _stub_func.backends + + if "nx_loopback" in nx.config.backends: + from networkx.classes.tests.dispatch_interface import LoopbackBackendInterface + + def stub_func_implementation(G): + return True + + LoopbackBackendInterface._stub_func = staticmethod(stub_func_implementation) + try: + assert _stub_func(nx.Graph()) is True + finally: + del LoopbackBackendInterface._stub_func + + with pytest.raises(NotImplementedError): + _stub_func(nx.Graph()) diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_config.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_config.py new file mode 100644 index 0000000..d4fe902 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_config.py @@ -0,0 +1,263 @@ +import collections +import pickle + +import pytest + +import networkx as nx +from networkx.utils.configs import BackendPriorities, Config + + +# Define this at module level so we can test pickling +class ExampleConfig(Config): + """Example configuration.""" + + x: int + y: str + + def _on_setattr(self, key, value): + if key == "x" and value <= 0: + raise ValueError("x must be positive") + if key == "y" and not isinstance(value, str): + raise TypeError("y must be a str") + return value + + +class EmptyConfig(Config): + pass + + +@pytest.mark.parametrize("cfg", [EmptyConfig(), Config()]) +def test_config_empty(cfg): + assert dir(cfg) == [] + with pytest.raises(AttributeError): + cfg.x = 1 + with pytest.raises(KeyError): + cfg["x"] = 1 + with pytest.raises(AttributeError): + cfg.x + with pytest.raises(KeyError): + cfg["x"] + assert len(cfg) == 0 + assert "x" not in cfg + assert cfg == cfg + assert cfg.get("x", 2) == 2 + assert set(cfg.keys()) == set() + assert set(cfg.values()) == set() + assert 
set(cfg.items()) == set() + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert isinstance(cfg, collections.abc.Collection) + assert isinstance(cfg, collections.abc.Mapping) + + +def test_config_subclass(): + with pytest.raises(TypeError, match="missing 2 required keyword-only"): + ExampleConfig() + with pytest.raises(ValueError, match="x must be positive"): + ExampleConfig(x=0, y="foo") + with pytest.raises(TypeError, match="unexpected keyword"): + ExampleConfig(x=1, y="foo", z="bad config") + with pytest.raises(TypeError, match="unexpected keyword"): + EmptyConfig(z="bad config") + cfg = ExampleConfig(x=1, y="foo") + assert cfg.x == 1 + assert cfg["x"] == 1 + assert cfg["y"] == "foo" + assert cfg.y == "foo" + assert "x" in cfg + assert "y" in cfg + assert "z" not in cfg + assert len(cfg) == 2 + assert set(iter(cfg)) == {"x", "y"} + assert set(cfg.keys()) == {"x", "y"} + assert set(cfg.values()) == {1, "foo"} + assert set(cfg.items()) == {("x", 1), ("y", "foo")} + assert dir(cfg) == ["x", "y"] + cfg.x = 2 + cfg["y"] = "bar" + assert cfg["x"] == 2 + assert cfg.y == "bar" + with pytest.raises(TypeError, match="can't be deleted"): + del cfg.x + with pytest.raises(TypeError, match="can't be deleted"): + del cfg["y"] + assert cfg.x == 2 + assert cfg == cfg + assert cfg == ExampleConfig(x=2, y="bar") + assert cfg != ExampleConfig(x=3, y="baz") + assert cfg != Config(x=2, y="bar") + with pytest.raises(TypeError, match="y must be a str"): + cfg["y"] = 5 + with pytest.raises(ValueError, match="x must be positive"): + cfg.x = -5 + assert cfg.get("x", 10) == 2 + with pytest.raises(AttributeError): + cfg.z = 5 + with pytest.raises(KeyError): + cfg["z"] = 5 + with pytest.raises(AttributeError): + cfg.z + with pytest.raises(KeyError): + cfg["z"] + cfg2 = pickle.loads(pickle.dumps(cfg)) + assert cfg == cfg2 + assert cfg.__doc__ == "Example configuration." + assert cfg2.__doc__ == "Example configuration." 
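+
+# A condensed sketch of the strict-Config pattern exercised above.
+# `ThresholdConfig` is a hypothetical name, and the context-manager call
+# mirrors the behavior covered by `test_context` further below.
+from networkx.utils.configs import Config
+
+
+class ThresholdConfig(Config):
+    """Hypothetical config with one validated field."""
+
+    threshold: float
+
+    def _on_setattr(self, key, value):
+        # Validate on every assignment, as ExampleConfig does for `x`.
+        if key == "threshold" and value <= 0:
+            raise ValueError("threshold must be positive")
+        return value
+
+
+cfg = ThresholdConfig(threshold=0.5)
+with cfg(threshold=0.9):  # temporary override, restored on exit
+    assert cfg.threshold == 0.9
+assert cfg.threshold == 0.5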
+ + +def test_config_defaults(): + class DefaultConfig(Config): + x: int = 0 + y: int + + cfg = DefaultConfig(y=1) + assert cfg.x == 0 + cfg = DefaultConfig(x=2, y=1) + assert cfg.x == 2 + + +def test_nxconfig(): + assert isinstance(nx.config.backend_priority, BackendPriorities) + assert isinstance(nx.config.backend_priority.algos, list) + assert isinstance(nx.config.backends, Config) + with pytest.raises(TypeError, match="must be a list of backend names"): + nx.config.backend_priority.algos = "nx_loopback" + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backend_priority.algos = ["this_almost_certainly_is_not_a_backend"] + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = {} + with pytest.raises(TypeError, match="must be a Config of backend configs"): + nx.config.backends = Config(plausible_backend_name={}) + with pytest.raises(ValueError, match="Unknown backend when setting"): + nx.config.backends = Config(this_almost_certainly_is_not_a_backend=Config()) + with pytest.raises(TypeError, match="must be True or False"): + nx.config.cache_converted_graphs = "bad value" + with pytest.raises(TypeError, match="must be a set of "): + nx.config.warnings_to_ignore = 7 + with pytest.raises(ValueError, match="Unknown warning "): + nx.config.warnings_to_ignore = {"bad value"} + + prev = nx.config.backend_priority + try: + nx.config.backend_priority = ["networkx"] + assert isinstance(nx.config.backend_priority, BackendPriorities) + assert nx.config.backend_priority.algos == ["networkx"] + finally: + nx.config.backend_priority = prev + + +def test_nxconfig_context(): + # We do some special handling so that `nx.config.backend_priority = val` + # actually does `nx.config.backend_priority.algos = val`. 
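+    # (The assertions below exercise the plain assignment and both
+    # context-manager forms, and check that unknown backend names are
+    # rejected by each.)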
+ orig = nx.config.backend_priority.algos + val = [] if orig else ["networkx"] + assert orig != val + assert nx.config.backend_priority.algos != val + with nx.config(backend_priority=val): + assert nx.config.backend_priority.algos == val + assert nx.config.backend_priority.algos == orig + with nx.config.backend_priority(algos=val): + assert nx.config.backend_priority.algos == val + assert nx.config.backend_priority.algos == orig + bad = ["bad-backend"] + with pytest.raises(ValueError, match="Unknown backend"): + nx.config.backend_priority = bad + with pytest.raises(ValueError, match="Unknown backend"): + with nx.config(backend_priority=bad): + pass + with pytest.raises(ValueError, match="Unknown backend"): + with nx.config.backend_priority(algos=bad): + pass + + +def test_not_strict(): + class FlexibleConfig(Config, strict=False): + x: int + + cfg = FlexibleConfig(x=1) + assert "_strict" not in cfg + assert len(cfg) == 1 + assert list(cfg) == ["x"] + assert list(cfg.keys()) == ["x"] + assert list(cfg.values()) == [1] + assert list(cfg.items()) == [("x", 1)] + assert cfg.x == 1 + assert cfg["x"] == 1 + assert "x" in cfg + assert hasattr(cfg, "x") + assert "FlexibleConfig(x=1)" in repr(cfg) + assert cfg == FlexibleConfig(x=1) + del cfg.x + assert "FlexibleConfig()" in repr(cfg) + assert len(cfg) == 0 + assert not hasattr(cfg, "x") + assert "x" not in cfg + assert not hasattr(cfg, "y") + assert "y" not in cfg + cfg.y = 2 + assert len(cfg) == 1 + assert list(cfg) == ["y"] + assert list(cfg.keys()) == ["y"] + assert list(cfg.values()) == [2] + assert list(cfg.items()) == [("y", 2)] + assert cfg.y == 2 + assert cfg["y"] == 2 + assert hasattr(cfg, "y") + assert "y" in cfg + del cfg["y"] + assert len(cfg) == 0 + assert list(cfg) == [] + with pytest.raises(AttributeError, match="y"): + del cfg.y + with pytest.raises(KeyError, match="y"): + del cfg["y"] + with pytest.raises(TypeError, match="missing 1 required keyword-only"): + FlexibleConfig() + # Be strict when first creating the config object + with pytest.raises(TypeError, match="unexpected keyword argument 'y'"): + FlexibleConfig(x=1, y=2) + + class FlexibleConfigWithDefault(Config, strict=False): + x: int = 0 + + assert FlexibleConfigWithDefault().x == 0 + assert FlexibleConfigWithDefault(x=1)["x"] == 1 + + +def test_context(): + cfg = Config(x=1) + with cfg(x=2) as c: + assert c.x == 2 + c.x = 3 + assert cfg.x == 3 + assert cfg.x == 1 + + with cfg(x=2) as c: + assert c == cfg + assert cfg.x == 2 + with cfg(x=3) as c2: + assert c2 == cfg + assert cfg.x == 3 + with pytest.raises(RuntimeError, match="context manager without"): + with cfg as c3: # Forgot to call `cfg(...)` + pass + assert cfg.x == 3 + assert cfg.x == 2 + assert cfg.x == 1 + + c = cfg(x=4) # Not yet as context (not recommended, but possible) + assert c == cfg + assert cfg.x == 4 + # Cheat by looking at internal data; context stack should only grow with __enter__ + assert cfg._prev is not None + assert cfg._context_stack == [] + with c: + assert c == cfg + assert cfg.x == 4 + assert cfg.x == 1 + # Cheat again; there was no preceding `cfg(...)` call this time + assert cfg._prev is None + with pytest.raises(RuntimeError, match="context manager without"): + with cfg: + pass + assert cfg.x == 1 diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_decorators.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_decorators.py new file mode 100644 index 0000000..0a4aeab --- /dev/null +++ 
b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_decorators.py @@ -0,0 +1,510 @@ +import os +import pathlib +import random +import tempfile + +import pytest + +import networkx as nx +from networkx.utils.decorators import ( + argmap, + not_implemented_for, + np_random_state, + open_file, + py_random_state, +) +from networkx.utils.misc import PythonRandomInterface, PythonRandomViaNumpyBits + + +def test_not_implemented_decorator(): + @not_implemented_for("directed") + def test_d(G): + pass + + test_d(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_d(nx.DiGraph()) + + @not_implemented_for("undirected") + def test_u(G): + pass + + test_u(nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_u(nx.Graph()) + + @not_implemented_for("multigraph") + def test_m(G): + pass + + test_m(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_m(nx.MultiGraph()) + + @not_implemented_for("graph") + def test_g(G): + pass + + test_g(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_g(nx.Graph()) + + # not MultiDiGraph (multiple arguments => AND) + @not_implemented_for("directed", "multigraph") + def test_not_md(G): + pass + + test_not_md(nx.Graph()) + test_not_md(nx.DiGraph()) + test_not_md(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_not_md(nx.MultiDiGraph()) + + # Graph only (multiple decorators => OR) + @not_implemented_for("directed") + @not_implemented_for("multigraph") + def test_graph_only(G): + pass + + test_graph_only(nx.Graph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.DiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.MultiGraph()) + with pytest.raises(nx.NetworkXNotImplemented): + test_graph_only(nx.MultiDiGraph()) + + with pytest.raises(ValueError): + not_implemented_for("directed", "undirected") + + with pytest.raises(ValueError): + not_implemented_for("multigraph", "graph") + + +def test_not_implemented_decorator_key(): + with pytest.raises(KeyError): + + @not_implemented_for("foo") + def test1(G): + pass + + test1(nx.Graph()) + + +def test_not_implemented_decorator_raise(): + with pytest.raises(nx.NetworkXNotImplemented): + + @not_implemented_for("graph") + def test1(G): + pass + + test1(nx.Graph()) + + +class TestOpenFileDecorator: + def setup_method(self): + self.text = ["Blah... 
", "BLAH ", "BLAH!!!!"] + self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False) + self.name = self.fobj.name + + def teardown_method(self): + self.fobj.close() + os.unlink(self.name) + + def write(self, path): + for text in self.text: + path.write(text.encode("ascii")) + + @open_file(1, "r") + def read(self, path): + return path.readlines()[0] + + @staticmethod + @open_file(0, "wb") + def writer_arg0(path): + path.write(b"demo") + + @open_file(1, "wb+") + def writer_arg1(self, path): + self.write(path) + + @open_file(2, "wb") + def writer_arg2default(self, x, path=None): + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + @open_file(4, "wb") + def writer_arg4default(self, x, y, other="hello", path=None, **kwargs): + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + @open_file("path", "wb") + def writer_kwarg(self, **kwargs): + path = kwargs.get("path", None) + if path is None: + with tempfile.NamedTemporaryFile("wb+") as fh: + self.write(fh) + else: + self.write(path) + + def test_writer_arg0_str(self): + self.writer_arg0(self.name) + + def test_writer_arg0_fobj(self): + self.writer_arg0(self.fobj) + + def test_writer_arg0_pathlib(self): + self.writer_arg0(pathlib.Path(self.name)) + + def test_writer_arg1_str(self): + self.writer_arg1(self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg1_fobj(self): + self.writer_arg1(self.fobj) + assert not self.fobj.closed + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_str(self): + self.writer_arg2default(0, path=None) + self.writer_arg2default(0, path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_fobj(self): + self.writer_arg2default(0, path=self.fobj) + assert not self.fobj.closed + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_arg2default_fobj_path_none(self): + self.writer_arg2default(0, path=None) + + def test_writer_arg4default_fobj(self): + self.writer_arg4default(0, 1, dog="dog", other="other") + self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_str(self): + self.writer_kwarg(path=self.name) + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_fobj(self): + self.writer_kwarg(path=self.fobj) + self.fobj.close() + assert self.read(self.name) == "".join(self.text) + + def test_writer_kwarg_path_none(self): + self.writer_kwarg(path=None) + + +class TestRandomState: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + @np_random_state(1) + def instantiate_np_random_state(self, random_state): + allowed = (np.random.RandomState, np.random.Generator) + assert isinstance(random_state, allowed) + return random_state.random() + + @py_random_state(1) + def instantiate_py_random_state(self, random_state): + allowed = (random.Random, PythonRandomInterface, PythonRandomViaNumpyBits) + assert isinstance(random_state, allowed) + return random_state.random() + + def test_random_state_None(self): + np.random.seed(42) + rv = np.random.random() + np.random.seed(42) + assert rv == self.instantiate_np_random_state(None) + + random.seed(42) + rv = random.random() + random.seed(42) + assert rv == self.instantiate_py_random_state(None) + + def test_random_state_np_random(self): + np.random.seed(42) 
+ rv = np.random.random() + np.random.seed(42) + assert rv == self.instantiate_np_random_state(np.random) + np.random.seed(42) + assert rv == self.instantiate_py_random_state(np.random) + + def test_random_state_int(self): + np.random.seed(42) + np_rv = np.random.random() + random.seed(42) + py_rv = random.random() + + np.random.seed(42) + seed = 1 + rval = self.instantiate_np_random_state(seed) + rval_expected = np.random.RandomState(seed).rand() + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + random.seed(42) + rval = self.instantiate_py_random_state(seed) + rval_expected = random.Random(seed).random() + assert rval == rval_expected + # test that global seed wasn't changed in function + assert py_rv == random.random() + + def test_random_state_np_random_Generator(self): + np.random.seed(42) + np_rv = np.random.random() + np.random.seed(42) + seed = 1 + + rng = np.random.default_rng(seed) + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.default_rng(seed).random() + assert rval == rval_expected + + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.default_rng(seed).random(size=2)[1] + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + def test_random_state_np_random_RandomState(self): + np.random.seed(42) + np_rv = np.random.random() + np.random.seed(42) + seed = 1 + + rng = np.random.RandomState(seed) + rval = self.instantiate_np_random_state(rng) + rval_expected = np.random.RandomState(seed).random() + assert rval == rval_expected + + rval = self.instantiate_py_random_state(rng) + rval_expected = np.random.RandomState(seed).random(size=2)[1] + assert rval == rval_expected + # test that global seed wasn't changed in function + assert np_rv == np.random.random() + + def test_random_state_py_random(self): + seed = 1 + rng = random.Random(seed) + rv = self.instantiate_py_random_state(rng) + assert rv == random.Random(seed).random() + + pytest.raises(ValueError, self.instantiate_np_random_state, rng) + + +def test_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @np_random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_string_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state("a") + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @np_random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +def test_py_random_state_invalid_arg_index(): + with pytest.raises(nx.NetworkXError): + + @py_random_state(2) + def make_random_state(rs): + pass + + rstate = make_random_state(1) + + +class TestArgmap: + class ArgmapError(RuntimeError): + pass + + def test_trivial_function(self): + def do_not_call(x): + raise ArgmapError("do not call this function") + + @argmap(do_not_call) + def trivial_argmap(): + return 1 + + assert trivial_argmap() == 1 + + def test_trivial_iterator(self): + def do_not_call(x): + raise ArgmapError("do not call this function") + + @argmap(do_not_call) + def trivial_argmap(): + yield from (1, 2, 3) + + assert tuple(trivial_argmap()) == (1, 2, 3) + + def test_contextmanager(self): + container = [] + + def contextmanager(x): + nonlocal container + return x, lambda: container.append(x) + + @argmap(contextmanager, 0, 1, 2, try_finally=True) + 
def foo(x, y, z): + return x, y, z + + x, y, z = foo("a", "b", "c") + + # context exits are called in reverse + assert container == ["c", "b", "a"] + + def test_tryfinally_generator(self): + container = [] + + def singleton(x): + return (x,) + + with pytest.raises(nx.NetworkXError): + + @argmap(singleton, 0, 1, 2, try_finally=True) + def foo(x, y, z): + yield from (x, y, z) + + @argmap(singleton, 0, 1, 2) + def foo(x, y, z): + return x + y + z + + q = foo("a", "b", "c") + + assert q == ("a", "b", "c") + + def test_actual_vararg(self): + @argmap(lambda x: -x, 4) + def foo(x, y, *args): + return (x, y) + tuple(args) + + assert foo(1, 2, 3, 4, 5, 6) == (1, 2, 3, 4, -5, 6) + + def test_signature_destroying_intermediate_decorator(self): + def add_one_to_first_bad_decorator(f): + """Bad because it doesn't wrap the f signature (clobbers it)""" + + def decorated(a, *args, **kwargs): + return f(a + 1, *args, **kwargs) + + return decorated + + add_two_to_second = argmap(lambda b: b + 2, 1) + + @add_two_to_second + @add_one_to_first_bad_decorator + def add_one_and_two(a, b): + return a, b + + assert add_one_and_two(5, 5) == (6, 7) + + def test_actual_kwarg(self): + @argmap(lambda x: -x, "arg") + def foo(*, arg): + return arg + + assert foo(arg=3) == -3 + + def test_nested_tuple(self): + def xform(x, y): + u, v = y + return x + u + v, (x + u, x + v) + + # we're testing args and kwargs here, too + @argmap(xform, (0, ("t", 2))) + def foo(a, *args, **kwargs): + return a, args, kwargs + + a, args, kwargs = foo(1, 2, 3, t=4) + + assert a == 1 + 4 + 3 + assert args == (2, 1 + 3) + assert kwargs == {"t": 1 + 4} + + def test_flatten(self): + assert tuple(argmap._flatten([[[[[], []], [], []], [], [], []]], set())) == () + + rlist = ["a", ["b", "c"], [["d"], "e"], "f"] + assert "".join(argmap._flatten(rlist, set())) == "abcdef" + + def test_indent(self): + code = "\n".join( + argmap._indent( + *[ + "try:", + "try:", + "pass#", + "finally:", + "pass#", + "#", + "finally:", + "pass#", + ] + ) + ) + assert ( + code + == """try: + try: + pass# + finally: + pass# + # +finally: + pass#""" + ) + + def test_immediate_raise(self): + @not_implemented_for("directed") + def yield_nodes(G): + yield from G + + G = nx.Graph([(1, 2)]) + D = nx.DiGraph() + + # test first call (argmap is compiled and executed) + with pytest.raises(nx.NetworkXNotImplemented): + node_iter = yield_nodes(D) + + # test second call (argmap is only executed) + with pytest.raises(nx.NetworkXNotImplemented): + node_iter = yield_nodes(D) + + # ensure that generators still make generators + node_iter = yield_nodes(G) + next(node_iter) + next(node_iter) + with pytest.raises(StopIteration): + next(node_iter) diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_heaps.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_heaps.py new file mode 100644 index 0000000..5ea3871 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_heaps.py @@ -0,0 +1,131 @@ +import pytest + +import networkx as nx +from networkx.utils import BinaryHeap, PairingHeap + + +class X: + def __eq__(self, other): + raise self is other + + def __ne__(self, other): + raise self is not other + + def __lt__(self, other): + raise TypeError("cannot compare") + + def __le__(self, other): + raise TypeError("cannot compare") + + def __ge__(self, other): + raise TypeError("cannot compare") + + def __gt__(self, other): + raise TypeError("cannot compare") + + def __hash__(self): + return hash(id(self)) + + +x = X() + + +data = [ # min should 
not invent an element. + ("min", nx.NetworkXError), + # Popping an empty heap should fail. + ("pop", nx.NetworkXError), + # Getting nonexisting elements should return None. + ("get", 0, None), + ("get", x, None), + ("get", None, None), + # Inserting a new key should succeed. + ("insert", x, 1, True), + ("get", x, 1), + ("min", (x, 1)), + # min should not pop the top element. + ("min", (x, 1)), + # Inserting a new key of different type should succeed. + ("insert", 1, -2.0, True), + # int and float values should interop. + ("min", (1, -2.0)), + # pop removes minimum-valued element. + ("insert", 3, -(10**100), True), + ("insert", 4, 5, True), + ("pop", (3, -(10**100))), + ("pop", (1, -2.0)), + # Decrease-insert should succeed. + ("insert", 4, -50, True), + ("insert", 4, -60, False, True), + # Decrease-insert should not create duplicate keys. + ("pop", (4, -60)), + ("pop", (x, 1)), + # Popping all elements should empty the heap. + ("min", nx.NetworkXError), + ("pop", nx.NetworkXError), + # Non-value-changing insert should fail. + ("insert", x, 0, True), + ("insert", x, 0, False, False), + ("min", (x, 0)), + ("insert", x, 0, True, False), + ("min", (x, 0)), + # Failed insert should not create duplicate keys. + ("pop", (x, 0)), + ("pop", nx.NetworkXError), + # Increase-insert should succeed when allowed. + ("insert", None, 0, True), + ("insert", 2, -1, True), + ("min", (2, -1)), + ("insert", 2, 1, True, False), + ("min", (None, 0)), + # Increase-insert should fail when disallowed. + ("insert", None, 2, False, False), + ("min", (None, 0)), + # Failed increase-insert should not create duplicate keys. + ("pop", (None, 0)), + ("pop", (2, 1)), + ("min", nx.NetworkXError), + ("pop", nx.NetworkXError), +] + + +def _test_heap_class(cls, *args, **kwargs): + heap = cls(*args, **kwargs) + # Basic behavioral test + for op in data: + if op[-1] is not nx.NetworkXError: + assert op[-1] == getattr(heap, op[0])(*op[1:-1]) + else: + pytest.raises(op[-1], getattr(heap, op[0]), *op[1:-1]) + # Coverage test. 
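+    # (The loops below churn the heap through bulk inserts, pops, rejected
+    # duplicate inserts, decrease-key updates, and allow-increase inserts,
+    # checking the value returned at each step.)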
+ for i in range(99, -1, -1): + assert heap.insert(i, i) + for i in range(50): + assert heap.pop() == (i, i) + for i in range(100): + assert heap.insert(i, i) == (i < 50) + for i in range(100): + assert not heap.insert(i, i + 1) + for i in range(50): + assert heap.pop() == (i, i) + for i in range(100): + assert heap.insert(i, i + 1) == (i < 50) + for i in range(49): + assert heap.pop() == (i, i + 1) + assert sorted([heap.pop(), heap.pop()]) == [(49, 50), (50, 50)] + for i in range(51, 100): + assert not heap.insert(i, i + 1, True) + for i in range(51, 70): + assert heap.pop() == (i, i + 1) + for i in range(100): + assert heap.insert(i, i) + for i in range(100): + assert heap.pop() == (i, i) + pytest.raises(nx.NetworkXError, heap.pop) + + +def test_PairingHeap(): + _test_heap_class(PairingHeap) + + +def test_BinaryHeap(): + _test_heap_class(BinaryHeap) diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_mapped_queue.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_mapped_queue.py new file mode 100644 index 0000000..ca9b7e4 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_mapped_queue.py @@ -0,0 +1,268 @@ +import pytest + +from networkx.utils.mapped_queue import MappedQueue, _HeapElement + + +def test_HeapElement_gtlt(): + bar = _HeapElement(1.1, "a") + foo = _HeapElement(1, "b") + assert foo < bar + assert bar > foo + assert foo < 1.1 + assert 1 < bar + + +def test_HeapElement_gtlt_tied_priority(): + bar = _HeapElement(1, "a") + foo = _HeapElement(1, "b") + assert foo > bar + assert bar < foo + + +def test_HeapElement_eq(): + bar = _HeapElement(1.1, "a") + foo = _HeapElement(1, "a") + assert foo == bar + assert bar == foo + assert foo == "a" + + +def test_HeapElement_iter(): + foo = _HeapElement(1, "a") + bar = _HeapElement(1.1, (3, 2, 1)) + assert list(foo) == [1, "a"] + assert list(bar) == [1.1, 3, 2, 1] + + +def test_HeapElement_getitem(): + foo = _HeapElement(1, "a") + bar = _HeapElement(1.1, (3, 2, 1)) + assert foo[1] == "a" + assert foo[0] == 1 + assert bar[0] == 1.1 + assert bar[2] == 2 + assert bar[3] == 1 + pytest.raises(IndexError, bar.__getitem__, 4) + pytest.raises(IndexError, foo.__getitem__, 2) + + +class TestMappedQueue: + def setup_method(self): + pass + + def _check_map(self, q): + assert q.position == {elt: pos for pos, elt in enumerate(q.heap)} + + def _make_mapped_queue(self, h): + q = MappedQueue() + q.heap = h + q.position = {elt: pos for pos, elt in enumerate(h)} + return q + + def test_heapify(self): + h = [5, 4, 3, 2, 1, 0] + q = self._make_mapped_queue(h) + q._heapify() + self._check_map(q) + + def test_init(self): + h = [5, 4, 3, 2, 1, 0] + q = MappedQueue(h) + self._check_map(q) + + def test_incomparable(self): + h = [5, 4, "a", 2, 1, 0] + pytest.raises(TypeError, MappedQueue, h) + + def test_len(self): + h = [5, 4, 3, 2, 1, 0] + q = MappedQueue(h) + self._check_map(q) + assert len(q) == 6 + + def test_siftup_leaf(self): + h = [2] + h_sifted = [2] + q = self._make_mapped_queue(h) + q._siftup(0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftup_one_child(self): + h = [2, 0] + h_sifted = [0, 2] + q = self._make_mapped_queue(h) + q._siftup(0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftup_left_child(self): + h = [2, 0, 1] + h_sifted = [0, 2, 1] + q = self._make_mapped_queue(h) + q._siftup(0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftup_right_child(self): + h = [2, 1, 0] + h_sifted = [0, 1, 2] + q = self._make_mapped_queue(h) + 
q._siftup(0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftup_multiple(self): + h = [0, 1, 2, 4, 3, 5, 6] + h_sifted = [0, 1, 2, 4, 3, 5, 6] + q = self._make_mapped_queue(h) + q._siftup(0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftdown_leaf(self): + h = [2] + h_sifted = [2] + q = self._make_mapped_queue(h) + q._siftdown(0, 0) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftdown_single(self): + h = [1, 0] + h_sifted = [0, 1] + q = self._make_mapped_queue(h) + q._siftdown(0, len(h) - 1) + assert q.heap == h_sifted + self._check_map(q) + + def test_siftdown_multiple(self): + h = [1, 2, 3, 4, 5, 6, 7, 0] + h_sifted = [0, 1, 3, 2, 5, 6, 7, 4] + q = self._make_mapped_queue(h) + q._siftdown(0, len(h) - 1) + assert q.heap == h_sifted + self._check_map(q) + + def test_push(self): + to_push = [6, 1, 4, 3, 2, 5, 0] + h_sifted = [0, 2, 1, 6, 3, 5, 4] + q = MappedQueue() + for elt in to_push: + q.push(elt) + assert q.heap == h_sifted + self._check_map(q) + + def test_push_duplicate(self): + to_push = [2, 1, 0] + h_sifted = [0, 2, 1] + q = MappedQueue() + for elt in to_push: + inserted = q.push(elt) + assert inserted + assert q.heap == h_sifted + self._check_map(q) + inserted = q.push(1) + assert not inserted + + def test_pop(self): + h = [3, 4, 6, 0, 1, 2, 5] + h_sorted = sorted(h) + q = self._make_mapped_queue(h) + q._heapify() + popped = [q.pop() for _ in range(len(h))] + assert popped == h_sorted + self._check_map(q) + + def test_remove_leaf(self): + h = [0, 2, 1, 6, 3, 5, 4] + h_removed = [0, 2, 1, 6, 4, 5] + q = self._make_mapped_queue(h) + removed = q.remove(3) + assert q.heap == h_removed + + def test_remove_root(self): + h = [0, 2, 1, 6, 3, 5, 4] + h_removed = [1, 2, 4, 6, 3, 5] + q = self._make_mapped_queue(h) + removed = q.remove(0) + assert q.heap == h_removed + + def test_update_leaf(self): + h = [0, 20, 10, 60, 30, 50, 40] + h_updated = [0, 15, 10, 60, 20, 50, 40] + q = self._make_mapped_queue(h) + removed = q.update(30, 15) + assert q.heap == h_updated + + def test_update_root(self): + h = [0, 20, 10, 60, 30, 50, 40] + h_updated = [10, 20, 35, 60, 30, 50, 40] + q = self._make_mapped_queue(h) + removed = q.update(0, 35) + assert q.heap == h_updated + + +class TestMappedDict(TestMappedQueue): + def _make_mapped_queue(self, h): + priority_dict = {elt: elt for elt in h} + return MappedQueue(priority_dict) + + def test_init(self): + d = {5: 0, 4: 1, "a": 2, 2: 3, 1: 4} + q = MappedQueue(d) + assert q.position == d + + def test_ties(self): + d = {5: 0, 4: 1, 3: 2, 2: 3, 1: 4} + q = MappedQueue(d) + assert q.position == {elt: pos for pos, elt in enumerate(q.heap)} + + def test_pop(self): + d = {5: 0, 4: 1, 3: 2, 2: 3, 1: 4} + q = MappedQueue(d) + assert q.pop() == _HeapElement(0, 5) + assert q.position == {elt: pos for pos, elt in enumerate(q.heap)} + + def test_empty_pop(self): + q = MappedQueue() + pytest.raises(IndexError, q.pop) + + def test_incomparable_ties(self): + d = {5: 0, 4: 0, "a": 0, 2: 0, 1: 0} + pytest.raises(TypeError, MappedQueue, d) + + def test_push(self): + to_push = [6, 1, 4, 3, 2, 5, 0] + h_sifted = [0, 2, 1, 6, 3, 5, 4] + q = MappedQueue() + for elt in to_push: + q.push(elt, priority=elt) + assert q.heap == h_sifted + self._check_map(q) + + def test_push_duplicate(self): + to_push = [2, 1, 0] + h_sifted = [0, 2, 1] + q = MappedQueue() + for elt in to_push: + inserted = q.push(elt, priority=elt) + assert inserted + assert q.heap == h_sifted + self._check_map(q) + inserted = q.push(1, priority=1) + assert not 
inserted + + def test_update_leaf(self): + h = [0, 20, 10, 60, 30, 50, 40] + h_updated = [0, 15, 10, 60, 20, 50, 40] + q = self._make_mapped_queue(h) + removed = q.update(30, 15, priority=15) + assert q.heap == h_updated + + def test_update_root(self): + h = [0, 20, 10, 60, 30, 50, 40] + h_updated = [10, 20, 35, 60, 30, 50, 40] + q = self._make_mapped_queue(h) + removed = q.update(0, 35, priority=35) + assert q.heap == h_updated diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_misc.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_misc.py new file mode 100644 index 0000000..f9d70d9 --- /dev/null +++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_misc.py @@ -0,0 +1,268 @@ +import random +from copy import copy + +import pytest + +import networkx as nx +from networkx.utils import ( + PythonRandomInterface, + PythonRandomViaNumpyBits, + arbitrary_element, + create_py_random_state, + create_random_state, + dict_to_numpy_array, + discrete_sequence, + flatten, + groups, + make_list_of_ints, + pairwise, + powerlaw_sequence, +) +from networkx.utils.misc import _dict_to_numpy_array1, _dict_to_numpy_array2 + +nested_depth = ( + 1, + 2, + (3, 4, ((5, 6, (7,), (8, (9, 10), 11), (12, 13, (14, 15)), 16), 17), 18, 19), + 20, +) + +nested_set = { + (1, 2, 3, 4), + (5, 6, 7, 8, 9), + (10, 11, (12, 13, 14), (15, 16, 17, 18)), + 19, + 20, +} + +nested_mixed = [ + 1, + (2, 3, {4, (5, 6), 7}, [8, 9]), + {10: "foo", 11: "bar", (12, 13): "baz"}, + {(14, 15): "qwe", 16: "asd"}, + (17, (18, "19"), 20), +] + + +@pytest.mark.parametrize("result", [None, [], ["existing"], ["existing1", "existing2"]]) +@pytest.mark.parametrize("nested", [nested_depth, nested_mixed, nested_set]) +def test_flatten(nested, result): + if result is None: + val = flatten(nested, result) + assert len(val) == 20 + else: + _result = copy(result) # because pytest passes parameters as is + nexisting = len(_result) + val = flatten(nested, _result) + assert len(val) == len(_result) == 20 + nexisting + + assert issubclass(type(val), tuple) + + +def test_make_list_of_ints(): + mylist = [1, 2, 3.0, 42, -2] + assert make_list_of_ints(mylist) is mylist + assert make_list_of_ints(mylist) == mylist + assert isinstance(make_list_of_ints(mylist)[2], int) + pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3, "kermit"]) + pytest.raises(nx.NetworkXError, make_list_of_ints, [1, 2, 3.1]) + + +def test_random_number_distribution(): + # smoke test only + z = powerlaw_sequence(20, exponent=2.5) + z = discrete_sequence(20, distribution=[0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3]) + + +class TestNumpyArray: + @classmethod + def setup_class(cls): + global np + np = pytest.importorskip("numpy") + + def test_numpy_to_list_of_ints(self): + a = np.array([1, 2, 3], dtype=np.int64) + b = np.array([1.0, 2, 3]) + c = np.array([1.1, 2, 3]) + assert isinstance(make_list_of_ints(a), list) + assert make_list_of_ints(b) == list(b) + B = make_list_of_ints(b) + assert isinstance(B[0], int) + pytest.raises(nx.NetworkXError, make_list_of_ints, c) + + def test__dict_to_numpy_array1(self): + d = {"a": 1, "b": 2} + a = _dict_to_numpy_array1(d, mapping={"a": 0, "b": 1}) + np.testing.assert_allclose(a, np.array([1, 2])) + a = _dict_to_numpy_array1(d, mapping={"b": 0, "a": 1}) + np.testing.assert_allclose(a, np.array([2, 1])) + + a = _dict_to_numpy_array1(d) + np.testing.assert_allclose(a.sum(), 3) + + def test__dict_to_numpy_array2(self): + d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}} + + mapping = {"a": 1, "b": 0} + a = 
_dict_to_numpy_array2(d, mapping=mapping) + np.testing.assert_allclose(a, np.array([[20, 10], [2, 1]])) + + a = _dict_to_numpy_array2(d) + np.testing.assert_allclose(a.sum(), 33) + + def test_dict_to_numpy_array_a(self): + d = {"a": {"a": 1, "b": 2}, "b": {"a": 10, "b": 20}} + + mapping = {"a": 0, "b": 1} + a = dict_to_numpy_array(d, mapping=mapping) + np.testing.assert_allclose(a, np.array([[1, 2], [10, 20]])) + + mapping = {"a": 1, "b": 0} + a = dict_to_numpy_array(d, mapping=mapping) + np.testing.assert_allclose(a, np.array([[20, 10], [2, 1]])) + + a = _dict_to_numpy_array2(d) + np.testing.assert_allclose(a.sum(), 33) + + def test_dict_to_numpy_array_b(self): + d = {"a": 1, "b": 2} + + mapping = {"a": 0, "b": 1} + a = dict_to_numpy_array(d, mapping=mapping) + np.testing.assert_allclose(a, np.array([1, 2])) + + a = _dict_to_numpy_array1(d) + np.testing.assert_allclose(a.sum(), 3) + + +def test_pairwise(): + nodes = range(4) + node_pairs = [(0, 1), (1, 2), (2, 3)] + node_pairs_cycle = node_pairs + [(3, 0)] + assert list(pairwise(nodes)) == node_pairs + assert list(pairwise(iter(nodes))) == node_pairs + assert list(pairwise(nodes, cyclic=True)) == node_pairs_cycle + empty_iter = iter(()) + assert list(pairwise(empty_iter)) == [] + empty_iter = iter(()) + assert list(pairwise(empty_iter, cyclic=True)) == [] + + +def test_groups(): + many_to_one = dict(zip("abcde", [0, 0, 1, 1, 2])) + actual = groups(many_to_one) + expected = {0: {"a", "b"}, 1: {"c", "d"}, 2: {"e"}} + assert actual == expected + assert {} == groups({}) + + +def test_create_random_state(): + np = pytest.importorskip("numpy") + rs = np.random.RandomState + + assert isinstance(create_random_state(1), rs) + assert isinstance(create_random_state(None), rs) + assert isinstance(create_random_state(np.random), rs) + assert isinstance(create_random_state(rs(1)), rs) + # Support for numpy.random.Generator + rng = np.random.default_rng() + assert isinstance(create_random_state(rng), np.random.Generator) + pytest.raises(ValueError, create_random_state, "a") + + assert np.all(rs(1).rand(10) == create_random_state(1).rand(10)) + + +def test_create_py_random_state(): + pyrs = random.Random + + assert isinstance(create_py_random_state(1), pyrs) + assert isinstance(create_py_random_state(None), pyrs) + assert isinstance(create_py_random_state(pyrs(1)), pyrs) + pytest.raises(ValueError, create_py_random_state, "a") + + np = pytest.importorskip("numpy") + + rs = np.random.RandomState + rng = np.random.default_rng(1000) + rng_explicit = np.random.Generator(np.random.SFC64()) + old_nprs = PythonRandomInterface + nprs = PythonRandomViaNumpyBits + assert isinstance(create_py_random_state(np.random), nprs) + assert isinstance(create_py_random_state(rs(1)), old_nprs) + assert isinstance(create_py_random_state(rng), nprs) + assert isinstance(create_py_random_state(rng_explicit), nprs) + # test default rng input + assert isinstance(PythonRandomInterface(), old_nprs) + assert isinstance(PythonRandomViaNumpyBits(), nprs) + + # VeryLargeIntegers Smoke test (they raise error for np.random) + int64max = 9223372036854775807 # from np.iinfo(np.int64).max + for r in (rng, rs(1)): + prs = create_py_random_state(r) + prs.randrange(3, int64max + 5) + prs.randint(3, int64max + 5) + + +def test_PythonRandomInterface_RandomState(): + np = pytest.importorskip("numpy") + + seed = 42 + rs = np.random.RandomState + rng = PythonRandomInterface(rs(seed)) + rs42 = rs(seed) + + # make sure these functions are same as expected outcome + assert rng.randrange(3, 5) == 
rs42.randint(3, 5)
+    assert rng.choice([1, 2, 3]) == rs42.choice([1, 2, 3])
+    assert rng.gauss(0, 1) == rs42.normal(0, 1)
+    assert rng.expovariate(1.5) == rs42.exponential(1 / 1.5)
+    assert np.all(rng.shuffle([1, 2, 3]) == rs42.shuffle([1, 2, 3]))
+    assert np.all(
+        rng.sample([1, 2, 3], 2) == rs42.choice([1, 2, 3], (2,), replace=False)
+    )
+    assert np.all(
+        [rng.randint(3, 5) for _ in range(100)]
+        == [rs42.randint(3, 6) for _ in range(100)]
+    )
+    assert rng.random() == rs42.random_sample()
+
+
+def test_PythonRandomInterface_Generator():
+    np = pytest.importorskip("numpy")
+
+    seed = 42
+    rng = np.random.default_rng(seed)
+    pri = PythonRandomInterface(np.random.default_rng(seed))
+
+    # make sure these functions are same as expected outcome
+    assert pri.randrange(3, 5) == rng.integers(3, 5)
+    assert pri.choice([1, 2, 3]) == rng.choice([1, 2, 3])
+    assert pri.gauss(0, 1) == rng.normal(0, 1)
+    assert pri.expovariate(1.5) == rng.exponential(1 / 1.5)
+    assert np.all(pri.shuffle([1, 2, 3]) == rng.shuffle([1, 2, 3]))
+    assert np.all(
+        pri.sample([1, 2, 3], 2) == rng.choice([1, 2, 3], (2,), replace=False)
+    )
+    assert np.all(
+        [pri.randint(3, 5) for _ in range(100)]
+        == [rng.integers(3, 6) for _ in range(100)]
+    )
+    assert pri.random() == rng.random()
+
+
+@pytest.mark.parametrize(
+    ("iterable_type", "expected"), ((list, 1), (tuple, 1), (str, "["), (set, 1))
+)
+def test_arbitrary_element(iterable_type, expected):
+    iterable = iterable_type([1, 2, 3])
+    assert arbitrary_element(iterable) == expected
+
+
+@pytest.mark.parametrize(
+    "iterator",
+    ((i for i in range(3)), iter([1, 2, 3])),  # generator
+)
+def test_arbitrary_element_raises(iterator):
+    """Value error is raised when input is an iterator."""
+    with pytest.raises(ValueError, match="from an iterator"):
+        arbitrary_element(iterator)
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_random_sequence.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_random_sequence.py
new file mode 100644
index 0000000..1d1b957
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_random_sequence.py
@@ -0,0 +1,38 @@
+import pytest
+
+from networkx.utils import (
+    powerlaw_sequence,
+    random_weighted_sample,
+    weighted_choice,
+    zipf_rv,
+)
+
+
+def test_degree_sequences():
+    seq = powerlaw_sequence(10, seed=1)
+    seq = powerlaw_sequence(10)
+    assert len(seq) == 10
+
+
+def test_zipf_rv():
+    r = zipf_rv(2.3, xmin=2, seed=1)
+    r = zipf_rv(2.3, 2, 1)
+    r = zipf_rv(2.3)
+    assert isinstance(r, int)
+    pytest.raises(ValueError, zipf_rv, 0.5)
+    pytest.raises(ValueError, zipf_rv, 2, xmin=0)
+
+
+def test_random_weighted_sample():
+    mapping = {"a": 10, "b": 20}
+    s = random_weighted_sample(mapping, 2, seed=1)
+    s = random_weighted_sample(mapping, 2)
+    assert sorted(s) == sorted(mapping.keys())
+    pytest.raises(ValueError, random_weighted_sample, mapping, 3)
+
+
+def test_random_weighted_choice():
+    mapping = {"a": 10, "b": 0}
+    c = weighted_choice(mapping, seed=1)
+    c = weighted_choice(mapping)
+    assert c == "a"
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_rcm.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_rcm.py
new file mode 100644
index 0000000..88702b3
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_rcm.py
@@ -0,0 +1,63 @@
+import networkx as nx
+from networkx.utils import reverse_cuthill_mckee_ordering
+
+
+def test_reverse_cuthill_mckee():
+    # example graph from
+    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
+    G = nx.Graph(
+        [
+            (0, 3),
+            (0, 5),
+            (1, 2),
+            (1, 4),
+            (1, 6),
+            (1, 9),
+            (2, 3),
+            (2, 4),
+            (3, 5),
+            (3, 8),
+            (4, 6),
+            (5, 6),
+            (5, 7),
+            (6, 7),
+        ]
+    )
+    rcm = list(reverse_cuthill_mckee_ordering(G))
+    assert rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9], [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]]
+
+
+def test_rcm_alternate_heuristic():
+    # example from
+    G = nx.Graph(
+        [
+            (0, 0),
+            (0, 4),
+            (1, 1),
+            (1, 2),
+            (1, 5),
+            (1, 7),
+            (2, 2),
+            (2, 4),
+            (3, 3),
+            (3, 6),
+            (4, 4),
+            (5, 5),
+            (5, 7),
+            (6, 6),
+            (7, 7),
+        ]
+    )
+
+    answers = [
+        [6, 3, 5, 7, 1, 2, 4, 0],
+        [6, 3, 7, 5, 1, 2, 4, 0],
+        [7, 5, 1, 2, 4, 0, 6, 3],
+    ]
+
+    def smallest_degree(G):
+        deg, node = min((d, n) for n, d in G.degree())
+        return node
+
+    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
+    assert rcm in answers
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_unionfind.py b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_unionfind.py
new file mode 100644
index 0000000..2d30580
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/tests/test_unionfind.py
@@ -0,0 +1,55 @@
+import networkx as nx
+
+
+def test_unionfind():
+    # Fixed by: 2cddd5958689bdecdcd89b91ac9aaf6ce0e4f6b8
+    # Previously (in 2.x), the UnionFind class could handle mixed types.
+    # But in Python 3.x, this causes a TypeError such as:
+    # TypeError: unorderable types: str() > int()
+    #
+    # Now we just make sure that no exception is raised.
+    x = nx.utils.UnionFind()
+    x.union(0, "a")
+
+
+def test_subtree_union():
+    # See https://github.com/networkx/networkx/pull/3224
+    # (35db1b551ee65780794a357794f521d8768d5049).
+    # Test if subtree unions are handled correctly by to_sets().
+    uf = nx.utils.UnionFind()
+    uf.union(1, 2)
+    uf.union(3, 4)
+    uf.union(4, 5)
+    uf.union(1, 5)
+    assert list(uf.to_sets()) == [{1, 2, 3, 4, 5}]
+
+
+def test_unionfind_weights():
+    # Tests if weights are computed correctly with unions of many elements
+    uf = nx.utils.UnionFind()
+    uf.union(1, 4, 7)
+    uf.union(2, 5, 8)
+    uf.union(3, 6, 9)
+    uf.union(1, 2, 3, 4, 5, 6, 7, 8, 9)
+    assert uf.weights[uf[1]] == 9
+
+
+def test_unbalanced_merge_weights():
+    # Tests if the largest set's root is used as the new root when merging
+    uf = nx.utils.UnionFind()
+    uf.union(1, 2, 3)
+    uf.union(4, 5, 6, 7, 8, 9)
+    assert uf.weights[uf[1]] == 3
+    assert uf.weights[uf[4]] == 6
+    largest_root = uf[4]
+    uf.union(1, 4)
+    assert uf[1] == largest_root
+    assert uf.weights[largest_root] == 9
+
+
+def test_empty_union():
+    # Tests if a null-union does nothing.
+    uf = nx.utils.UnionFind((0, 1))
+    uf.union()
+    assert uf[0] == 0
+    assert uf[1] == 1
diff --git a/.venv/lib/python3.12/site-packages/networkx/utils/union_find.py b/.venv/lib/python3.12/site-packages/networkx/utils/union_find.py
new file mode 100644
index 0000000..2a07129
--- /dev/null
+++ b/.venv/lib/python3.12/site-packages/networkx/utils/union_find.py
@@ -0,0 +1,106 @@
+"""
+Union-find data structure.
+"""
+
+from networkx.utils import groups
+
+
+class UnionFind:
+    """Union-find data structure.
+
+    Each UnionFind instance X maintains a family of disjoint sets of
+    hashable objects, supporting the following two methods:
+
+    - X[item] returns a name for the set containing the given item.
+      Each set is named by an arbitrarily chosen one of its members; as
+      long as the set remains unchanged it will keep the same name.
If + the item is not yet part of a set in X, a new singleton set is + created for it. + + - X.union(item1, item2, ...) merges the sets containing each item + into a single larger set. If any item is not yet part of a set + in X, it is added to X as one of the members of the merged set. + + Union-find data structure. Based on Josiah Carlson's code, + https://code.activestate.com/recipes/215912/ + with significant additional changes by D. Eppstein. + http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py + + """ + + def __init__(self, elements=None): + """Create a new empty union-find structure. + + If *elements* is an iterable, this structure will be initialized + with the discrete partition on the given set of elements. + + """ + if elements is None: + elements = () + self.parents = {} + self.weights = {} + for x in elements: + self.weights[x] = 1 + self.parents[x] = x + + def __getitem__(self, object): + """Find and return the name of the set containing the object.""" + + # check for previously unknown object + if object not in self.parents: + self.parents[object] = object + self.weights[object] = 1 + return object + + # find path of objects leading to the root + path = [] + root = self.parents[object] + while root != object: + path.append(object) + object = root + root = self.parents[object] + + # compress the path and return + for ancestor in path: + self.parents[ancestor] = root + return root + + def __iter__(self): + """Iterate through all items ever found or unioned by this structure.""" + return iter(self.parents) + + def to_sets(self): + """Iterates over the sets stored in this structure. + + For example:: + + >>> partition = UnionFind("xyz") + >>> sorted(map(sorted, partition.to_sets())) + [['x'], ['y'], ['z']] + >>> partition.union("x", "y") + >>> sorted(map(sorted, partition.to_sets())) + [['x', 'y'], ['z']] + + """ + # Ensure fully pruned paths + for x in self.parents: + _ = self[x] # Evaluated for side-effect only + + yield from groups(self.parents).values() + + def union(self, *objects): + """Find the sets containing the objects and merge them all.""" + # Find the heaviest root according to its weight. + roots = iter( + sorted( + {self[x] for x in objects}, key=lambda r: self.weights[r], reverse=True + ) + ) + try: + root = next(roots) + except StopIteration: + return + + for r in roots: + self.weights[root] += self.weights[r] + self.parents[r] = root -- cgit v1.2.3
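
For reference, a minimal usage sketch of the UnionFind class added above:
computing the connected components of an edge list (the `edges` data is
invented for illustration).

from networkx.utils import UnionFind

# Union the endpoints of each edge; the resulting disjoint sets are the
# connected components of the edge list.
edges = [("a", "b"), ("b", "c"), ("x", "y")]
uf = UnionFind()
for u, v in edges:
    uf.union(u, v)
assert sorted(map(sorted, uf.to_sets())) == [["a", "b", "c"], ["x", "y"]]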